diff --git a/modules/deeplearning/icing.py b/modules/deeplearning/icing.py
index d97e2fbc04bab2cec2f48d5ff5b90c54018b9faa..2d9924db1802ccff2eac411bd069bf71b8b82f32 100644
--- a/modules/deeplearning/icing.py
+++ b/modules/deeplearning/icing.py
@@ -201,6 +201,7 @@ class IcingIntensityNN:
 
         # binary, two class
         label = np.where(label != 0, 1, label)
+        label = label.reshape((label.shape[0], 1))
 
         # TODO: Implement in memory cache
         # for key in keys:
@@ -351,6 +352,7 @@ class IcingIntensityNN:
         else:
             flat = self.X_img
             n_hidden = self.X_img.shape[1]
+            n_hidden = 100  # fixed hidden width; deliberately overrides the data-derived value above
 
         fac = 1
 
@@ -364,20 +366,20 @@ class IcingIntensityNN:
 
         fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_5')
 
-        # activation = tf.nn.softmax
-        activation = tf.nn.sigmoid  # For binary
-
         fc = tf.keras.layers.Dense(n_hidden, activation=activation)(fc)
         fc = tf.keras.layers.BatchNormalization()(fc)
         print(fc.shape)
 
-        logits = tf.keras.layers.Dense(NumLabels)(fc)
+        # activation = tf.nn.softmax
+        activation = tf.nn.sigmoid  # For binary
+
+        logits = tf.keras.layers.Dense(NumLabels, activation=activation)(fc)
         print(logits.shape)
 
         self.logits = logits
 
     def build_training(self):
-        self.loss = tf.keras.losses.BinaryCrossentropy()  # for two-class only
+        # Output layer already applies sigmoid, so the loss must receive probabilities,
+        # not raw logits. from_logits=True here would apply sigmoid a second time.
+        self.loss = tf.keras.losses.BinaryCrossentropy(from_logits=False)  # for two-class only
         #self.loss = tf.keras.losses.SparseCategoricalCrossentropy()  # For multi-class
 
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)