Commit 2dfbd8de authored by tomrink

snapshot...

parent 100113ed
@@ -201,6 +201,7 @@ class IcingIntensityNN:
         # binary, two class
         label = np.where(label != 0, 1, label)
+        label = label.reshape((label.shape[0], 1))
         # TODO: Implement in memory cache
         # for key in keys:
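
For context, a minimal NumPy-only sketch of the label handling in this hunk; the sample values are hypothetical. The added reshape turns the flat label vector into a column so it matches the (N, 1) logits the binary head produces:

    import numpy as np

    label = np.array([0, 2, 1, 0, 3])           # hypothetical multi-intensity labels
    label = np.where(label != 0, 1, label)      # collapse to binary: icing vs. no icing
    label = label.reshape((label.shape[0], 1))  # (N,) -> (N, 1), matching the logits
    print(label.shape)                          # (5, 1)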
@@ -351,6 +352,7 @@ class IcingIntensityNN:
         else:
             flat = self.X_img
         n_hidden = self.X_img.shape[1]
+        n_hidden = 100
         fac = 1
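
Note that the added assignment shadows the width just derived from the flattened input, so the dense stack is built at a fixed width of 100 regardless of input size. A minimal sketch with hypothetical shapes:

    import numpy as np

    flat = np.zeros((32, 4096), dtype=np.float32)  # e.g. a flattened image batch
    n_hidden = flat.shape[1]                       # 4096: input-derived width (old behavior)
    n_hidden = 100                                 # fixed width in this snapshot
    print(n_hidden)                                # 100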
@@ -364,20 +366,20 @@ class IcingIntensityNN:
         fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_5')
+        # activation = tf.nn.softmax
+        activation = tf.nn.sigmoid  # For binary
         fc = tf.keras.layers.Dense(n_hidden, activation=activation)(fc)
         fc = tf.keras.layers.BatchNormalization()(fc)
         print(fc.shape)
+        logits = tf.keras.layers.Dense(NumLabels)(fc)
-        # activation = tf.nn.softmax
-        activation = tf.nn.sigmoid  # For binary
-        logits = tf.keras.layers.Dense(NumLabels, activation=activation)(fc)
         print(logits.shape)
         self.logits = logits
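
The net effect of this hunk is that the sigmoid moves up to the last hidden Dense layer and the final Dense layer emits raw logits. A minimal runnable sketch assuming TensorFlow 2.x; build_residual_block below is a hypothetical stand-in (the repository defines its own), included only so the snippet is self-contained:

    import tensorflow as tf

    NumLabels = 1  # assumed: single-unit output for the two-class case

    def build_residual_block(x, drop_rate, n_hidden, activation, name):
        # Hypothetical stand-in: two dense layers with a skip connection.
        y = tf.keras.layers.Dense(n_hidden, activation=activation)(x)
        y = tf.keras.layers.Dropout(drop_rate)(y)
        y = tf.keras.layers.Dense(n_hidden, activation=activation)(y)
        if x.shape[-1] != n_hidden:
            x = tf.keras.layers.Dense(n_hidden)(x)  # project skip path to match
        return tf.keras.layers.Add(name=name)([x, y])

    n_hidden = 100
    inputs = tf.keras.Input(shape=(n_hidden,))
    fc = build_residual_block(inputs, 0.5, n_hidden, tf.nn.relu, 'Residual_Block_5')
    activation = tf.nn.sigmoid  # For binary
    fc = tf.keras.layers.Dense(n_hidden, activation=activation)(fc)
    fc = tf.keras.layers.BatchNormalization()(fc)
    logits = tf.keras.layers.Dense(NumLabels)(fc)  # no activation: raw logits
    model = tf.keras.Model(inputs, logits)
    model.summary()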
     def build_training(self):
-        self.loss = tf.keras.losses.BinaryCrossentropy()  # for two-class only
+        self.loss = tf.keras.losses.BinaryCrossentropy(from_logits=True)  # for two-class only
         #self.loss = tf.keras.losses.SparseCategoricalCrossentropy()  # For multi-class
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
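
This pairs with the head change above: the model now outputs raw logits, so the loss must be told via from_logits=True, which applies the sigmoid inside a numerically stable computation instead of after the network. A minimal sketch, assuming TensorFlow 2.x:

    import tensorflow as tf

    y_true = tf.constant([[0.], [1.], [1.]])
    logits = tf.constant([[-2.0], [0.5], [3.0]])  # raw Dense output, no sigmoid

    bce_logits = tf.keras.losses.BinaryCrossentropy(from_logits=True)
    bce_probs = tf.keras.losses.BinaryCrossentropy()  # expects probabilities

    # Same result, but the from_logits path avoids saturating the sigmoid:
    print(bce_logits(y_true, logits).numpy())
    print(bce_probs(y_true, tf.sigmoid(logits)).numpy())

The decay formula in the trailing comment matches tf.keras.optimizers.schedules.ExponentialDecay:

    schedule = tf.keras.optimizers.schedules.ExponentialDecay(
        initial_learning_rate=0.001, decay_steps=1000, decay_rate=0.96)
    print(schedule(0).numpy(), schedule(1000).numpy())  # 0.001, then 0.001 * 0.96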