Commit 236521c5 authored by tomrink

snapshot..

parent 504b713b
@@ -20,8 +20,8 @@ if NumClasses == 2:
 else:
     NumLogits = NumClasses
-BATCH_SIZE = 128
-NUM_EPOCHS = 50
+BATCH_SIZE = 64
+NUM_EPOCHS = 80
 TRACK_MOVING_AVERAGE = False
 EARLY_STOP = True
@@ -317,9 +317,17 @@ class IcingIntensityFCN:
         data_alt_lr = np.copy(data_alt)
         label_lr = np.copy(label)
-        data = np.concatenate([data, data_ud, data_lr])
-        data_alt = np.concatenate([data_alt, data_alt_ud, data_alt_lr])
-        label = np.concatenate([label, label_ud, label_lr])
+        data_r1 = np.rot90(data, k=1)
+        data_alt_r1 = np.copy(data_alt)
+        label_r1 = np.copy(label)
+        data_r2 = np.rot90(data, k=2)
+        data_alt_r2 = np.copy(data_alt)
+        label_r2 = np.copy(label)
+        data = np.concatenate([data, data_ud, data_lr, data_r1, data_r2])
+        data_alt = np.concatenate([data_alt, data_alt_ud, data_alt_lr, data_alt_r1, data_alt_r2])
+        label = np.concatenate([label, label_ud, label_lr, label_r1, label_r2])
         return data, data_alt, label
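
For reference, a minimal sketch of this augmentation step as a standalone function, under the assumption that data holds patches shaped (N, H, W, C) with square H = W and that data_alt and label are per-sample arrays; the function name and the axes=(1, 2) arguments are illustrative assumptions, not part of this commit. Rotating over the spatial axes only keeps the batch axis fixed, so the copied labels stay aligned with their samples.

import numpy as np

def augment_flips_and_rotations(data, data_alt, label):
    # Vertical and horizontal flips over the spatial axes of (N, H, W, C) patches.
    data_ud = np.flip(data, axis=1)
    data_lr = np.flip(data, axis=2)
    # 90-degree and 180-degree rotations in the (H, W) plane; axes=(1, 2) leaves the
    # batch axis untouched (square patches keep the shapes compatible).
    data_r1 = np.rot90(data, k=1, axes=(1, 2))
    data_r2 = np.rot90(data, k=2, axes=(1, 2))
    # Per-sample auxiliary data and labels are unchanged by spatial flips and
    # rotations, so each augmented copy simply reuses them.
    data = np.concatenate([data, data_ud, data_lr, data_r1, data_r2])
    data_alt = np.concatenate([data_alt] * 5)
    label = np.concatenate([label] * 5)
    return data, data_alt, label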
@@ -721,7 +729,7 @@ class IcingIntensityFCN:
         self.loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False)  # For multi-class
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
-        initial_learning_rate = 0.002
+        initial_learning_rate = 0.0006
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
         decay_steps = int(steps_per_epoch)
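
The commented formula is the standard exponential-decay schedule. Below is a sketch of how these constants would typically be wired into a TensorFlow optimizer; the Adam choice and the num_data_samples placeholder are assumptions for illustration, not taken from this commit.

import tensorflow as tf

initial_learning_rate = 0.0006
decay_rate = 0.95
num_data_samples = 100_000          # placeholder; the class uses self.num_data_samples
BATCH_SIZE = 64
steps_per_epoch = int(num_data_samples / BATCH_SIZE)
decay_steps = steps_per_epoch       # decay once per epoch

# decayed_learning_rate = initial_learning_rate * decay_rate ** (step / decay_steps)
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate, decay_steps=decay_steps, decay_rate=decay_rate, staircase=False)
optimizer = tf.keras.optimizers.Adam(learning_rate=lr_schedule)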