Commit 645337ab authored by tomrink

snapshot..

parent 6810c1f2
@@ -20,8 +20,8 @@ if NumClasses == 2:
 else:
     NumLogits = NumClasses
 
-BATCH_SIZE = 64
-NUM_EPOCHS = 80
+BATCH_SIZE = 128
+NUM_EPOCHS = 60
 
 TRACK_MOVING_AVERAGE = False
 EARLY_STOP = True
@@ -583,7 +583,7 @@ class IcingIntensityFCN:
         self.get_evaluate_dataset(idxs)
 
-    def build_cnn(self, do_drop_out=False, do_batch_norm=True, drop_rate=0.5):
+    def build_cnn(self, do_drop_out=True, do_batch_norm=True, drop_rate=0.5):
         print('build_cnn')
 
         # padding = "VALID"
         padding = "SAME"
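The hunk above flips the default of do_drop_out from False to True, but the body of build_cnn is not shown in this commit. As a minimal sketch only, assuming the flags gate Keras layers in the usual way (conv_block, the filter count, and the input shape below are invented for illustration, not taken from the repository):

import tensorflow as tf

def conv_block(x, filters, do_drop_out=True, do_batch_norm=True, drop_rate=0.5,
               padding="SAME"):
    # Hypothetical illustration of how such flags commonly gate layers;
    # the real build_cnn body is not part of this hunk.
    x = tf.keras.layers.Conv2D(filters, 3, padding=padding.lower(),
                               activation="relu")(x)
    if do_batch_norm:
        x = tf.keras.layers.BatchNormalization()(x)
    if do_drop_out:
        # With the new default (do_drop_out=True), this regularizer is active.
        x = tf.keras.layers.Dropout(drop_rate)(x)
    return x

inputs = tf.keras.Input(shape=(16, 16, 8))  # made-up input shape
outputs = conv_block(inputs, filters=32)
model = tf.keras.Model(inputs, outputs)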
@@ -731,7 +731,7 @@ class IcingIntensityFCN:
         self.loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False)  # For multi-class
 
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
-        initial_learning_rate = 0.0006
+        initial_learning_rate = 0.0005
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
         decay_steps = int(steps_per_epoch)
...
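These constants implement the exponential-decay formula quoted in the in-diff comment: the learning rate shrinks by a factor of decay_rate once per epoch. A standalone sketch, assuming the class feeds these values into tf.keras.optimizers.schedules.ExponentialDecay (the sample count and the Adam optimizer are placeholders, not values from the commit):

import tensorflow as tf

BATCH_SIZE = 128
num_data_samples = 50000  # placeholder; the real value comes from the dataset

initial_learning_rate = 0.0005
decay_rate = 0.95
steps_per_epoch = int(num_data_samples / BATCH_SIZE)  # one epoch
decay_steps = int(steps_per_epoch)

# decayed_learning_rate = initial_learning_rate * decay_rate ** (step / decay_steps)
lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate, decay_steps, decay_rate, staircase=False)

optimizer = tf.keras.optimizers.Adam(learning_rate=lr_schedule)

# After one full epoch the rate is 0.0005 * 0.95 = 0.000475.
print(float(lr_schedule(steps_per_epoch)))  # ~0.000475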