diff --git a/modules/deeplearning/icing_cnn.py b/modules/deeplearning/icing_cnn.py
index cd8b863489641b04464187c8c5930270e6b77caf..e9cbd3a8501a2a936c2c379cb399c928c4e11d24 100644
--- a/modules/deeplearning/icing_cnn.py
+++ b/modules/deeplearning/icing_cnn.py
@@ -638,11 +638,10 @@ class IcingIntensityNN:
             self.loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False)  # For multi-class
 
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
-        initial_learning_rate = 0.002
+        initial_learning_rate = 0.006
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
-        # decay_steps = int(steps_per_epoch / 2)
-        decay_steps = 8 * steps_per_epoch
+        decay_steps = int(steps_per_epoch / 2)
         print('initial rate, decay rate, steps/epoch, decay steps: ', initial_learning_rate, decay_rate, steps_per_epoch, decay_steps)
 
         self.learningRateSchedule = tf.keras.optimizers.schedules.ExponentialDecay(initial_learning_rate, decay_steps, decay_rate)
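
Below is a minimal standalone sketch (not part of the diff) of how the revised schedule behaves: with decay_steps = steps_per_epoch / 2, the rate now decays by decay_rate twice per epoch instead of once every eight epochs, so it falls off much faster. The num_data_samples and BATCH_SIZE values here are hypothetical placeholders; in IcingIntensityNN they come from self.num_data_samples and the module-level BATCH_SIZE.

import tensorflow as tf

num_data_samples = 100000   # hypothetical sample count
BATCH_SIZE = 256            # hypothetical batch size

initial_learning_rate = 0.006
decay_rate = 0.95
steps_per_epoch = int(num_data_samples / BATCH_SIZE)
decay_steps = int(steps_per_epoch / 2)  # decay applied twice per epoch

schedule = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate, decay_steps, decay_rate)

# decayed_learning_rate = initial_learning_rate * decay_rate ** (step / decay_steps)
for epoch in range(4):
    step = epoch * steps_per_epoch
    print(f'epoch {epoch}: lr = {schedule(step).numpy():.6f}')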