diff --git a/modules/deeplearning/srcnn_cld_frac.py b/modules/deeplearning/srcnn_cld_frac.py
index 39d509728732e529eb78e946409e6ff46013bd6f..f136b92efd6b88f07d8275123713d24c36a26e8e 100644
--- a/modules/deeplearning/srcnn_cld_frac.py
+++ b/modules/deeplearning/srcnn_cld_frac.py
@@ -183,9 +183,9 @@ def get_label_data(grd):
     blen, ylen, xlen = grd.shape
     grd = grd.flatten()
     grd = np.where(np.isnan(grd), 0, grd)
-    cat_0 = np.logical_and(grd >= 0.0, grd < 0.3)
-    cat_1 = np.logical_and(grd >= 0.3, grd < 0.7)
-    cat_2 = np.logical_and(grd >= 0.7, grd <= 1.0)
+    cat_0 = np.logical_and(grd >= 0.0, grd < 0.2)
+    cat_1 = np.logical_and(grd >= 0.2, grd < 0.8)
+    cat_2 = np.logical_and(grd >= 0.8, grd <= 1.0)
 
     grd[cat_0] = 0
     grd[cat_1] = 1
@@ -526,10 +526,10 @@ class SRCNN:
         # self.loss = tf.keras.losses.MeanAbsoluteError()  # Regression
 
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
-        initial_learning_rate = 0.005
+        initial_learning_rate = 0.006
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
-        decay_steps = int(steps_per_epoch)
+        decay_steps = int(steps_per_epoch) * 4
         print('initial rate, decay rate, steps/epoch, decay steps: ', initial_learning_rate, decay_rate, steps_per_epoch, decay_steps)
 
         self.learningRateSchedule = tf.keras.optimizers.schedules.ExponentialDecay(initial_learning_rate, decay_steps, decay_rate)