Commit 525fd1ef authored by tomrink

snapshot...

parent f4b5cc59
@@ -183,9 +183,9 @@ def get_label_data(grd):
     blen, ylen, xlen = grd.shape
     grd = grd.flatten()
     grd = np.where(np.isnan(grd), 0, grd)
-    cat_0 = np.logical_and(grd >= 0.0, grd < 0.3)
-    cat_1 = np.logical_and(grd >= 0.3, grd < 0.7)
-    cat_2 = np.logical_and(grd >= 0.7, grd <= 1.0)
+    cat_0 = np.logical_and(grd >= 0.0, grd < 0.2)
+    cat_1 = np.logical_and(grd >= 0.2, grd < 0.8)
+    cat_2 = np.logical_and(grd >= 0.8, grd <= 1.0)
     grd[cat_0] = 0
     grd[cat_1] = 1
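
The first hunk widens the middle class: label 0 now covers [0.0, 0.2), label 1 covers [0.2, 0.8), and label 2 covers [0.8, 1.0] (the cat_2 assignment presumably sits just below the visible context). A minimal standalone sketch of the new binning; the helper name and the sample array are made up for illustration:

    import numpy as np

    def categorize(grd):
        # Flatten and replace NaNs with 0, as in get_label_data
        grd = grd.flatten()
        grd = np.where(np.isnan(grd), 0, grd)
        # New thresholds: 0.2 and 0.8 (previously 0.3 and 0.7)
        cat_0 = np.logical_and(grd >= 0.0, grd < 0.2)
        cat_1 = np.logical_and(grd >= 0.2, grd < 0.8)
        cat_2 = np.logical_and(grd >= 0.8, grd <= 1.0)
        grd[cat_0] = 0
        grd[cat_1] = 1
        grd[cat_2] = 2  # assumed, by symmetry with the visible assignments
        return grd

    sample = np.array([[0.05, 0.25, np.nan], [0.75, 0.85, 1.0]])
    print(categorize(sample))  # -> [0. 1. 0. 1. 2. 2.]

Values that previously fell into the extreme classes (e.g. 0.25 and 0.75) now land in the middle class.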
@@ -526,10 +526,10 @@ class SRCNN:
         # self.loss = tf.keras.losses.MeanAbsoluteError()  # Regression
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
-        initial_learning_rate = 0.005
+        initial_learning_rate = 0.006
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
-        decay_steps = int(steps_per_epoch)
+        decay_steps = int(steps_per_epoch) * 4
         print('initial rate, decay rate, steps/epoch, decay steps: ', initial_learning_rate, decay_rate, steps_per_epoch, decay_steps)
         self.learningRateSchedule = tf.keras.optimizers.schedules.ExponentialDecay(initial_learning_rate, decay_steps, decay_rate)
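
The second hunk nudges the initial rate up and stretches the decay interval from one epoch to four. With TensorFlow's ExponentialDecay in its default non-staircase mode, the rate follows the formula in the inline comment. A small standalone sketch of the new schedule; num_data_samples and BATCH_SIZE are placeholder values, not taken from the repository:

    # Illustrative reconstruction of the schedule arithmetic
    initial_learning_rate = 0.006
    decay_rate = 0.95
    num_data_samples = 80000   # hypothetical
    BATCH_SIZE = 128           # hypothetical
    steps_per_epoch = num_data_samples // BATCH_SIZE
    decay_steps = steps_per_epoch * 4  # decay interval is now four epochs

    def decayed_lr(step):
        # decayed_learning_rate = learning_rate * decay_rate ^ (step / decay_steps)
        return initial_learning_rate * decay_rate ** (step / decay_steps)

    for epoch in (0, 4, 8, 40):
        print(epoch, decayed_lr(epoch * steps_per_epoch))

The net effect is a gentler schedule: the rate falls by roughly 5% every four epochs rather than every epoch, paired with a slightly higher starting rate.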