From 525fd1ef403d9717264f5d98472b4faf055ae8bc Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Tue, 7 Feb 2023 13:20:00 -0600
Subject: [PATCH] Widen middle cloud-fraction category and slow learning-rate decay

---
 modules/deeplearning/srcnn_cld_frac.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/modules/deeplearning/srcnn_cld_frac.py b/modules/deeplearning/srcnn_cld_frac.py
index 39d50972..f136b92e 100644
--- a/modules/deeplearning/srcnn_cld_frac.py
+++ b/modules/deeplearning/srcnn_cld_frac.py
@@ -183,9 +183,9 @@ def get_label_data(grd):
     blen, ylen, xlen = grd.shape
     grd = grd.flatten()
     grd = np.where(np.isnan(grd), 0, grd)
-    cat_0 = np.logical_and(grd >= 0.0, grd < 0.3)
-    cat_1 = np.logical_and(grd >= 0.3, grd < 0.7)
-    cat_2 = np.logical_and(grd >= 0.7, grd <= 1.0)
+    cat_0 = np.logical_and(grd >= 0.0, grd < 0.2)
+    cat_1 = np.logical_and(grd >= 0.2, grd < 0.8)
+    cat_2 = np.logical_and(grd >= 0.8, grd <= 1.0)
 
     grd[cat_0] = 0
     grd[cat_1] = 1
@@ -526,10 +526,10 @@ class SRCNN:
         # self.loss = tf.keras.losses.MeanAbsoluteError()  # Regression
 
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
-        initial_learning_rate = 0.005
+        initial_learning_rate = 0.006
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
-        decay_steps = int(steps_per_epoch)
+        decay_steps = int(steps_per_epoch) * 4
         print('initial rate, decay rate, steps/epoch, decay steps: ', initial_learning_rate, decay_rate, steps_per_epoch, decay_steps)
 
         self.learningRateSchedule = tf.keras.optimizers.schedules.ExponentialDecay(initial_learning_rate, decay_steps, decay_rate)
-- 
GitLab
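
The first hunk widens the middle category: cloud fractions in [0.0, 0.2) now map to class 0, [0.2, 0.8) to class 1, and [0.8, 1.0] to class 2 (the previous cut points were 0.3 and 0.7). A minimal standalone sketch of that binning; the function name and sample values below are illustrative, not from the repository:

    import numpy as np

    def bin_cloud_fraction(grd):
        # NaNs are treated as 0.0, matching the np.where call in get_label_data.
        grd = np.where(np.isnan(grd), 0.0, grd)
        cats = np.zeros(grd.shape, dtype=np.int32)
        cats[np.logical_and(grd >= 0.0, grd < 0.2)] = 0
        cats[np.logical_and(grd >= 0.2, grd < 0.8)] = 1
        cats[np.logical_and(grd >= 0.8, grd <= 1.0)] = 2
        return cats

    # Values straddling the new 0.2 / 0.8 cut points:
    print(bin_cloud_fraction(np.array([0.1, 0.2, 0.5, 0.79, 0.8, 1.0])))
    # -> [0 1 1 1 2 2]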
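
The second hunk raises the initial learning rate slightly (0.005 -> 0.006) and stretches the decay interval to four epochs per 0.95 factor instead of one. A small sketch of the resulting schedule; num_data_samples and BATCH_SIZE here are placeholder values, not taken from the training configuration:

    import tensorflow as tf

    num_data_samples = 100000   # placeholder
    BATCH_SIZE = 256            # placeholder

    initial_learning_rate = 0.006
    decay_rate = 0.95
    steps_per_epoch = int(num_data_samples / BATCH_SIZE)
    decay_steps = steps_per_epoch * 4

    schedule = tf.keras.optimizers.schedules.ExponentialDecay(
        initial_learning_rate, decay_steps, decay_rate)

    # With staircase=False (the default), lr(step) = 0.006 * 0.95 ** (step / decay_steps),
    # i.e. roughly a 5% reduction every 4 epochs rather than every epoch.
    for epoch in (0, 4, 8, 16):
        step = epoch * steps_per_epoch
        print(epoch, float(schedule(step)))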