From 645337abfe8f6c8e8e4be945383d8dab97b10bf8 Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Wed, 7 Dec 2022 13:00:19 -0600
Subject: [PATCH] Tune icing_fcn training hyperparameters: batch size 64->128, epochs 80->60, enable dropout in build_cnn, initial LR 0.0006->0.0005

---
 modules/deeplearning/icing_fcn.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/modules/deeplearning/icing_fcn.py b/modules/deeplearning/icing_fcn.py
index 21204c88..00900f84 100644
--- a/modules/deeplearning/icing_fcn.py
+++ b/modules/deeplearning/icing_fcn.py
@@ -20,8 +20,8 @@ if NumClasses == 2:
 else:
     NumLogits = NumClasses
 
-BATCH_SIZE = 64
-NUM_EPOCHS = 80
+BATCH_SIZE = 128
+NUM_EPOCHS = 60
 
 TRACK_MOVING_AVERAGE = False
 EARLY_STOP = True
@@ -583,7 +583,7 @@ class IcingIntensityFCN:
 
         self.get_evaluate_dataset(idxs)
 
-    def build_cnn(self, do_drop_out=False, do_batch_norm=True, drop_rate=0.5):
+    def build_cnn(self, do_drop_out=True, do_batch_norm=True, drop_rate=0.5):
         print('build_cnn')
         # padding = "VALID"
         padding = "SAME"
@@ -731,7 +731,7 @@ class IcingIntensityFCN:
             self.loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False)  # For multi-class
 
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
-        initial_learning_rate = 0.0006
+        initial_learning_rate = 0.0005
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
         decay_steps = int(steps_per_epoch)
-- 
GitLab