From 8d1a3525318ca0ae3a18e504475ba7e178b75796 Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Tue, 20 Apr 2021 09:35:42 -0500
Subject: [PATCH] snapshot...

---
 modules/deeplearning/icing.py | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/modules/deeplearning/icing.py b/modules/deeplearning/icing.py
index 928da2e7..24471ddb 100644
--- a/modules/deeplearning/icing.py
+++ b/modules/deeplearning/icing.py
@@ -13,18 +13,15 @@ from icing.pirep_goes import split_data, normalize
 LOG_DEVICE_PLACEMENT = False
 
 CACHE_DATA_IN_MEM = True
-CACHE_GFS = True
 
 PROC_BATCH_SIZE = 60
 PROC_BATCH_BUFFER_SIZE = 50000
 
 NumLabels = 1
-BATCH_SIZE = 256
+BATCH_SIZE = 512
 NUM_EPOCHS = 200
 
-
 TRACK_MOVING_AVERAGE = False
-DAY_NIGHT = 'ANY'
 
 TRIPLET = False
 CONV3D = False
@@ -159,7 +156,7 @@ class IcingIntensityNN:
         self.inputs.append(self.X_img)
         #self.inputs.append(self.X_prof)
 
-        self.DISK_CACHE = True
+        self.DISK_CACHE = False
 
         if datapath is not None:
             self.DISK_CACHE = False
@@ -380,7 +377,7 @@ class IcingIntensityNN:
         self.logits = logits
 
     def build_training(self):
-        self.loss = tf.keras.losses.BinaryCrossentropy(from_logits=True)  # for two-class only
+        self.loss = tf.keras.losses.BinaryCrossentropy(from_logits=False)  # for two-class only
         #self.loss = tf.keras.losses.SparseCategoricalCrossentropy()  # For multi-class
 
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
--
GitLab
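
Note on the from_logits change in build_training above: tf.keras.losses.BinaryCrossentropy(from_logits=False) expects probabilities, so the model's final layer is presumably applying a sigmoid itself, whereas from_logits=True expects raw logits and applies the sigmoid inside the loss. The sketch below is not part of the patch; the tensor values are illustrative only, and it just demonstrates that the two settings agree once a sigmoid is applied explicitly on the probability path.

    import tensorflow as tf

    # Raw, unbounded outputs a network head might produce (illustrative values).
    logits = tf.constant([[2.0], [-1.5]])
    labels = tf.constant([[1.0], [0.0]])

    bce_from_logits = tf.keras.losses.BinaryCrossentropy(from_logits=True)
    bce_from_probs = tf.keras.losses.BinaryCrossentropy(from_logits=False)

    # Both calls yield the same loss: from_logits=True applies the sigmoid
    # internally, from_logits=False expects it to have been applied already.
    print(bce_from_logits(labels, logits).numpy())
    print(bce_from_probs(labels, tf.sigmoid(logits)).numpy())

The practical consequence of the patch is that the loss configuration and the output activation must stay in sync: if the last layer is linear, keep from_logits=True; if it ends in a sigmoid, use from_logits=False as committed here.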