From 2dfbd8de8e1cc5c883bcb403d120a5d75aa9c913 Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Mon, 19 Apr 2021 16:15:28 -0500
Subject: [PATCH] snapshot...

---
 modules/deeplearning/icing.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/modules/deeplearning/icing.py b/modules/deeplearning/icing.py
index d97e2fbc..2d9924db 100644
--- a/modules/deeplearning/icing.py
+++ b/modules/deeplearning/icing.py
@@ -201,6 +201,7 @@ class IcingIntensityNN:
 
         # binary, two class
         label = np.where(label != 0, 1, label)
+        label = label.reshape((label.shape[0], 1))
 
         # TODO: Implement in memory cache
         # for key in keys:
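
Review note: the reshape added above turns the flat (N,) label vector into
(N, 1), which matches the (batch, 1) output of a Dense(1) sigmoid head
(assuming NumLabels == 1 for the binary case). With
tf.keras.losses.BinaryCrossentropy, a rank-1 y_true against a rank-2 y_pred
can broadcast to (N, N) in some TF versions and yield a meaningless loss, so
keeping both tensors rank-2 is the safe choice. A minimal standalone sketch
(NumPy only; the values are illustrative, not from the dataset):

    import numpy as np

    label = np.array([0, 2, 1, 0, 3])         # raw icing-intensity classes
    label = np.where(label != 0, 1, label)    # collapse to binary: icing / no icing
    label = label.reshape((label.shape[0], 1))
    print(label.shape)                        # (5, 1), matches Dense(1) output
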
@@ -351,6 +352,7 @@ class IcingIntensityNN:
         else:
             flat = self.X_img
             n_hidden = self.X_img.shape[1]
+            n_hidden = 100
 
         fac = 1
 
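Review note: the added line unconditionally overwrites n_hidden, so the
shape-derived value on the line just above it is now dead code and the dense
stack is pinned at 100 units regardless of the input width. If that is a
deliberate experiment, a comment on the assignment (or deleting the dead
line) would make the intent clearer.
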
@@ -364,20 +366,20 @@ class IcingIntensityNN:
 
         fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_5')
 
-        # activation = tf.nn.softmax
-        activation = tf.nn.sigmoid  # For binary
-
         fc = tf.keras.layers.Dense(n_hidden, activation=activation)(fc)
         fc = tf.keras.layers.BatchNormalization()(fc)
         print(fc.shape)
 
-        logits = tf.keras.layers.Dense(NumLabels)(fc)
+        # activation = tf.nn.softmax
+        activation = tf.nn.sigmoid  # For binary
+
+        logits = tf.keras.layers.Dense(NumLabels, activation=activation)(fc)
         print(logits.shape)
         
         self.logits = logits
 
     def build_training(self):
-        self.loss = tf.keras.losses.BinaryCrossentropy()  # for two-class only
+        self.loss = tf.keras.losses.BinaryCrossentropy(from_logits=True)  # for two-class only
         #self.loss = tf.keras.losses.SparseCategoricalCrossentropy()  # For multi-class
 
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
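
Review note: these two changes pull in opposite directions. The final Dense
layer now applies tf.nn.sigmoid, so `logits` actually holds probabilities,
while BinaryCrossentropy(from_logits=True) applies a second sigmoid inside
the loss; training would effectively optimize sigmoid(sigmoid(x)). One of
the two should change. Either pairing below is internally consistent (a
minimal sketch assuming NumLabels == 1 and a placeholder feature stack; not
the module's real code):

    import tensorflow as tf

    NumLabels = 1                                    # binary icing / no icing
    inputs = tf.keras.Input(shape=(100,))            # stand-in for the fc stack
    fc = tf.keras.layers.Dense(100, activation=tf.nn.relu)(inputs)

    # Option A: emit raw logits, let the loss fuse in the sigmoid.
    logits = tf.keras.layers.Dense(NumLabels)(fc)    # no activation here
    model_a = tf.keras.Model(inputs, logits)
    model_a.compile(optimizer='adam',
                    loss=tf.keras.losses.BinaryCrossentropy(from_logits=True))

    # Option B: sigmoid in the layer, default from_logits=False in the loss.
    probs = tf.keras.layers.Dense(NumLabels, activation=tf.nn.sigmoid)(fc)
    model_b = tf.keras.Model(inputs, probs)
    model_b.compile(optimizer='adam',
                    loss=tf.keras.losses.BinaryCrossentropy())

Option A is generally preferred: the fused sigmoid cross-entropy is more
numerically stable than computing log(sigmoid(x)) in two separate steps.
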
-- 
GitLab