From eaee2230fc34258babe4b7024902148709d97cc1 Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Tue, 4 May 2021 09:22:20 -0500
Subject: [PATCH] Remove unused build_cnn and stale label-filter code from icing.py

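Remove the stale commented-out binary-label lines and the intensity-category
filter (which kept only labels 0 and 3-6) from the label preprocessing in
IcingIntensityNN, and drop the unused build_cnn method. The fully connected
build_dnn path that follows is untouched.

For reference, a minimal sketch of the label handling that remains after this
patch, assuming label is a 1-D integer numpy array and -1 is a fill value, as
the surrounding np.where call suggests (binarize_labels is a hypothetical
helper name, not part of the module):

    import numpy as np

    def binarize_labels(label):
        # Map the -1 fill value to the no-icing class.
        label = np.where(label == -1, 0, label)
        # Collapse every icing intensity into a single positive class.
        label = np.where(label != 0, 1, label)
        # Shape (N, 1) for a binary classifier head.
        return label.reshape((label.shape[0], 1))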
---
 modules/deeplearning/icing.py | 51 -----------------------------------
 1 file changed, 51 deletions(-)

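Note (commentary only, not applied with the patch): the removed build_cnn
stacked five Conv2D -> MaxPool2D -> BatchNormalization blocks with leaky ReLU,
"SAME" padding, and a filter count doubling from 8 to 128 before flattening.
A condensed loop-form sketch of that stack is kept here purely for reference
(conv_stack is a hypothetical name; the hyperparameters are copied from the
removed code):

    import tensorflow as tf

    def conv_stack(inputs, num_filters=8, num_blocks=5, padding="SAME"):
        # Mirrors the removed build_cnn: Conv2D -> MaxPool2D -> BatchNorm per
        # block, a 5x5 kernel on the first block and 3x3 afterwards, doubling
        # the filter count each block (8, 16, 32, 64, 128).
        conv = inputs
        for i in range(num_blocks):
            kernel = 5 if i == 0 else 3
            conv = tf.keras.layers.Conv2D(num_filters, kernel, strides=[1, 1],
                                          padding=padding,
                                          activation=tf.nn.leaky_relu)(conv)
            conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
            conv = tf.keras.layers.BatchNormalization()(conv)
            num_filters *= 2
        return tf.keras.layers.Flatten()(conv)
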
diff --git a/modules/deeplearning/icing.py b/modules/deeplearning/icing.py
index a80d58b2..f04c4321 100644
--- a/modules/deeplearning/icing.py
+++ b/modules/deeplearning/icing.py
@@ -207,12 +207,6 @@ class IcingIntensityNN:
         label = np.where(label == -1, 0, label)
 
         # binary, two class
-        # label = np.where(label != 0, 1, label)
-        # label = label.reshape((label.shape[0], 1))
-
-        keep = (label == 0) | (label == 3) | (label == 4) | (label == 5) | (label == 6)
-        data = data[keep,]
-        label = label[keep]
         label = np.where(label != 0, 1, label)
         label = label.reshape((label.shape[0], 1))
 
@@ -295,51 +289,6 @@ class IcingIntensityNN:
 
         return flat
 
-    def build_cnn(self):
-        print('build_cnn')
-        # padding = "VALID"
-        padding = "SAME"
-
-        # activation = tf.nn.relu
-        # activation = tf.nn.elu
-        activation = tf.nn.leaky_relu
-        momentum = 0.99
-
-        num_filters = 8
-
-        conv = tf.keras.layers.Conv2D(num_filters, 5, strides=[1, 1], padding=padding, activation=activation)(self.inputs[0])
-        conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
-        conv = tf.keras.layers.BatchNormalization()(conv)
-        print(conv.shape)
-
-        num_filters *= 2
-        conv = tf.keras.layers.Conv2D(num_filters, 3, strides=[1, 1], padding=padding, activation=activation)(conv)
-        conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
-        conv = tf.keras.layers.BatchNormalization()(conv)
-        print(conv.shape)
-
-        num_filters *= 2
-        conv = tf.keras.layers.Conv2D(num_filters, 3, strides=[1, 1], padding=padding, activation=activation)(conv)
-        conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
-        conv = tf.keras.layers.BatchNormalization()(conv)
-        print(conv.shape)
-
-        num_filters *= 2
-        conv = tf.keras.layers.Conv2D(num_filters, 3, strides=[1, 1], padding=padding, activation=activation)(conv)
-        conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
-        conv = tf.keras.layers.BatchNormalization()(conv)
-        print(conv.shape)
-
-        num_filters *= 2
-        conv = tf.keras.layers.Conv2D(num_filters, 3, strides=[1, 1], padding=padding, activation=activation)(conv)
-        conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
-        conv = tf.keras.layers.BatchNormalization()(conv)
-        print(conv.shape)
-
-        flat = tf.keras.layers.Flatten()(conv)
-
-        return flat
-
     def build_dnn(self, input_layer=None):
         print('build fully connected layer')
         drop_rate = 0.5
-- 
GitLab