From 2010eabc6b76c755688794e5243bb1d3561d4a60 Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Mon, 14 Mar 2022 20:52:42 -0500
Subject: [PATCH] snapshot...

---
 modules/deeplearning/icing_fcn.py | 42 ++++++++++++++++++++++++++++++++++++------
 1 file changed, 36 insertions(+), 6 deletions(-)

diff --git a/modules/deeplearning/icing_fcn.py b/modules/deeplearning/icing_fcn.py
index 5f0ba197..a6d9f862 100644
--- a/modules/deeplearning/icing_fcn.py
+++ b/modules/deeplearning/icing_fcn.py
@@ -87,12 +87,12 @@ def build_residual_block_1x1(input_layer, num_filters, activation, block_name, p
     conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
     print(conv.shape)
 
-    if do_drop_out:
-        conv = tf.keras.layers.Dropout(drop_rate)(conv)
-    if do_batch_norm:
-        conv = tf.keras.layers.BatchNormalization()(conv)
-    conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
-    print(conv.shape)
+    # if do_drop_out:
+    #     conv = tf.keras.layers.Dropout(drop_rate)(conv)
+    # if do_batch_norm:
+    #     conv = tf.keras.layers.BatchNormalization()(conv)
+    # conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
+    # print(conv.shape)
 
     if do_drop_out:
         conv = tf.keras.layers.Dropout(drop_rate)(conv)
@@ -582,25 +582,53 @@ class IcingIntensityFCN:
         conv = conv + skip
         conv = tf.keras.layers.LeakyReLU()(conv)
         print(conv.shape)
+        # -----------------------------------------------------------------------------------------------------------
+
+        skip = conv
 
         num_filters *= 2
         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
         conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
         conv = tf.keras.layers.BatchNormalization()(conv)
         print(conv.shape)
+        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
+        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
+        skip = tf.keras.layers.BatchNormalization()(skip)
+
+        conv = conv + skip
+        conv = tf.keras.layers.LeakyReLU()(conv)
+        print(conv.shape)
+        # ----------------------------------------------------------------------------------------------------------
+
+        skip = conv
 
         num_filters *= 2
        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
         conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
         conv = tf.keras.layers.BatchNormalization()(conv)
         print(conv.shape)
+        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
+        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
+        skip = tf.keras.layers.BatchNormalization()(skip)
+
+        conv = conv + skip
+        conv = tf.keras.layers.LeakyReLU()(conv)
+        # -----------------------------------------------------------------------------------------------------------
+
+        skip = conv
 
         num_filters *= 2
         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
         conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
         conv = tf.keras.layers.BatchNormalization()(conv)
         print(conv.shape)
+        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
+        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
+        skip = tf.keras.layers.BatchNormalization()(skip)
+
+        conv = conv + skip
+        conv = tf.keras.layers.LeakyReLU()(conv)
 
         return conv
 
     def build_fcl(self, input_layer):
@@ -619,6 +647,8 @@ class IcingIntensityFCN:
 
         conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_2', padding=padding)
 
+        conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_3', padding=padding)
+
         print(conv.shape)
 
         if NumClasses == 2:
-- 
GitLab