From 98f951766aebc15c481b54c1b5d9888ba0fadcf3 Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Thu, 24 Feb 2022 12:19:19 -0600
Subject: [PATCH] snapshot...

---
 modules/deeplearning/icing_fcn.py | 111 +++++++++++++++++++-----------
 1 file changed, 71 insertions(+), 40 deletions(-)

diff --git a/modules/deeplearning/icing_fcn.py b/modules/deeplearning/icing_fcn.py
index c99fe687..7aec99f8 100644
--- a/modules/deeplearning/icing_fcn.py
+++ b/modules/deeplearning/icing_fcn.py
@@ -75,43 +75,75 @@ zero_out_params = ['cld_reff_dcomp', 'cld_opd_dcomp', 'iwc_dcomp', 'lwc_dcomp']
 DO_ZERO_OUT = False
 
 
-def build_residual_block(input, drop_rate, num_neurons, activation, block_name, doDropout=True, doBatchNorm=True):
+# def build_residual_block(input, drop_rate, num_neurons, activation, block_name, doDropout=True, doBatchNorm=True):
+#     with tf.name_scope(block_name):
+#         if doDropout:
+#             fc = tf.keras.layers.Dropout(drop_rate)(input)
+#             fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
+#         else:
+#             fc = tf.keras.layers.Dense(num_neurons, activation=activation)(input)
+#         if doBatchNorm:
+#             fc = tf.keras.layers.BatchNormalization()(fc)
+#         print(fc.shape)
+#         fc_skip = fc
+#
+#         if doDropout:
+#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
+#         fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
+#         if doBatchNorm:
+#             fc = tf.keras.layers.BatchNormalization()(fc)
+#         print(fc.shape)
+#
+#         if doDropout:
+#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
+#         fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
+#         if doBatchNorm:
+#             fc = tf.keras.layers.BatchNormalization()(fc)
+#         print(fc.shape)
+#
+#         if doDropout:
+#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
+#         fc = tf.keras.layers.Dense(num_neurons, activation=None)(fc)
+#         if doBatchNorm:
+#             fc = tf.keras.layers.BatchNormalization()(fc)
+#
+#         fc = fc + fc_skip
+#         fc = tf.keras.layers.LeakyReLU()(fc)
+#         print(fc.shape)
+#
+#         return fc
+
+
+def build_residual_block_1x1(input_layer, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
+                             do_drop_out=True, do_batch_norm=True):
+
     with tf.name_scope(block_name):
-        if doDropout:
-            fc = tf.keras.layers.Dropout(drop_rate)(input)
-            fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
-        else:
-            fc = tf.keras.layers.Dense(num_neurons, activation=activation)(input)
-        if doBatchNorm:
-            fc = tf.keras.layers.BatchNormalization()(fc)
-        print(fc.shape)
-        fc_skip = fc
-
-        if doDropout:
-            fc = tf.keras.layers.Dropout(drop_rate)(fc)
-        fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
-        if doBatchNorm:
-            fc = tf.keras.layers.BatchNormalization()(fc)
-        print(fc.shape)
-
-        if doDropout:
-            fc = tf.keras.layers.Dropout(drop_rate)(fc)
-        fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
-        if doBatchNorm:
-            fc = tf.keras.layers.BatchNormalization()(fc)
-        print(fc.shape)
-
-        if doDropout:
-            fc = tf.keras.layers.Dropout(drop_rate)(fc)
-        fc = tf.keras.layers.Dense(num_neurons, activation=None)(fc)
-        if doBatchNorm:
-            fc = tf.keras.layers.BatchNormalization()(fc)
-
-        fc = fc + fc_skip
-        fc = tf.keras.layers.LeakyReLU()(fc)
-        print(fc.shape)
-
-        return fc
+        skip = input_layer
+        if do_drop_out:
+            input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
+        if do_batch_norm:
+            input_layer = tf.keras.layers.BatchNormalization()(input_layer)
+        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
+        print(conv.shape)
+
+        if do_drop_out:
+            conv = tf.keras.layers.Dropout(drop_rate)(conv)
+        if do_batch_norm:
+            conv = tf.keras.layers.BatchNormalization()(conv)
+        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
+        print(conv.shape)
+
+        if do_drop_out:
+            conv = tf.keras.layers.Dropout(drop_rate)(conv)
+        if do_batch_norm:
+            conv = tf.keras.layers.BatchNormalization()(conv)
+        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=None)(conv)
+
+        conv = conv + skip
+        conv = tf.keras.layers.LeakyReLU()(conv)
+        print(conv.shape)
+
+        return conv
 
 
 class IcingIntensityFCN:
@@ -609,21 +641,20 @@ class IcingIntensityFCN:
         # activation = tf.nn.relu
         # activation = tf.nn.elu
         activation = tf.nn.leaky_relu
-        momentum = 0.99
 
         # padding = "VALID"
         padding = "SAME"
 
-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
+        conv = build_residual_block_1x1(input_layer, num_filters, activation, 'Residual_Block_1', padding=padding)
 
-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
+        conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_2', padding=padding)
 
-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
         print(conv.shape)
 
         if NumClasses == 2:
             activation = tf.nn.sigmoid  # For binary
         else:
             activation = tf.nn.softmax  # For multi-class
 
+        # Named 'logits', but these are actually probabilities; see the activation above
         logits = tf.keras.layers.Conv2D(1, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
-- 
GitLab
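
Note for reviewers: the identity skip inside build_residual_block_1x1 (conv = conv + skip) adds the block input to the block output, so it only type-checks when input_layer already carries num_filters channels. Below is a minimal sketch of exercising the new block outside the full IcingIntensityFCN pipeline; the 1x1 projection layer, the 16x16x8 input shape, and num_filters=64 are illustrative assumptions, not values taken from this patch.

    import tensorflow as tf

    from modules.deeplearning.icing_fcn import build_residual_block_1x1  # path per this patch

    num_filters = 64                            # illustrative; the model configures this elsewhere
    inputs = tf.keras.Input(shape=(16, 16, 8))  # hypothetical H x W x channel shape

    # Project to num_filters channels first so the identity skip inside the
    # block (conv = conv + skip) is shape-compatible with the block output.
    x = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding='SAME',
                               activation=tf.nn.leaky_relu)(inputs)

    x = build_residual_block_1x1(x, num_filters, tf.nn.leaky_relu, 'Residual_Block_1')
    x = build_residual_block_1x1(x, num_filters, tf.nn.leaky_relu, 'Residual_Block_2')

    # Binary head as in the patch: a single 1x1 filter with sigmoid yields a
    # per-pixel probability rather than a raw logit.
    probs = tf.keras.layers.Conv2D(1, kernel_size=1, strides=1, padding='SAME',
                                   activation=tf.nn.sigmoid)(x)

    model = tf.keras.Model(inputs, probs)
    model.summary()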
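A related caution on the multi-class branch: the head stays Conv2D(1, ...) for both activations, and softmax over a single channel is identically 1.0 (exp(x)/exp(x)). If the multi-class path is ever exercised, the channel axis needs one filter per class. A possible drop-in replacement for the head in the second hunk, reusing NumClasses, padding, and conv from the surrounding method:

    if NumClasses == 2:
        # One sigmoid channel: per-pixel probability of the positive class.
        probs = tf.keras.layers.Conv2D(1, kernel_size=1, strides=1, padding=padding,
                                       activation=tf.nn.sigmoid)(conv)
    else:
        # Softmax needs NumClasses channels; over a single channel it returns 1.0 everywhere.
        probs = tf.keras.layers.Conv2D(NumClasses, kernel_size=1, strides=1, padding=padding,
                                       activation=tf.nn.softmax)(conv)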