Commit 98f95176 authored by tomrink

snapshot...

parent 59faf5e0
@@ -75,43 +75,75 @@ zero_out_params = ['cld_reff_dcomp', 'cld_opd_dcomp', 'iwc_dcomp', 'lwc_dcomp']
DO_ZERO_OUT = False

# def build_residual_block(input, drop_rate, num_neurons, activation, block_name, doDropout=True, doBatchNorm=True):
#     with tf.name_scope(block_name):
#         if doDropout:
#             fc = tf.keras.layers.Dropout(drop_rate)(input)
#             fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
#         else:
#             fc = tf.keras.layers.Dense(num_neurons, activation=activation)(input)
#         if doBatchNorm:
#             fc = tf.keras.layers.BatchNormalization()(fc)
#         print(fc.shape)
#         fc_skip = fc
#
#         if doDropout:
#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
#         fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
#         if doBatchNorm:
#             fc = tf.keras.layers.BatchNormalization()(fc)
#         print(fc.shape)
#
#         if doDropout:
#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
#         fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
#         if doBatchNorm:
#             fc = tf.keras.layers.BatchNormalization()(fc)
#         print(fc.shape)
#
#         if doDropout:
#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
#         fc = tf.keras.layers.Dense(num_neurons, activation=None)(fc)
#         if doBatchNorm:
#             fc = tf.keras.layers.BatchNormalization()(fc)
#
#         fc = fc + fc_skip
#         fc = tf.keras.layers.LeakyReLU()(fc)
#         print(fc.shape)
#
#         return fc
def build_residual_block_1x1(input_layer, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
                             do_drop_out=True, do_batch_norm=True):
    with tf.name_scope(block_name):
        skip = input_layer

        if do_drop_out:
            input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
        if do_batch_norm:
            input_layer = tf.keras.layers.BatchNormalization()(input_layer)
        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
        print(conv.shape)

        if do_drop_out:
            conv = tf.keras.layers.Dropout(drop_rate)(conv)
        if do_batch_norm:
            conv = tf.keras.layers.BatchNormalization()(conv)
        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
        print(conv.shape)

        if do_drop_out:
            conv = tf.keras.layers.Dropout(drop_rate)(conv)
        if do_batch_norm:
            conv = tf.keras.layers.BatchNormalization()(conv)
        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=None)(conv)

        conv = conv + skip
        conv = tf.keras.layers.LeakyReLU()(conv)
        print(conv.shape)

        return conv
class IcingIntensityFCN:
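Editor's note: the 1x1 residual block above keeps the spatial dimensions of its input, and because of the `conv + skip` addition it assumes the input already carries `num_filters` channels. A minimal usage sketch (not part of this commit; shapes and parameter values are illustrative assumptions, TensorFlow 2.x / tf.keras assumed):

import tensorflow as tf

# Hypothetical input: 16x16 tiles whose channel count equals num_filters,
# so the identity skip connection can be added to the 1x1 conv output.
num_filters = 8
inputs = tf.keras.Input(shape=(16, 16, num_filters))
outputs = build_residual_block_1x1(inputs, num_filters, tf.nn.leaky_relu, 'Residual_Block_Test',
                                   do_drop_out=False, do_batch_norm=True)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
model.summary()  # spatial size and channel count are unchanged by the block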
@@ -609,21 +641,20 @@ class IcingIntensityFCN:
        # activation = tf.nn.relu
        # activation = tf.nn.elu
        activation = tf.nn.leaky_relu
-        momentum = 0.99

        # padding = "VALID"
        padding = "SAME"

-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
+        conv = build_residual_block_1x1(input_layer, num_filters, activation, 'Residual_Block_1', padding=padding)
+        conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_2', padding=padding)
        print(conv.shape)

        if NumClasses == 2:
            activation = tf.nn.sigmoid  # For binary
        else:
            activation = tf.nn.softmax  # For multi-class

        # Called logits, but these are actually probabilities, see activation
        logits = tf.keras.layers.Conv2D(1, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
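Editor's note: as the comment in the diff says, the head applies sigmoid or softmax directly, so the `logits` tensor already holds probabilities rather than raw logits. A minimal sketch (not from this commit; the loss names and the per-pixel integer-label assumption are the editor's) of a loss configuration consistent with that choice:

import tensorflow as tf

# The network output is already a probability map, so from_logits must be False.
if NumClasses == 2:
    loss_fn = tf.keras.losses.BinaryCrossentropy(from_logits=False)
else:
    # Multi-class case; assumes integer class labels per pixel.
    loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False)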