Commit 23908bc0 authored by tomrink

snapshot...

parent 9f130131
@@ -76,11 +76,27 @@ DO_ZERO_OUT = False
 lunar_map = {'cld_reff_dcomp': 'cld_reff_nlcomp', 'cld_opd_dcomp': 'cld_opd_nlcomp', 'iwc_dcomp': None, 'lwc_dcomp': None}
+# def build_residual_block_conv2d(x_in, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
+#                                 do_drop_out=True, do_batch_norm=True):
+#     conv = tf.keras.layers.Conv2D(num_filters, kernel_size=5, strides=1, padding=padding, activation=activation)(x_in)
+#     conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
+#     conv = tf.keras.layers.BatchNormalization()(conv)
+#
+#     skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(x_in)
+#     skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
+#     skip = tf.keras.layers.BatchNormalization()(skip)
+#
+#     conv = conv + skip
+#     conv = tf.keras.layers.LeakyReLU()(conv)
+#     print(conv.shape)
 def build_residual_block_1x1(input_layer, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
                              do_drop_out=True, do_batch_norm=True):
     with tf.name_scope(block_name):
         skip = input_layer
         if do_drop_out:
             input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
         if do_batch_norm:
@@ -88,13 +104,6 @@ def build_residual_block_1x1(input_layer, num_filters, activation, block_name, p
         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
         print(conv.shape)
-        # if do_drop_out:
-        #     conv = tf.keras.layers.Dropout(drop_rate)(conv)
-        # if do_batch_norm:
-        #     conv = tf.keras.layers.BatchNormalization()(conv)
-        # conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
-        # print(conv.shape)
         if do_drop_out:
             conv = tf.keras.layers.Dropout(drop_rate)(conv)
         if do_batch_norm:
......
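
For context, a minimal sketch of how the residual 1x1 block touched by this diff could look when completed is given below. The BatchNormalization call hidden at the first hunk boundary, the closing skip connection, and the example call are assumptions inferred from the visible hunks and the commented-out conv2d variant above, not part of the recorded commit.

# Minimal sketch (not part of the commit): a runnable version of
# build_residual_block_1x1 as suggested by the visible hunks.
import tensorflow as tf

def build_residual_block_1x1(input_layer, num_filters, activation, block_name,
                             padding='SAME', drop_rate=0.5,
                             do_drop_out=True, do_batch_norm=True):
    with tf.name_scope(block_name):
        skip = input_layer
        if do_drop_out:
            input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
        if do_batch_norm:
            # assumed: this call is hidden by the first hunk boundary
            input_layer = tf.keras.layers.BatchNormalization()(input_layer)
        # 1x1 convolution preserves spatial dimensions, so only the channel
        # count changes before the residual add
        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1,
                                      padding=padding, activation=activation)(input_layer)
        if do_drop_out:
            conv = tf.keras.layers.Dropout(drop_rate)(conv)
        if do_batch_norm:
            # assumed: hidden past the trailing "..." of the truncated diff
            conv = tf.keras.layers.BatchNormalization()(conv)
        # assumed residual connection, mirroring the commented conv2d block;
        # requires num_filters to equal the input channel count
        return conv + skip

In a Keras functional model this would be called as, e.g., x = build_residual_block_1x1(x, 64, 'relu', 'res_block_1').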