Skip to content
Snippets Groups Projects
Commit e127f113 authored by tomrink's avatar tomrink
Browse files

snapshot...

parent 2a729872
No related branches found
No related tags found
No related merge requests found
data_param = data_params[data_idx]
label_param = label_params[label_idx]
def build_conv2d_block(conv, num_filters, block_name, activation=tf.nn.leaky_relu, padding='SAME'):
    """Residual downsampling block: two 3x3 convs + 2x2 max-pool on the main
    path, added to a projected/downsampled identity shortcut.

    Args:
        conv: input 4-D tensor, NHWC layout assumed (standard Keras default) -- TODO confirm.
        num_filters: number of output channels for every conv in the block.
        block_name: tf.name_scope label for this block.
        activation: activation for the first conv (default leaky ReLU).
        padding: padding mode forwarded to Conv2D/MaxPool2D (default 'SAME').

    Returns:
        Tensor of shape (batch, ceil(H/2), ceil(W/2), num_filters).
    """
    with tf.name_scope(block_name):
        # Main path: conv -> conv (linear) -> 2x downsample -> batch norm.
        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
        # Second conv is linear; the nonlinearity is applied after the residual add.
        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
        skip = tf.keras.layers.BatchNormalization()(skip)

        # BUG FIX: the original added the un-pooled input directly to the pooled
        # main path, which cannot broadcast (spatial dims halved, channels changed).
        # Use a strided 1x1 projection shortcut (ResNet "option B") so both
        # branches have shape (batch, ceil(H/2), ceil(W/2), num_filters).
        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=2, padding=padding, activation=None)(conv)

        conv = conv + skip
        conv = tf.keras.layers.LeakyReLU()(conv)
        print(conv.shape)

        return conv
# def build_residual_block_1x1(input_layer, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
#                              do_drop_out=True, do_batch_norm=True):
#
#     with tf.name_scope(block_name):
#         skip = input_layer
#         if do_drop_out:
#             input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
#         if do_batch_norm:
#             input_layer = tf.keras.layers.BatchNormalization()(input_layer)
#         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
#         print(conv.shape)
#
#         # if do_drop_out:
#         #     conv = tf.keras.layers.Dropout(drop_rate)(conv)
#         # if do_batch_norm:
#         #     conv = tf.keras.layers.BatchNormalization()(conv)
#         # conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
#         # print(conv.shape)
#
#         if do_drop_out:
#             conv = tf.keras.layers.Dropout(drop_rate)(conv)
#         if do_batch_norm:
#             conv = tf.keras.layers.BatchNormalization()(conv)
#         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=None)(conv)
#
#         conv = conv + skip
#         conv = tf.keras.layers.LeakyReLU()(conv)
#         print(conv.shape)
#
#         return conv
class ESPCN:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment