Skip to content
Snippets Groups Projects
Commit 2010eabc authored by tomrink's avatar tomrink
Browse files

snapshot...

parent 30f0fdc8
No related branches found
No related tags found
No related merge requests found
...@@ -87,12 +87,12 @@ def build_residual_block_1x1(input_layer, num_filters, activation, block_name, p ...@@ -87,12 +87,12 @@ def build_residual_block_1x1(input_layer, num_filters, activation, block_name, p
conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer) conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
print(conv.shape) print(conv.shape)
if do_drop_out: # if do_drop_out:
conv = tf.keras.layers.Dropout(drop_rate)(conv) # conv = tf.keras.layers.Dropout(drop_rate)(conv)
if do_batch_norm: # if do_batch_norm:
conv = tf.keras.layers.BatchNormalization()(conv) # conv = tf.keras.layers.BatchNormalization()(conv)
conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv) # conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
print(conv.shape) # print(conv.shape)
if do_drop_out: if do_drop_out:
conv = tf.keras.layers.Dropout(drop_rate)(conv) conv = tf.keras.layers.Dropout(drop_rate)(conv)
...@@ -582,25 +582,53 @@ class IcingIntensityFCN: ...@@ -582,25 +582,53 @@ class IcingIntensityFCN:
conv = conv + skip conv = conv + skip
conv = tf.keras.layers.LeakyReLU()(conv) conv = tf.keras.layers.LeakyReLU()(conv)
print(conv.shape) print(conv.shape)
# -----------------------------------------------------------------------------------------------------------
skip = conv
num_filters *= 2 num_filters *= 2
conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv) conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
conv = tf.keras.layers.MaxPool2D(padding=padding)(conv) conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
conv = tf.keras.layers.BatchNormalization()(conv) conv = tf.keras.layers.BatchNormalization()(conv)
print(conv.shape) print(conv.shape)
skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
skip = tf.keras.layers.BatchNormalization()(skip)
conv = conv + skip
conv = tf.keras.layers.LeakyReLU()(conv)
print(conv.shape)
# ----------------------------------------------------------------------------------------------------------
skip = conv
num_filters *= 2 num_filters *= 2
conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv) conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
conv = tf.keras.layers.MaxPool2D(padding=padding)(conv) conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
conv = tf.keras.layers.BatchNormalization()(conv) conv = tf.keras.layers.BatchNormalization()(conv)
print(conv.shape) print(conv.shape)
skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
skip = tf.keras.layers.BatchNormalization()(skip)
conv = conv + skip
conv = tf.keras.layers.LeakyReLU()(conv)
# -----------------------------------------------------------------------------------------------------------
skip = conv
num_filters *= 2 num_filters *= 2
conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv) conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
conv = tf.keras.layers.MaxPool2D(padding=padding)(conv) conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
conv = tf.keras.layers.BatchNormalization()(conv) conv = tf.keras.layers.BatchNormalization()(conv)
print(conv.shape) print(conv.shape)
skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
skip = tf.keras.layers.BatchNormalization()(skip)
conv = conv + skip
conv = tf.keras.layers.LeakyReLU()(conv)
return conv return conv
def build_fcl(self, input_layer): def build_fcl(self, input_layer):
...@@ -619,6 +647,8 @@ class IcingIntensityFCN: ...@@ -619,6 +647,8 @@ class IcingIntensityFCN:
conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_2', padding=padding) conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_2', padding=padding)
conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_3', padding=padding)
print(conv.shape) print(conv.shape)
if NumClasses == 2: if NumClasses == 2:
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment