diff --git a/modules/deeplearning/icing_fcn.py b/modules/deeplearning/icing_fcn.py
index 5f0ba197764e6c3f6806b8981f7ecaeeedb5985d..a6d9f862a1ecb6462a891f4a6f92c8aca3531384 100644
--- a/modules/deeplearning/icing_fcn.py
+++ b/modules/deeplearning/icing_fcn.py
@@ -87,12 +87,12 @@ def build_residual_block_1x1(input_layer, num_filters, activation, block_name, p
     conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
     print(conv.shape)
 
-    if do_drop_out:
-        conv = tf.keras.layers.Dropout(drop_rate)(conv)
-    if do_batch_norm:
-        conv = tf.keras.layers.BatchNormalization()(conv)
-    conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
-    print(conv.shape)
+    # if do_drop_out:
+    #     conv = tf.keras.layers.Dropout(drop_rate)(conv)
+    # if do_batch_norm:
+    #     conv = tf.keras.layers.BatchNormalization()(conv)
+    # conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
+    # print(conv.shape)
 
     if do_drop_out:
         conv = tf.keras.layers.Dropout(drop_rate)(conv)
@@ -582,25 +582,53 @@ class IcingIntensityFCN:
         conv = conv + skip
         conv = tf.keras.layers.LeakyReLU()(conv)
         print(conv.shape)
+        # -----------------------------------------------------------------------------------------------------------
 
+        skip = conv
         num_filters *= 2
         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
         conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
         conv = tf.keras.layers.BatchNormalization()(conv)
         print(conv.shape)
 
+        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
+        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
+        skip = tf.keras.layers.BatchNormalization()(skip)
+
+        conv = conv + skip
+        conv = tf.keras.layers.LeakyReLU()(conv)
+        print(conv.shape)
+        # ----------------------------------------------------------------------------------------------------------
+
+        skip = conv
         num_filters *= 2
         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
         conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
         conv = tf.keras.layers.BatchNormalization()(conv)
         print(conv.shape)
 
+        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
+        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
+        skip = tf.keras.layers.BatchNormalization()(skip)
+
+        conv = conv + skip
+        conv = tf.keras.layers.LeakyReLU()(conv)
+        # -----------------------------------------------------------------------------------------------------------
+
+        skip = conv
         num_filters *= 2
         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
         conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
         conv = tf.keras.layers.BatchNormalization()(conv)
         print(conv.shape)
 
+        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
+        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
+        skip = tf.keras.layers.BatchNormalization()(skip)
+
+        conv = conv + skip
+        conv = tf.keras.layers.LeakyReLU()(conv)
+
         return conv
 
     def build_fcl(self, input_layer):
@@ -619,6 +647,8 @@ class IcingIntensityFCN:
         conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_2', padding=padding)
 
+        conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_3', padding=padding)
+
         print(conv.shape)
 
         if NumClasses == 2:
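
Note on the hunk at line 582: the patch inlines the same projection-skip pattern three times around each downsampling stage (3x3 Conv2D, MaxPool2D, and BatchNormalization on both the main and skip paths, followed by an addition and a LeakyReLU). The sketch below is illustrative only and not part of the patch: it assumes the TensorFlow/Keras setup already used in icing_fcn.py, and the helper name conv_pool_residual and its defaults are hypothetical.

import tensorflow as tf

def conv_pool_residual(input_layer, num_filters, activation=tf.nn.leaky_relu, padding='SAME'):
    # Main path: 3x3 conv, 2x2 max-pool, batch norm (mirrors the added lines in the diff).
    conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1,
                                  padding=padding, activation=activation)(input_layer)
    conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
    conv = tf.keras.layers.BatchNormalization()(conv)

    # Skip path: project the block input to the same channel count and spatial size,
    # with no activation so the merge stays linear until the final LeakyReLU.
    skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1,
                                  padding=padding, activation=None)(input_layer)
    skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
    skip = tf.keras.layers.BatchNormalization()(skip)

    # Merge and activate, as in the three inlined blocks in the diff.
    return tf.keras.layers.LeakyReLU()(conv + skip)

With a helper like this, the three inlined stages would reduce to a short loop (num_filters *= 2, then conv = conv_pool_residual(conv, num_filters) per stage), which keeps future changes to the pattern in one place.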