Commit 8a3066e7 authored by tomrink

snapshot...

parent 68d2bc81
@@ -33,7 +33,7 @@ TRIPLET = False
 CONV3D = False
 NOISE_TRAINING = False
-NOISE_STDDEV = 0.01
+NOISE_STDDEV = 0.10
 img_width = 16
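
The training-time noise amplitude rises from 0.01 to 0.10 standard deviations. Note that tf.keras.layers.GaussianNoise perturbs its input only when called with training=True, so NOISE_STDDEV affects fitting but not inference. A minimal standalone check of that behavior (not part of the commit; the constant name simply mirrors the one above):

import tensorflow as tf

NOISE_STDDEV = 0.10  # value introduced by this commit

noise = tf.keras.layers.GaussianNoise(stddev=NOISE_STDDEV)
x = tf.ones((1, 4))

print(noise(x, training=True))   # perturbed by zero-mean Gaussian noise, stddev 0.10
print(noise(x, training=False))  # unchanged: the layer is an identity at inference time
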
@@ -75,45 +75,6 @@ zero_out_params = ['cld_reff_dcomp', 'cld_opd_dcomp', 'iwc_dcomp', 'lwc_dcomp']
 DO_ZERO_OUT = False

-# def build_residual_block(input, drop_rate, num_neurons, activation, block_name, doDropout=True, doBatchNorm=True):
-#     with tf.name_scope(block_name):
-#         if doDropout:
-#             fc = tf.keras.layers.Dropout(drop_rate)(input)
-#             fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
-#         else:
-#             fc = tf.keras.layers.Dense(num_neurons, activation=activation)(input)
-#         if doBatchNorm:
-#             fc = tf.keras.layers.BatchNormalization()(fc)
-#         print(fc.shape)
-#         fc_skip = fc
-#
-#         if doDropout:
-#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
-#         fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
-#         if doBatchNorm:
-#             fc = tf.keras.layers.BatchNormalization()(fc)
-#         print(fc.shape)
-#
-#         if doDropout:
-#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
-#         fc = tf.keras.layers.Dense(num_neurons, activation=activation)(fc)
-#         if doBatchNorm:
-#             fc = tf.keras.layers.BatchNormalization()(fc)
-#         print(fc.shape)
-#
-#         if doDropout:
-#             fc = tf.keras.layers.Dropout(drop_rate)(fc)
-#         fc = tf.keras.layers.Dense(num_neurons, activation=None)(fc)
-#         if doBatchNorm:
-#             fc = tf.keras.layers.BatchNormalization()(fc)
-#
-#         fc = fc + fc_skip
-#         fc = tf.keras.layers.LeakyReLU()(fc)
-#         print(fc.shape)
-#
-#         return fc

 def build_residual_block_1x1(input_layer, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
                              do_drop_out=True, do_batch_norm=True):
@@ -599,16 +560,27 @@ class IcingIntensityFCN:
         num_filters = len(self.train_params) * 4

-        if NOISE_TRAINING:
-            input_2d = tf.keras.layers.GaussianNoise(stddev=NOISE_STDDEV)(self.inputs[0])
-        else:
-            input_2d = self.inputs[0]
+        input_2d = self.inputs[0]
+
+        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=5, strides=1, padding=padding, activation=None)(input_2d)
+        print(conv.shape)
+        skip = conv
+
+        if NOISE_TRAINING:
+            conv = tf.keras.layers.GaussianNoise(stddev=NOISE_STDDEV)(conv)

-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=5, strides=1, padding=padding, activation=activation)(input_2d)
+        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=5, strides=1, padding=padding, activation=activation)(conv)
         conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
         conv = tf.keras.layers.BatchNormalization()(conv)
         print(conv.shape)

+        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
+        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
+        skip = tf.keras.layers.BatchNormalization()(skip)
+
+        conv = conv + skip
+        conv = tf.keras.layers.LeakyReLU()(conv)
+        print(conv.shape)
+
         num_filters *= 2
         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
         conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
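
Taken together, this hunk replaces the plain Conv→MaxPool→BatchNorm stem with a small residual stem: a 5x5 main path and a 3x3 linear projection of the pre-noise activations are each pooled and batch-normalized, summed, and passed through a LeakyReLU, with the Gaussian noise now injected after the first convolution rather than on the raw inputs. A self-contained sketch of the new stem, assuming a channels-last input; the input shape, filter count, and activation below are illustrative and not taken from the repository:

import tensorflow as tf

NOISE_TRAINING = True   # mirrors the module-level flag above
NOISE_STDDEV = 0.10

def residual_stem(input_2d, num_filters, activation='relu', padding='SAME'):
    # Main path: linear 5x5 conv, optional training-time noise, 5x5 conv + pool + batch norm.
    conv = tf.keras.layers.Conv2D(num_filters, kernel_size=5, strides=1, padding=padding, activation=None)(input_2d)
    skip = conv
    if NOISE_TRAINING:
        conv = tf.keras.layers.GaussianNoise(stddev=NOISE_STDDEV)(conv)
    conv = tf.keras.layers.Conv2D(num_filters, kernel_size=5, strides=1, padding=padding, activation=activation)(conv)
    conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
    conv = tf.keras.layers.BatchNormalization()(conv)

    # Skip path: 3x3 linear projection, pooled and normalized to match the main path's shape.
    skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
    skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
    skip = tf.keras.layers.BatchNormalization()(skip)

    # Merge the two paths and apply the non-linearity.
    return tf.keras.layers.LeakyReLU()(conv + skip)

# Illustrative usage: 16x16 tiles (img_width = 16 above) with a hypothetical 8 channels.
inputs = tf.keras.Input(shape=(16, 16, 8))
stem = residual_stem(inputs, num_filters=32)
tf.keras.Model(inputs, stem).summary()

In the model itself, num_filters is len(self.train_params) * 4 and the activation comes from the surrounding method; the 32 filters and ReLU here are placeholders.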