From 9a9ddb53d4f7928d66a9b403da3bd9175f701bb2 Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Thu, 20 Oct 2022 13:22:34 -0500
Subject: [PATCH] snapshot...

---
 modules/deeplearning/icing_fcn.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/modules/deeplearning/icing_fcn.py b/modules/deeplearning/icing_fcn.py
index 07aa8adf..2cb96427 100644
--- a/modules/deeplearning/icing_fcn.py
+++ b/modules/deeplearning/icing_fcn.py
@@ -30,7 +30,7 @@
 TRIPLET = False
 CONV3D = False
 NOISE_TRAINING = True
-NOISE_STDDEV = 0.10
+NOISE_STDDEV = 0.01
 DO_AUGMENT = True
 
 img_width = 16
@@ -566,7 +566,7 @@ class IcingIntensityFCN:
         # activation = tf.nn.elu
         activation = tf.nn.leaky_relu
 
-        num_filters = len(self.train_params) * 10
+        num_filters = len(self.train_params) * 16
 
         input_2d = self.inputs[0]
         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=5, strides=1, padding=padding, activation=None)(input_2d)
@@ -651,11 +651,11 @@ class IcingIntensityFCN:
 
         conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_2', padding=padding)
 
-        conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_3', padding=padding)
+        # conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_3', padding=padding)
 
-        conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_4', padding=padding)
+        # conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_4', padding=padding)
 
-        conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_5', padding=padding)
+        # conv = build_residual_block_1x1(conv, num_filters, activation, 'Residual_Block_5', padding=padding)
 
         print(conv.shape)
 
@@ -681,7 +681,7 @@ class IcingIntensityFCN:
         initial_learning_rate = 0.002
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
-        decay_steps = int(steps_per_epoch / 2)
+        decay_steps = int(steps_per_epoch)
         print('initial rate, decay rate, steps/epoch, decay steps: ', initial_learning_rate, decay_rate, steps_per_epoch, decay_steps)
 
         self.learningRateSchedule = tf.keras.optimizers.schedules.ExponentialDecay(initial_learning_rate, decay_steps, decay_rate)
-- 
GitLab