diff --git a/modules/deeplearning/srcnn_l1b_l2.py b/modules/deeplearning/srcnn_l1b_l2.py
index 57788cc967d88184cfe388e77b88102e79e2dbb9..027f7765078f6553c3ae58aa8a2710a0f80e955b 100644
--- a/modules/deeplearning/srcnn_l1b_l2.py
+++ b/modules/deeplearning/srcnn_l1b_l2.py
@@ -51,6 +51,7 @@ f.close()
 mean_std_dct.update(mean_std_dct_l1b)
 mean_std_dct.update(mean_std_dct_l2)
 
+IMG_DEPTH = 1
 # label_param = 'cloud_fraction'
 label_param = 'cld_opd_dcomp'
 # label_param = 'cloud_probability'
@@ -92,6 +93,11 @@ elif KERNEL_SIZE == 5:
     x_2 = np.arange(68)
     y_2 = np.arange(68)
 # ----------------------------------------
+# Exp for ESPCN version
+slc_x_2 = slice(0, 132, 2)
+slc_y_2 = slice(0, 132, 2)
+x_128 = slice(2, 130)
+y_128 = slice(2, 130)
 
 
 def build_residual_conv2d_block(conv, num_filters, block_name, activation=tf.nn.relu, padding='SAME',
@@ -411,7 +417,6 @@ class SRCNN:
 
         conv = conv + conv_b
         print(conv.shape)
 
-        # This is effectively a Dense layer
         self.logits = tf.keras.layers.Conv2D(1, kernel_size=1, strides=1, padding=padding,
                                              name='regression')(conv)
@@ -747,8 +752,8 @@ def run_evaluate_static(in_file, out_file, ckpt_dir):
     grd_c = normalize(grd_c, label_param, mean_std_dct)
 
     # data = np.stack([grd_a, grd_b, grd_c], axis=2)
-    data = np.stack([grd_a, grd_c], axis=2)
-    # data = np.stack([grd_c], axis=2)
+    # data = np.stack([grd_a, grd_c], axis=2)
+    data = np.stack([grd_c], axis=2)
     data = np.expand_dims(data, axis=0)
 
     h5f.close()