diff --git a/modules/deeplearning/espcn.py b/modules/deeplearning/espcn.py
index a7a9395c333364b40d14b9b92e4ee2d1b6eb8687..2756f0bf285e4afbc64d5b33b0bb78318e672737 100644
--- a/modules/deeplearning/espcn.py
+++ b/modules/deeplearning/espcn.py
@@ -60,56 +60,48 @@ data_param = data_params[data_idx]
 label_param = label_params[label_idx]
 
 
-def build_conv2d_block(conv, num_filters, activation, block_name, padding='SAME'):
-    with tf.name_scope(block_name):
-        skip = conv
-
-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=5, strides=1, padding=padding, activation=activation)(conv)
-        conv = tf.keras.layers.MaxPool2D(padding=padding)(conv)
-        conv = tf.keras.layers.BatchNormalization()(conv)
-        print(conv.shape)
+def build_conv2d_block(conv, num_filters, block_name, activation=tf.nn.leaky_relu, padding='SAME'):
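+    """Residual block: two stacked 3x3 convolutions plus an identity skip.
+
+    Note the inverted naming below: `skip` holds the convolutional branch and
+    `conv` the identity path. The element-wise add assumes the incoming tensor
+    already has `num_filters` channels.
+    """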
 
+    with tf.name_scope(block_name):
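+        # Convolutional branch: a 3x3 conv with activation followed by a
+        # linear 3x3 conv, so the block computes x + F(x).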
+        skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=activation)(conv)
         skip = tf.keras.layers.Conv2D(num_filters, kernel_size=3, strides=1, padding=padding, activation=None)(skip)
-        skip = tf.keras.layers.MaxPool2D(padding=padding)(skip)
-        skip = tf.keras.layers.BatchNormalization()(skip)
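+        # Identity shortcut: the add below sums the unchanged block input with
+        # the convolutional branch output.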
 
         conv = conv + skip
-        conv = tf.keras.layers.LeakyReLU()(conv)
         print(conv.shape)
 
     return conv
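+
+# Shape sketch for build_conv2d_block (illustrative only; assumes an NHWC input
+# whose channel count already equals num_filters):
+#   x = tf.keras.layers.Input(shape=(64, 64, 32))
+#   y = build_conv2d_block(x, 32, 'block_1')  # -> (None, 64, 64, 32)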
 
 
-def build_residual_block_1x1(input_layer, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
-                             do_drop_out=True, do_batch_norm=True):
-
-    with tf.name_scope(block_name):
-        skip = input_layer
-        if do_drop_out:
-            input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
-        if do_batch_norm:
-            input_layer = tf.keras.layers.BatchNormalization()(input_layer)
-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
-        print(conv.shape)
-
-        # if do_drop_out:
-        #     conv = tf.keras.layers.Dropout(drop_rate)(conv)
-        # if do_batch_norm:
-        #     conv = tf.keras.layers.BatchNormalization()(conv)
-        # conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
-        # print(conv.shape)
-
-        if do_drop_out:
-            conv = tf.keras.layers.Dropout(drop_rate)(conv)
-        if do_batch_norm:
-            conv = tf.keras.layers.BatchNormalization()(conv)
-        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=None)(conv)
-
-        conv = conv + skip
-        conv = tf.keras.layers.LeakyReLU()(conv)
-        print(conv.shape)
-
-    return conv
+# def build_residual_block_1x1(input_layer, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
+#                              do_drop_out=True, do_batch_norm=True):
+#
+#     with tf.name_scope(block_name):
+#         skip = input_layer
+#         if do_drop_out:
+#             input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
+#         if do_batch_norm:
+#             input_layer = tf.keras.layers.BatchNormalization()(input_layer)
+#         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
+#         print(conv.shape)
+#
+#         # if do_drop_out:
+#         #     conv = tf.keras.layers.Dropout(drop_rate)(conv)
+#         # if do_batch_norm:
+#         #     conv = tf.keras.layers.BatchNormalization()(conv)
+#         # conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
+#         # print(conv.shape)
+#
+#         if do_drop_out:
+#             conv = tf.keras.layers.Dropout(drop_rate)(conv)
+#         if do_batch_norm:
+#             conv = tf.keras.layers.BatchNormalization()(conv)
+#         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=None)(conv)
+#
+#         conv = conv + skip
+#         conv = tf.keras.layers.LeakyReLU()(conv)
+#         print(conv.shape)
+#
+#     return conv
 
 
 class ESPCN: