diff --git a/modules/deeplearning/srcnn_l1b_l2.py b/modules/deeplearning/srcnn_l1b_l2.py
index 2dcb906ddbd5f017bccbbf1cbaae58a8dba20371..41289cae8c8084418e888796439edd23b26a2721 100644
--- a/modules/deeplearning/srcnn_l1b_l2.py
+++ b/modules/deeplearning/srcnn_l1b_l2.py
@@ -69,21 +69,21 @@ print('data_params_full: ', data_params_full)
 print('label_param: ', label_param)
 
 KERNEL_SIZE = 3  # target size: (128, 128)
-N = 1
+N_X = N_Y = 1
 
 if KERNEL_SIZE == 3:
-    slc_x = slice(2, N*128 + 4)
-    slc_y = slice(2, N*128 + 4)
-    slc_x_2 = slice(1, N*128 + 6, 2)
-    slc_y_2 = slice(1, N*128 + 6, 2)
-    x_2 = np.arange(int((N*128)/2) + 3)
-    y_2 = np.arange(int((N*128)/2) + 3)
-    t = np.arange(0, int((N*128)/2) + 3, 0.5)
-    s = np.arange(0, int((N*128)/2) + 3, 0.5)
-    x_k = slice(1, N*128 + 3)
-    y_k = slice(1, N*128 + 3)
-    x_128 = slice(3, N*128 + 3)
-    y_128 = slice(3, N*128 + 3)
+    slc_x = slice(2, N_X*128 + 4)
+    slc_y = slice(2, N_Y*128 + 4)
+    slc_x_2 = slice(1, N_X*128 + 6, 2)
+    slc_y_2 = slice(1, N_Y*128 + 6, 2)
+    x_2 = np.arange(int((N_X*128)/2) + 3)
+    y_2 = np.arange(int((N_Y*128)/2) + 3)
+    t = np.arange(0, int((N_X*128)/2) + 3, 0.5)
+    s = np.arange(0, int((N_Y*128)/2) + 3, 0.5)
+    x_k = slice(1, N_X*128 + 3)
+    y_k = slice(1, N_Y*128 + 3)
+    x_128 = slice(3, N_X*128 + 3)
+    y_128 = slice(3, N_Y*128 + 3)
 elif KERNEL_SIZE == 5:
     slc_x = slice(3, 135)
     slc_y = slice(3, 135)
@@ -718,47 +718,42 @@ def run_restore_static(directory, ckpt_dir, out_file=None):
 
 
 def run_evaluate_static(in_file, out_file, ckpt_dir):
-    N = 10
-
-    slc_x = slice(2, N*128 + 4)
-    slc_y = slice(2, N*128 + 4)
-    slc_x_2 = slice(1, N*128 + 6, 2)
-    slc_y_2 = slice(1, N*128 + 6, 2)
-    x_2 = np.arange(int((N*128)/2) + 3)
-    y_2 = np.arange(int((N*128)/2) + 3)
-    t = np.arange(0, int((N*128)/2) + 3, 0.5)
-    s = np.arange(0, int((N*128)/2) + 3, 0.5)
-    x_k = slice(1, N*128 + 3)
-    y_k = slice(1, N*128 + 3)
-    x_128 = slice(3, N*128 + 3)
-    y_128 = slice(3, N*128 + 3)
-
-    sub_y, sub_x = (N * 128) + 10, (N * 128) + 10
+    N_X = N_Y = 10
+
+    slc_x = slice(2, N_X*128 + 4)
+    slc_y = slice(2, N_Y*128 + 4)
+    slc_x_2 = slice(1, N_X*128 + 6, 2)
+    slc_y_2 = slice(1, N_Y*128 + 6, 2)
+    x_2 = np.arange(int((N_X*128)/2) + 3)
+    y_2 = np.arange(int((N_Y*128)/2) + 3)
+    t = np.arange(0, int((N_X*128)/2) + 3, 0.5)
+    s = np.arange(0, int((N_Y*128)/2) + 3, 0.5)
+    x_k = slice(1, N_X*128 + 3)
+    y_k = slice(1, N_Y*128 + 3)
+    x_128 = slice(3, N_X*128 + 3)
+    y_128 = slice(3, N_Y*128 + 3)
+
+    sub_y, sub_x = (N_Y * 128) + 10, (N_X * 128) + 10
     y_0, x_0, = 3232 - int(sub_y/2), 3200 - int(sub_x/2)
 
     h5f = h5py.File(in_file, 'r')
 
     grd_a = get_grid_values_all(h5f, 'temp_11_0um_nom')
     grd_a = grd_a[y_0:y_0+sub_y, x_0:x_0+sub_x]
-    grd_a = grd_a.copy()
     grd_a = np.where(np.isnan(grd_a), 0, grd_a)
     hr_grd_a = grd_a.copy()
-    hr_grd_a = hr_grd_a[y_128, x_128]
-    # Full res:
-    # grd_a = grd_a[slc_y, slc_x]
-    # Half res:
-    grd_a = grd_a[slc_y_2, slc_x_2]
-    grd_a = resample_2d_linear_one(x_2, y_2, grd_a, t, s)
-    grd_a = grd_a[y_k, x_k]
+    grd_a = upsample(grd_a)
     grd_a = normalize(grd_a, 'temp_11_0um_nom', mean_std_dct)
+    hr_grd_a = hr_grd_a[y_128, x_128]
+
     # ------------------------------------------------------
     grd_b = get_grid_values_all(h5f, 'refl_0_65um_nom')
     grd_b = grd_b[y_0:y_0+sub_y, x_0:x_0+sub_x]
-    grd_b = grd_b.copy()
     grd_b = np.where(np.isnan(grd_b), 0, grd_b)
     hr_grd_b = grd_b.copy()
     hr_grd_b = hr_grd_b[y_128, x_128]
+    # Full res:
     grd_b = grd_b[slc_y, slc_x]
     grd_b = normalize(grd_b, 'refl_0_65um_nom', mean_std_dct)
 
     grd_c = get_grid_values_all(h5f, label_param)
@@ -766,13 +761,9 @@ def run_evaluate_static(in_file, out_file, ckpt_dir):
     hr_grd_c = grd_c.copy()
     hr_grd_c = np.where(np.isnan(hr_grd_c), 0, grd_c)
     hr_grd_c = hr_grd_c[y_128, x_128]
-    # hr_grd_c = smooth_2d_single(hr_grd_c, sigma=1.0)
+
     grd_c = np.where(np.isnan(grd_c), 0, grd_c)
-    grd_c = grd_c.copy()
-    # grd_c = smooth_2d_single(grd_c, sigma=1.0)
-    grd_c = grd_c[slc_y_2, slc_x_2]
-    grd_c = resample_2d_linear_one(x_2, y_2, grd_c, t, s)
-    grd_c = grd_c[y_k, x_k]
+    grd_c = upsample(grd_c)
     if label_param != 'cloud_probability':
         grd_c = normalize(grd_c, label_param, mean_std_dct)
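
Note on upsample(): the helper called on grd_a and grd_c above is not defined in this diff, so the sketch below is only a reconstruction from the lines it replaces (half-resolution sampling via slc_y_2/slc_x_2, bilinear resampling with resample_2d_linear_one, then the y_k/x_k crop). The actual helper in srcnn_l1b_l2.py may differ; the names and scoping here are assumptions.

    # Hypothetical sketch, not the repo's implementation.  Assumes the
    # slc_y_2/slc_x_2, x_2/y_2, t/s and y_k/x_k definitions that are in
    # scope for the grid being processed, plus the existing
    # resample_2d_linear_one() utility.
    def upsample(grd):
        grd = grd[slc_y_2, slc_x_2]                         # every other sample (half resolution)
        grd = resample_2d_linear_one(x_2, y_2, grd, t, s)   # bilinear back onto the 0.5-step grid
        return grd[y_k, x_k]                                # trim to the kernel-padded N*128 + 2 window

Since run_evaluate_static() redefines those slices locally for N_X = N_Y = 10, a module-level upsample() would presumably either take them as arguments or derive them from the input shape rather than relying on the N_X = N_Y = 1 module defaults.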