diff --git a/modules/deeplearning/srcnn_l1b_l2.py b/modules/deeplearning/srcnn_l1b_l2.py
index e855896833b3eb500005d1bf9e7f09042df1d4c0..2fb73c2b148978ba7c2136cba3a9d9fa3279df5e 100644
--- a/modules/deeplearning/srcnn_l1b_l2.py
+++ b/modules/deeplearning/srcnn_l1b_l2.py
@@ -690,11 +690,12 @@ class SRCNN:
 
         self.reset_test_metrics()
 
-        print(data.shape, data.min(), data.max())
         pred = self.model([data], training=False)
         self.test_probs = pred
         pred = pred.numpy()
-        print('**: ', pred.shape, pred.min(), pred.max())
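+        # map normalized predictions back to physical units; cloud_probability is never normalized, so it is left as-is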
+        if label_param != 'cloud_probability':
+            pred = denormalize(pred, label_param, mean_std_dct)
 
         return pred
 
@@ -720,6 +721,8 @@
         return self.restore(ckpt_dir)
 
     def run_evaluate(self, data, ckpt_dir):
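+        # accept a plain NumPy array from callers and convert it to a float32 tensor here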
+        data = tf.convert_to_tensor(data, dtype=tf.float32)
         self.num_data_samples = 80000
         self.build_model()
         self.build_training()
@@ -773,32 +776,33 @@ def run_evaluate_static(in_file, out_file, ckpt_dir):
     # grd_b = normalize(grd_b, 'refl_0_65um_nom', mean_std_dct)
 
     grd_c = get_grid_values_all(h5f, label_param)
-    # grd_c = gaussian_filter(grd_c, sigma=1.0)
     grd_c = grd_c[y_0:y_0+sub_y, x_0:x_0+sub_x]
-    grd_c = grd_c.copy()
-    grd_c = np.where(np.isnan(grd_c), 0, grd_c)
+
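+    # keep an unmodified high-resolution copy of the label field (NaNs preserved) to save/return alongside the SR output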
     hr_grd_c = grd_c.copy()
     hr_grd_c = hr_grd_c[y_128, x_128]
+
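+    # zero-fill NaNs only in the copy used as network input, then subset and resample it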
+    grd_c = grd_c.copy()
+    grd_c = np.where(np.isnan(grd_c), 0, grd_c)
     grd_c = grd_c[slc_y_2, slc_x_2]
     grd_c = resample_2d_linear_one(x_2, y_2, grd_c, t, s)
     grd_c = grd_c[y_k, x_k]
+
     if label_param != 'cloud_probability':
         grd_c = normalize(grd_c, label_param, mean_std_dct)
 
     # data = np.stack([grd_a, grd_b, grd_c], axis=2)
-    #data = np.stack([grd_a, grd_c], axis=2)
+    # data = np.stack([grd_a, grd_c], axis=2)
     data = np.stack([grd_c], axis=2)
     data = np.expand_dims(data, axis=0)
-    data = tf.convert_to_tensor(data, dtype=tf.float32)
 
     nn = SRCNN()
     out_sr = nn.run_evaluate(data, ckpt_dir)
-    if label_param != 'cloud_probability':
-        out_sr = denormalize(out_sr, label_param, mean_std_dct)
     if out_file is not None:
         np.save(out_file, [out_sr, hr_grd_c])
     else:
-        return out_sr, None, None
+        return out_sr, hr_grd_c
 
 
 if __name__ == "__main__":