diff --git a/modules/deeplearning/unet_l1b_l2.py b/modules/deeplearning/unet_l1b_l2.py
index 00948a9829b6c2c1b6ea25a6a40ead3bae3b5e07..c15cc582f715b8e9d79172e48614b22880d91024 100644
--- a/modules/deeplearning/unet_l1b_l2.py
+++ b/modules/deeplearning/unet_l1b_l2.py
@@ -331,34 +331,6 @@ class UNET:
         dataset = dataset.map(self.data_function_evaluate, num_parallel_calls=8)
         self.eval_dataset = dataset
 
-    # def setup_pipeline(self, data_nda, label_nda, perc=0.20):
-    #
-    #     num_samples = data_nda.shape[0]
-    #     num_test = int(num_samples * perc)
-    #     self.num_data_samples = num_samples - num_test
-    #     num_train = self.num_data_samples
-    #
-    #     self.train_data_nda = data_nda[0:num_train]
-    #     self.train_label_nda = label_nda[0:num_train]
-    #     self.test_data_nda = data_nda[num_train:]
-    #     self.test_label_nda = label_nda[num_train:]
-    #
-    #     trn_idxs = np.arange(self.train_data_nda.shape[0])
-    #     tst_idxs = np.arange(self.test_data_nda.shape[0])
-    #
-    #     np.random.shuffle(tst_idxs)
-    #
-    #     self.get_train_dataset(trn_idxs)
-    #     self.get_test_dataset(tst_idxs)
-    #
-    #     print('datetime: ', now)
-    #     print('training and test data: ')
-    #     print('---------------------------')
-    #     print('num train samples: ', self.num_data_samples)
-    #     print('BATCH SIZE: ', BATCH_SIZE)
-    #     print('num test samples: ', tst_idxs.shape[0])
-    #     print('setup_pipeline: Done')
-
     def setup_pipeline(self, train_data_files, train_label_files, test_data_files, test_label_files, num_train_samples):
 
         self.train_data_files = train_data_files
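
For context, a minimal usage sketch of the new file-based `setup_pipeline` based only on the signature shown in this hunk; the argument-free `UNET()` construction, the glob patterns, and using `len(train_data_files)` for `num_train_samples` are assumptions for illustration, not taken from this diff.

```python
# Hypothetical usage sketch of the file-based setup_pipeline.
# Paths, glob patterns, and the num_train_samples choice are assumptions.
import glob

unet = UNET()  # assumes UNET can be constructed without arguments

# Hypothetical directory layout for L1B inputs and L2 labels.
train_data_files = sorted(glob.glob('/data/train/l1b_*.h5'))
train_label_files = sorted(glob.glob('/data/train/l2_*.h5'))
test_data_files = sorted(glob.glob('/data/test/l1b_*.h5'))
test_label_files = sorted(glob.glob('/data/test/l2_*.h5'))

unet.setup_pipeline(train_data_files, train_label_files,
                    test_data_files, test_label_files,
                    num_train_samples=len(train_data_files))
```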