From 08dd38e36039430aaf824929a6b7854c802d680b Mon Sep 17 00:00:00 2001
From: tomrink <rink@ssec.wisc.edu>
Date: Mon, 11 Apr 2022 11:39:50 -0500
Subject: [PATCH] Remove dead manual data-caching path superseded by
 tf.data.Dataset.cache()

---
 modules/deeplearning/unet.py | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/modules/deeplearning/unet.py b/modules/deeplearning/unet.py
index 53cfff79..15a89cf2 100644
--- a/modules/deeplearning/unet.py
+++ b/modules/deeplearning/unet.py
@@ -11,9 +11,6 @@ import h5py
 
 LOG_DEVICE_PLACEMENT = False
 
-# Manual (data, label) caching, but has been replaced with tf.data.dataset.cache()
-CACHE_DATA_IN_MEM = False
-
 PROC_BATCH_SIZE = 4096
 PROC_BATCH_BUFFER_SIZE = 50000
 
@@ -241,16 +238,6 @@ class UNET:
 
     def get_in_mem_data_batch(self, idxs, is_training):
 
-        # Pretty much dead, but left in here for reference (See note above)
-        if CACHE_DATA_IN_MEM:
-            key = frozenset(idxs)
-            if is_training:
-                tup = self.in_mem_data_cache.get(key)
-            else:
-                tup = self.in_mem_data_cache_test(key)
-            if tup is not None:
-                return tup[0], tup[1], tup[2]
-
         # sort these to use as numpy indexing arrays
         nd_idxs = np.array(idxs)
         nd_idxs = np.sort(nd_idxs)
@@ -289,12 +276,6 @@ class UNET:
             label = np.where(np.invert(np.logical_or(label == 0, label == 1)), 2, label)
             label = label.reshape((label.shape[0], 1))
 
-        if CACHE_DATA_IN_MEM:
-            if is_training:
-                self.in_mem_data_cache[key] = (data, data_alt, label)
-            else:
-                self.in_mem_data_cache_test[key] = (data, data_alt, label)
-
         if is_training and DO_AUGMENT:
             data_ud = np.flip(data, axis=1)
             data_alt_ud = np.copy(data_alt)
-- 
GitLab
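
For reference, the deleted comment names tf.data.dataset.cache() (properly tf.data.Dataset.cache()) as the replacement for the manual frozenset-keyed dicts. Below is a minimal sketch of that pattern, not code from unet.py; load_batch and the index/shape choices are hypothetical stand-ins for the expensive work done in get_in_mem_data_batch().

    import tensorflow as tf

    # Hypothetical stand-in for UNET.get_in_mem_data_batch(): in the real
    # module this would do HDF5 reads, index sorting, and label remapping.
    def load_batch(idxs):
        data = tf.random.normal([tf.shape(idxs)[0], 16])
        label = tf.zeros([tf.shape(idxs)[0], 1])
        return data, label

    batch_idxs = [list(range(i, i + 8)) for i in range(0, 64, 8)]
    ds = tf.data.Dataset.from_tensor_slices(batch_idxs)
    ds = ds.map(load_batch, num_parallel_calls=tf.data.AUTOTUNE)

    # cache() memoizes each element after the first full pass, replacing the
    # hand-rolled in_mem_data_cache dicts without any key management.
    ds = ds.cache().prefetch(tf.data.AUTOTUNE)

    for epoch in range(2):
        for data, label in ds:
            pass  # epoch 0 fills the cache; epoch 1 is served from it

Because cache() memoizes elements, the second epoch yields identical tensors even though load_batch draws random data, which is a quick way to confirm the cache is being hit.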
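
Separately, the context line at the top of the third hunk folds every label outside {0, 1} into class 2. A quick NumPy check of that exact expression, with a hypothetical label array:

    import numpy as np

    label = np.array([0, 1, 3, 7, 1, 0])
    # Values that are neither 0 nor 1 are mapped to 2 (a catch-all class).
    label = np.where(np.invert(np.logical_or(label == 0, label == 1)), 2, label)
    print(label)  # -> [0 1 2 2 1 0]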