Skip to content
Snippets Groups Projects
Commit 08dd38e3 authored by tomrink's avatar tomrink
Browse files

minor

parent 72ba190d
Branches
No related tags found
No related merge requests found
......@@ -11,9 +11,6 @@ import h5py
LOG_DEVICE_PLACEMENT = False
# Manual (data, label) caching, but has been replaced with tf.data.dataset.cache()
CACHE_DATA_IN_MEM = False
PROC_BATCH_SIZE = 4096
PROC_BATCH_BUFFER_SIZE = 50000
......@@ -241,16 +238,6 @@ class UNET:
def get_in_mem_data_batch(self, idxs, is_training):
# Pretty much dead, but left in here for reference (See note above)
if CACHE_DATA_IN_MEM:
key = frozenset(idxs)
if is_training:
tup = self.in_mem_data_cache.get(key)
else:
tup = self.in_mem_data_cache_test(key)
if tup is not None:
return tup[0], tup[1], tup[2]
# sort these to use as numpy indexing arrays
nd_idxs = np.array(idxs)
nd_idxs = np.sort(nd_idxs)
......@@ -289,12 +276,6 @@ class UNET:
label = np.where(np.invert(np.logical_or(label == 0, label == 1)), 2, label)
label = label.reshape((label.shape[0], 1))
if CACHE_DATA_IN_MEM:
if is_training:
self.in_mem_data_cache[key] = (data, data_alt, label)
else:
self.in_mem_data_cache_test[key] = (data, data_alt, label)
if is_training and DO_AUGMENT:
data_ud = np.flip(data, axis=1)
data_alt_ud = np.copy(data_alt)
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment