diff --git a/modules/deeplearning/icing.py b/modules/deeplearning/icing.py
index 730490a2672b599b8cf3e2a899d4f0c499aa80af..7c5f387297783d5af1fe7c558e6e9f205d63b5a4 100644
--- a/modules/deeplearning/icing.py
+++ b/modules/deeplearning/icing.py
@@ -178,22 +178,28 @@ class IcingIntensityNN:
                 # Memory growth must be set before GPUs have been initialized
                 print(e)
 
-    def get_in_mem_data_batch(self, keys):
+    def get_in_mem_data_batch(self, idxs):
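+        # Build an order-independent, hashable cache key from the batch indices.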
+        key = frozenset(idxs)
+
+        if CACHE_DATA_IN_MEM:
+            tup = self.in_mem_data_cache.get(key)
+            if tup is not None:
+                return tup
 
         # sort these to use as numpy indexing arrays
-        nd_keys = np.array(keys)
-        nd_keys = np.sort(nd_keys)
+        nd_idxs = np.array(idxs)
+        nd_idxs = np.sort(nd_idxs)
 
         data = []
         for param in train_params:
-            nda = self.h5f[param][nd_keys, ]
+            nda = self.h5f[param][nd_idxs, ]
             nda = normalize(nda, param, mean_std_dct)
             data.append(nda)
         data = np.stack(data)
         data = data.astype(np.float32)
         data = np.transpose(data, axes=(1, 0))
 
-        label = self.h5f['icing_intensity'][nd_keys]
+        label = self.h5f['icing_intensity'][nd_idxs]
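+        # -1 labels are folded into class 0 below; non-zero intensities then
+        # collapse to 1 for the binary (icing / no icing) target.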
         label = label.astype(np.int32)
         label = np.where(label == -1, 0, label)
 
@@ -201,18 +207,8 @@ class IcingIntensityNN:
         label = np.where(label != 0, 1, label)
         label = label.reshape((label.shape[0], 1))
 
-        # TODO: Implement in memory cache
-        # for key in keys:
-        #     if CACHE_DATA_IN_MEM:
-        #         tup = self.in_mem_data_cache.get(key)
-        #         if tup is not None:
-        #             images.append(tup[0])
-        #             vprof.append(tup[1])
-        #             label.append(tup[2])
-        #             continue
-        #
-        #     if CACHE_DATA_IN_MEM:
-        #         self.in_mem_data_cache[key] = (nda, ndb, ndc)
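+        # Cache the fully prepared (normalized features, binarized labels) batch so
+        # repeated requests for the same index set skip the HDF5 read.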
+        if CACHE_DATA_IN_MEM:
+            self.in_mem_data_cache[key] = (data, label)
 
         return data, label
 
@@ -354,15 +350,15 @@ class IcingIntensityNN:
 
         fac = 1
 
-        fc = build_residual_block(flat, drop_rate, fac*n_hidden, activation, 'Residual_Block_1')
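+        # Five stacked residual blocks, each built without internal batch normalization
+        # (doBatchNorm=False); a BatchNormalization layer still follows the dense layer below.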
+        fc = build_residual_block(flat, drop_rate, fac*n_hidden, activation, 'Residual_Block_1', doBatchNorm=False)
 
-        fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_2')
+        fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_2', doBatchNorm=False)
 
-        #fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_3')
+        fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_3', doBatchNorm=False)
 
-        #fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_4')
+        fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_4', doBatchNorm=False)
 
-        #fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_5')
+        fc = build_residual_block(fc, drop_rate, fac*n_hidden, activation, 'Residual_Block_5', doBatchNorm=False)
 
         fc = tf.keras.layers.Dense(n_hidden, activation=activation)(fc)
         fc = tf.keras.layers.BatchNormalization()(fc)