Commit 578ac024 authored by tomrink

minor

parent e81531d1
@@ -72,38 +72,6 @@ def build_conv2d_block(conv, num_filters, block_name, activation=tf.nn.leaky_relu
    return conv
# def build_residual_block_1x1(input_layer, num_filters, activation, block_name, padding='SAME', drop_rate=0.5,
#                              do_drop_out=True, do_batch_norm=True):
#
#     with tf.name_scope(block_name):
#         skip = input_layer
#         if do_drop_out:
#             input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
#         if do_batch_norm:
#             input_layer = tf.keras.layers.BatchNormalization()(input_layer)
#         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(input_layer)
#         print(conv.shape)
#
#         # if do_drop_out:
#         #     conv = tf.keras.layers.Dropout(drop_rate)(conv)
#         # if do_batch_norm:
#         #     conv = tf.keras.layers.BatchNormalization()(conv)
#         # conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=activation)(conv)
#         # print(conv.shape)
#
#         if do_drop_out:
#             conv = tf.keras.layers.Dropout(drop_rate)(conv)
#         if do_batch_norm:
#             conv = tf.keras.layers.BatchNormalization()(conv)
#         conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1, padding=padding, activation=None)(conv)
#
#         conv = conv + skip
#         conv = tf.keras.layers.LeakyReLU()(conv)
#         print(conv.shape)
#
#         return conv
class ESPCN:
    def __init__(self):
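The hunk above deletes a commented-out 1x1 residual block helper. For reference, a minimal runnable sketch of what those comments describe, assuming TensorFlow 2.x; this is a hypothetical reconstruction, not repo code. The skip connection requires num_filters to match the input's channel count, and the final Conv2D is linear so the LeakyReLU is applied after the residual add:

import tensorflow as tf

def residual_block_1x1_sketch(input_layer, num_filters, activation,
                              block_name, padding='same', drop_rate=0.5,
                              do_drop_out=True, do_batch_norm=True):
    # Hypothetical reconstruction of the deleted helper, not repo code.
    with tf.name_scope(block_name):
        skip = input_layer  # identity branch; channel count must equal num_filters
        if do_drop_out:
            input_layer = tf.keras.layers.Dropout(drop_rate)(input_layer)
        if do_batch_norm:
            input_layer = tf.keras.layers.BatchNormalization()(input_layer)
        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1,
                                      padding=padding,
                                      activation=activation)(input_layer)
        if do_drop_out:
            conv = tf.keras.layers.Dropout(drop_rate)(conv)
        if do_batch_norm:
            conv = tf.keras.layers.BatchNormalization()(conv)
        # Linear final conv: the nonlinearity comes after the skip addition.
        conv = tf.keras.layers.Conv2D(num_filters, kernel_size=1, strides=1,
                                      padding=padding, activation=None)(conv)
        conv = conv + skip
        return tf.keras.layers.LeakyReLU()(conv)

# Example: a 64-channel feature map passes through the block unchanged in shape.
inp = tf.keras.Input(shape=(None, None, 64))
out = residual_block_1x1_sketch(inp, 64, tf.nn.leaky_relu, 'res1x1_0')
model = tf.keras.Model(inp, out)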
@@ -204,12 +172,10 @@ class ESPCN:
        self.X_img = tf.keras.Input(shape=(None, None, self.n_chans))
        # self.X_img = tf.keras.Input(shape=(36, 36, self.n_chans))
        self.X_img = tf.keras.Input(shape=(32, 32, self.n_chans))
        # self.X_img = tf.keras.Input(shape=(32, 32, self.n_chans))
        self.inputs.append(self.X_img)
        self.DISK_CACHE = False
        tf.debugging.set_log_device_placement(LOG_DEVICE_PLACEMENT)
    def get_in_mem_data_batch(self, idxs, is_training):
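This hunk swaps the model input between a fixed 32x32 tile and a shape-agnostic (None, None) spatial shape. A fixed shape is convenient for training on uniform patches, while (None, None) keeps an ESPCN fully convolutional so one graph can upscale any image size. Below is a hypothetical minimal ESPCN-style head illustrating the difference, assuming TF 2.x; the layer widths, channel count, and upscale factor are placeholders, not values from this repo:

import tensorflow as tf

n_chans = 3  # assumption: the repo sets self.n_chans elsewhere
factor = 2   # assumption: super-resolution upscale factor

inp = tf.keras.Input(shape=(None, None, n_chans))  # any H x W at inference
x = tf.keras.layers.Conv2D(64, 5, padding='same', activation=tf.nn.tanh)(inp)
x = tf.keras.layers.Conv2D(32, 3, padding='same', activation=tf.nn.tanh)(x)
x = tf.keras.layers.Conv2D(n_chans * factor ** 2, 3, padding='same')(x)
# Sub-pixel shuffle: (H, W, C * r^2) -> (H * r, W * r, C)
out = tf.nn.depth_to_space(x, factor)
model = tf.keras.Model(inp, out)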
@@ -652,11 +618,6 @@ class ESPCN:
            ckpt_manager.save()
            if self.DISK_CACHE and epoch == 0:
                f = open(cachepath, 'wb')
                pickle.dump(self.in_mem_data_cache, f)
                f.close()
            if EARLY_STOP and es.check_stop(tst_loss):
                break
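The training-loop hunk shows checkpoint saving, an optional epoch-0 pickle dump of the in-memory data cache, and an early-stop check. The es.check_stop(tst_loss) call implies a patience-style helper whose implementation is not part of this diff; a hypothetical minimal stand-in (class name, patience, and min_delta are assumptions):

class EarlyStop:
    def __init__(self, patience=5, min_delta=0.0):
        self.patience = patience    # epochs without improvement to tolerate
        self.min_delta = min_delta  # minimum decrease that counts as progress
        self.best = float('inf')
        self.bad_epochs = 0

    def check_stop(self, loss):
        # True once the test/validation loss has stalled for `patience` epochs.
        if loss < self.best - self.min_delta:
            self.best = loss
            self.bad_epochs = 0
        else:
            self.bad_epochs += 1
        return self.bad_epochs >= self.patience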