Commit a8547977 authored by tomrink

snapshot...

parent 8bfbbd68
@@ -19,7 +19,7 @@ from scipy.ndimage import gaussian_filter
 LOG_DEVICE_PLACEMENT = False
 PROC_BATCH_SIZE = 4
-PROC_BATCH_BUFFER_SIZE = 50000
+PROC_BATCH_BUFFER_SIZE = 5000
 NumClasses = 2
 if NumClasses == 2:
@@ -35,7 +35,7 @@ EARLY_STOP = True
 NOISE_TRAINING = False
 NOISE_STDDEV = 0.01
-DO_AUGMENT = True
+DO_AUGMENT = False
 DO_SMOOTH = False
 SIGMA = 1.0
@@ -267,7 +267,6 @@ class SRCNN:
         for param in data_params_half:
             idx = params.index(param)
             tmp = input_data[:, idx, :, :]
-            tmp = tmp.copy()
             tmp = np.where(np.isnan(tmp), 0, tmp)
             if DO_ESPCN:
                 tmp = tmp[:, slc_y_2, slc_x_2]
@@ -281,7 +280,6 @@ class SRCNN:
         for param in data_params_full:
             idx = params.index(param)
             tmp = input_data[:, idx, :, :]
-            tmp = tmp.copy()
             tmp = np.where(np.isnan(tmp), 0, tmp)
             # Full res:
             tmp = tmp[:, slc_y, slc_x]
@@ -291,7 +289,6 @@ class SRCNN:
             data_norm.append(tmp)
         # ---------------------------------------------------
         tmp = input_data[:, label_idx, :, :]
-        tmp = tmp.copy()
         tmp = np.where(np.isnan(tmp), 0, tmp)
         if DO_SMOOTH:
             tmp = smooth_2d(tmp, sigma=SIGMA)
@@ -316,7 +313,6 @@ class SRCNN:
         # -----------------------------------------------------
         # -----------------------------------------------------
         label = input_data[:, label_idx, :, :]
-        label = label.copy()
         if DO_SMOOTH:
             label = np.where(np.isnan(label), 0, label)
             label = smooth_2d(label, sigma=SIGMA)
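
The removed tmp.copy() / label.copy() calls look redundant as long as the slices are not written to in place afterwards, since np.where(np.isnan(...), 0, ...) returns a new array rather than mutating its input. A minimal sketch (using a made-up array, not the project's data) illustrating why the explicit copy can be dropped:

    import numpy as np

    # Hypothetical stand-in for one channel slice of input_data.
    tmp = np.array([[1.0, np.nan], [3.0, 4.0]])

    # np.where builds a fresh array; the source slice is left untouched,
    # so a preceding tmp.copy() adds nothing here.
    cleaned = np.where(np.isnan(tmp), 0, tmp)

    print(np.shares_memory(tmp, cleaned))  # False: a new buffer was allocated
    print(np.isnan(tmp).any())             # True: the original NaN is still there
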
@@ -468,10 +464,10 @@ class SRCNN:
         self.loss = tf.keras.losses.MeanSquaredError()  # Regression
         # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
-        initial_learning_rate = 0.005
+        initial_learning_rate = 0.002
         decay_rate = 0.95
         steps_per_epoch = int(self.num_data_samples/BATCH_SIZE)  # one epoch
-        decay_steps = int(steps_per_epoch)
+        decay_steps = int(steps_per_epoch) * 2
         print('initial rate, decay rate, steps/epoch, decay steps: ', initial_learning_rate, decay_rate, steps_per_epoch, decay_steps)
         self.learningRateSchedule = tf.keras.optimizers.schedules.ExponentialDecay(initial_learning_rate, decay_steps, decay_rate)
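
The last hunk retunes the exponential-decay schedule: the initial learning rate drops from 0.005 to 0.002 and decay_steps doubles to two epochs' worth of batches. A rough sketch of how the new schedule evaluates, assuming a made-up sample count and batch size (the real script derives steps_per_epoch from its own dataset and BATCH_SIZE):

    import tensorflow as tf

    # Hypothetical values; the script computes these from its data pipeline.
    num_data_samples = 100_000
    BATCH_SIZE = 128

    initial_learning_rate = 0.002
    decay_rate = 0.95
    steps_per_epoch = int(num_data_samples / BATCH_SIZE)  # one epoch
    decay_steps = int(steps_per_epoch) * 2                # decay interval is now two epochs

    schedule = tf.keras.optimizers.schedules.ExponentialDecay(
        initial_learning_rate, decay_steps, decay_rate)

    # decayed_learning_rate = initial_learning_rate * decay_rate ** (step / decay_steps)
    for epoch in (0, 2, 4, 8):
        step = epoch * steps_per_epoch
        print(epoch, float(schedule(step)))
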