Commit d0cf9ea5 authored by tomrink

snapshot...

parent 887dc8b2
@@ -30,7 +30,7 @@ NUM_EPOCHS = 60
 TRACK_MOVING_AVERAGE = False
 EARLY_STOP = True
-NOISE_TRAINING = True
+NOISE_TRAINING = False
 NOISE_STDDEV = 0.01
 DO_AUGMENT = True
@@ -246,15 +246,17 @@ class SRCNN:
         DO_ADD_NOISE = True
         data_norm = []
-        # for param in data_params:
-        #     idx = params.index(param)
-        #     # tmp = input_data[:, idx, slc_y_2, slc_x_2]
-        #     tmp = input_data[:, idx, slc_y, slc_x]
-        #     tmp = normalize(tmp, param, mean_std_dct)
-        #     if DO_ADD_NOISE:
-        #         tmp = add_noise(tmp, noise_scale=NOISE_STDDEV)
-        #     # tmp = resample_2d_linear(x_2, y_2, tmp, t, s)
-        #     data_norm.append(tmp)
+        for param in data_params:
+            idx = params.index(param)
+            # tmp = input_data[:, idx, slc_y, slc_x]
+            tmp = input_data[:, idx, :, :]
+            tmp = smooth_2d(tmp, sigma=1.0)
+            tmp = tmp[:, slc_y_2, slc_x_2]
+            tmp = normalize(tmp, param, mean_std_dct)
+            if DO_ADD_NOISE:
+                tmp = add_noise(tmp, noise_scale=NOISE_STDDEV)
+            # tmp = resample_2d_linear(x_2, y_2, tmp, t, s)
+            data_norm.append(tmp)
         # # --------------------------
         # param = 'refl_0_65um_nom'
         # idx = params.index(param)
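Note on the hunk above: the preprocessing loop is re-enabled, and each channel is now Gaussian-smoothed at full resolution before being subsampled with slc_y_2/slc_x_2, normalized, and optionally noised. The helpers smooth_2d, normalize, and add_noise are defined elsewhere in this repository; the sketch below is only an assumption of their behavior (signatures and the layout of mean_std_dct are guesses), using numpy and scipy stand-ins:

# Sketch, not the repository's implementation: assumed behavior of the
# helpers called in the loop above.
import numpy as np
from scipy.ndimage import gaussian_filter

def smooth_2d(batch, sigma=1.0):
    # Smooth each (y, x) field in a (batch, y, x) array with a Gaussian kernel.
    return np.stack([gaussian_filter(img, sigma=sigma) for img in batch])

def normalize(batch, param, mean_std_dct):
    # Standardize using an assumed per-parameter (mean, stddev) lookup.
    mean, std = mean_std_dct[param]
    return (batch - mean) / std

def add_noise(batch, noise_scale=0.01):
    # Additive zero-mean Gaussian noise used as training-time augmentation.
    return batch + np.random.normal(loc=0.0, scale=noise_scale, size=batch.shape)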
@@ -420,7 +422,7 @@ class SRCNN:
         activation = tf.nn.relu
         momentum = 0.99
-        num_filters = 64
+        num_filters = 32
         input_2d = self.inputs[0]
         print('input: ', input_2d.shape)
@@ -437,7 +439,7 @@ class SRCNN:
         conv_b = build_residual_conv2d_block(conv_b, num_filters, 'Residual_Block_2', kernel_size=KERNEL_SIZE, scale=scale)
-        conv_b = build_residual_conv2d_block(conv_b, num_filters, 'Residual_Block_3', kernel_size=KERNEL_SIZE, scale=scale)
+        #conv_b = build_residual_conv2d_block(conv_b, num_filters, 'Residual_Block_3', kernel_size=KERNEL_SIZE, scale=scale)
         #conv_b = build_residual_conv2d_block(conv_b, num_filters, 'Residual_Block_4', kernel_size=KERNEL_SIZE, scale=scale)
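The backbone is narrowed here: 32 filters instead of 64, and two residual blocks instead of three. build_residual_conv2d_block is defined elsewhere in the module; the following is an assumed, EDSR-style sketch of a scaled residual block with this call signature, not necessarily the repository's exact layer stack:

# Assumed structure only; the real build_residual_conv2d_block may differ.
import tensorflow as tf

def build_residual_conv2d_block(inputs, num_filters, block_name, kernel_size=3, scale=0.2):
    with tf.name_scope(block_name):
        x = tf.keras.layers.Conv2D(num_filters, kernel_size, padding='same',
                                   activation=tf.nn.relu)(inputs)
        x = tf.keras.layers.Conv2D(num_filters, kernel_size, padding='same',
                                   activation=None)(x)
        # Residual scaling before the skip connection helps keep deep stacks stable.
        return inputs + scale * x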
@@ -749,11 +751,12 @@ def run_evaluate_static(in_file, out_file, ckpt_dir):
     y_0, x_0, = 2432 - int(sub_y/2), 2432 - int(sub_x/2)
     h5f = h5py.File(in_file, 'r')
-    # grd_a = get_grid_values_all(h5f, 'temp_11_0um_nom')
-    # grd_a = grd_a[y_0:y_0+sub_y, x_0:x_0+sub_x]
-    # grd_a = grd_a[y_130, x_130]
-    # bt = grd_a
-    # grd_a = normalize(grd_a, 'temp_11_0um_nom', mean_std_dct)
+    grd_a = get_grid_values_all(h5f, 'temp_11_0um_nom')
+    grd_a = grd_a[y_0:y_0+sub_y, x_0:x_0+sub_x]
+    hr_grd_a = grd_a.copy()
+    hr_grd_a = hr_grd_a[y_128, x_128]
+    grd_a = grd_a[slc_y_2, slc_x_2]
+    grd_a = normalize(grd_a, 'temp_11_0um_nom', mean_std_dct)
     #
     # grd_b = get_grid_values_all(h5f, 'refl_0_65um_nom')
     # grd_b = grd_b[y_0:y_0+sub_y, x_0:x_0+sub_x]
@@ -773,7 +776,7 @@ def run_evaluate_static(in_file, out_file, ckpt_dir):
     grd_c = grd_c[y_k, x_k]
     # data = np.stack([grd_a, grd_b, grd_c], axis=2)
-    data = np.stack([grd_c], axis=2)
+    data = np.stack([grd_a, grd_c], axis=2)
     data = np.expand_dims(data, axis=0)
     nn = SRCNN()
...
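With grd_a restored in run_evaluate_static, the evaluation input now carries two channels (the subsampled, normalized 11 um brightness temperature plus grd_c) instead of one, and hr_grd_a keeps a full-resolution copy for comparison against the network output. A minimal shape sanity check along these lines (variable names taken from the diff, array shapes assumed):

# Sketch: confirm the stacked evaluation input matches a two-channel model.
data = np.stack([grd_a, grd_c], axis=2)   # (H, W, 2) low-resolution channels
data = np.expand_dims(data, axis=0)       # (1, H, W, 2) adds the batch dimension
assert data.shape[-1] == 2, 'SRCNN must now be configured for two input channels'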