Commit f5e4ef1e authored by tomrink

snapshot...

parent c3fb4d8c
@@ -73,23 +73,13 @@ print('data_params_full: ', data_params_full)
 print('label_param: ', label_param)
 
 KERNEL_SIZE = 3  # target size: (128, 128)
-N = 1
+N_X = N_Y = 1
 
 if KERNEL_SIZE == 3:
-    # # slc_x = slice(2, N*128 + 4)
-    # # slc_y = slice(2, N*128 + 4)
-    # slc_x_2 = slice(1, N*128 + 6, 2)
-    # slc_y_2 = slice(1, N*128 + 6, 2)
-    # x_2 = np.arange(int((N*128)/2) + 3)
-    # y_2 = np.arange(int((N*128)/2) + 3)
-    # t = np.arange(0, int((N*128)/2) + 3, 0.5)
-    # s = np.arange(0, int((N*128)/2) + 3, 0.5)
-    # x_k = slice(1, N*128 + 3)
-    # y_k = slice(1, N*128 + 3)
-    slc_x = slice(1, int((N*128)/2) + 3)
-    slc_y = slice(1, int((N*128)/2) + 3)
-    x_128 = slice(4, N*128 + 4)
-    y_128 = slice(4, N*128 + 4)
+    slc_x = slice(1, int((N_X*128)/2) + 3)
+    slc_y = slice(1, int((N_Y*128)/2) + 3)
+    x_128 = slice(4, N_X*128 + 4)
+    y_128 = slice(4, N_Y*128 + 4)
 elif KERNEL_SIZE == 5:
     slc_x = slice(3, 135)
     slc_y = slice(3, 135)
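For KERNEL_SIZE == 3, the new N_X/N_Y slices pick a 66-sample half-resolution window for each 128-sample full-resolution target tile (64 interior samples plus a one-sample border on each side; the border reading is an assumption, the counts below are just the slice bounds). A minimal sketch of the arithmetic:

# Sketch only: verifies the window sizes implied by the slices above for N_X = N_Y = 1.
N_X = N_Y = 1
slc_x = slice(1, int((N_X * 128) / 2) + 3)   # slice(1, 67) on the half-resolution grid
x_128 = slice(4, N_X * 128 + 4)              # slice(4, 132) on the full-resolution grid
print(slc_x.stop - slc_x.start)              # 66 half-resolution samples per tile
print(x_128.stop - x_128.start)              # 128 full-resolution samples per tile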
@@ -127,13 +117,6 @@ def build_residual_conv2d_block(conv, num_filters, block_name, activation=tf.nn.
     return conv
 
 
-# def upsample(tmp):
-#     tmp = tmp[:, slc_y_2, slc_x_2]
-#     tmp = resample_2d_linear(x_2, y_2, tmp, t, s)
-#     tmp = tmp[:, y_k, x_k]
-#     return tmp
-
-
 def upsample_mean(grd):
     bsize, ylen, xlen = grd.shape
     up = np.zeros((bsize, ylen*2, xlen*2))
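The body of upsample_mean is cut off by this hunk. For orientation only, a hypothetical completion that replicates each coarse cell into a 2x2 block, so every 2x2 output block keeps the mean of its source cell; the replication behaviour is an assumption, not the repository's actual implementation:

import numpy as np

def upsample_mean_sketch(grd):
    # Hypothetical stand-in for upsample_mean: nearest-neighbour replication,
    # which preserves the mean of each 2x2 output block.
    bsize, ylen, xlen = grd.shape
    up = np.zeros((bsize, ylen * 2, xlen * 2))
    up[:, 0::2, 0::2] = grd
    up[:, 0::2, 1::2] = grd
    up[:, 1::2, 0::2] = grd
    up[:, 1::2, 1::2] = grd
    return up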
@@ -792,65 +775,41 @@ def run_restore_static(directory, ckpt_dir, out_file=None):
 def run_evaluate_static(in_file, out_file, ckpt_dir):
-    N = 10
-    slc_x = slice(2, N*128 + 4)
-    slc_y = slice(2, N*128 + 4)
-    slc_x_2 = slice(1, N*128 + 6, 2)
-    slc_y_2 = slice(1, N*128 + 6, 2)
-    x_2 = np.arange(int((N*128)/2) + 3)
-    y_2 = np.arange(int((N*128)/2) + 3)
-    t = np.arange(0, int((N*128)/2) + 3, 0.5)
-    s = np.arange(0, int((N*128)/2) + 3, 0.5)
-    x_k = slice(1, N*128 + 3)
-    y_k = slice(1, N*128 + 3)
-    x_128 = slice(3, N*128 + 3)
-    y_128 = slice(3, N*128 + 3)
-    sub_y, sub_x = (N * 128) + 10, (N * 128) + 10
-    y_0, x_0, = 3232 - int(sub_y/2), 3200 - int(sub_x/2)
+    N_X = N_Y = 10
+    sub_y, sub_x = (N_Y * 128) + 10, (N_X * 128) + 10
+    y_0, x_0, = 3232 - int(sub_y/2), 1100 - int(sub_x/2)
+    slc_x = slice(1, int((N_X*128)/2) + 3)
+    slc_y = slice(1, int((N_Y*128)/2) + 3)
 
     h5f = h5py.File(in_file, 'r')
 
-    grd_a = get_grid_values_all(h5f, 'temp_11_0um_nom')
-    grd_a = grd_a[y_0:y_0+sub_y, x_0:x_0+sub_x]
-    grd_a = grd_a.copy()
+    grd_a = get_grid_values_all(h5f, 'orig/temp_11_0um')
     grd_a = np.where(np.isnan(grd_a), 0, grd_a)
-    hr_grd_a = grd_a.copy()
-    hr_grd_a = hr_grd_a[y_128, x_128]
-    # Full res:
-    # grd_a = grd_a[slc_y, slc_x]
-    # Half res:
-    grd_a = grd_a[slc_y_2, slc_x_2]
-    grd_a = resample_2d_linear_one(x_2, y_2, grd_a, t, s)
-    grd_a = grd_a[y_k, x_k]
+    grd_a = grd_a[y_0:y_0+sub_y, x_0:x_0+sub_x]
     grd_a = normalize(grd_a, 'temp_11_0um_nom', mean_std_dct)
-    # ------------------------------------------------------
-    grd_b = get_grid_values_all(h5f, 'refl_0_65um_nom')
-    grd_b = grd_b[y_0:y_0+sub_y, x_0:x_0+sub_x]
-    grd_b = grd_b.copy()
-    grd_b = np.where(np.isnan(grd_b), 0, grd_b)
-    hr_grd_b = grd_b.copy()
-    hr_grd_b = hr_grd_b[y_128, x_128]
-    grd_b = grd_b[slc_y, slc_x]
-    grd_b = normalize(grd_b, 'refl_0_65um_nom', mean_std_dct)
+    grd_a = grd_a[slc_y, slc_x]
 
-    grd_c = get_grid_values_all(h5f, label_param)
-    grd_c = grd_c[y_0:y_0+sub_y, x_0:x_0+sub_x]
-    hr_grd_c = grd_c.copy()
-    hr_grd_c = np.where(np.isnan(hr_grd_c), 0, grd_c)
-    hr_grd_c = hr_grd_c[y_128, x_128]
-    # hr_grd_c = smooth_2d_single(hr_grd_c, sigma=1.0)
+    grd_b = get_grid_values_all(h5f, 'super/refl_0_65um')
+    grd_b = np.where(np.isnan(grd_b), 0, grd_b)
+    grd_b = grd_b[y_0:y_0+sub_y, x_0:x_0+sub_x]
+    lo, hi, std, avg = get_min_max_std(grd_b)
+    # std = np.where(np.isnan(std), 0, std)
+    lo = normalize(lo, 'refl_0_65um_nom', mean_std_dct)
+    hi = normalize(hi, 'refl_0_65um_nom', mean_std_dct)
+    avg = normalize(avg, 'refl_0_65um_nom', mean_std_dct)
+    lo = lo[slc_y, slc_x]
+    hi = hi[slc_y, slc_x]
+    avg = avg[slc_y, slc_x]
+
+    grd_c = get_grid_values_all(h5f, 'orig/'+label_param)
     grd_c = np.where(np.isnan(grd_c), 0, grd_c)
     grd_c = grd_c.copy()
     # grd_c = smooth_2d_single(grd_c, sigma=1.0)
-    grd_c = grd_c[slc_y_2, slc_x_2]
-    grd_c = resample_2d_linear_one(x_2, y_2, grd_c, t, s)
-    grd_c = grd_c[y_k, x_k]
     if label_param != 'cloud_probability':
         grd_c = normalize(grd_c, label_param, mean_std_dct)
-    data = np.stack([grd_a, grd_b, grd_c], axis=2)
+    grd_c = grd_c[y_0:y_0+sub_y, x_0:x_0+sub_x]
+    grd_c = grd_c[slc_y, slc_x]
+    data = np.stack([grd_a, lo, hi, avg, grd_c], axis=2)
     data = np.expand_dims(data, axis=0)
 
     h5f.close()
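With this change the evaluation input becomes a single five-channel tile: the 11.0 um brightness temperature, the lo/hi/avg statistics that get_min_max_std derives from the 'super' 0.65 um reflectance, and the label channel. A quick shape check (the 642 x 642 size follows from slc_y/slc_x with N_X = N_Y = 10; the zero arrays are placeholders, not data):

import numpy as np

# Placeholder arrays only; slice(1, 643) from slc_y/slc_x yields 642 samples per axis.
H = W = 642
grd_a = lo = hi = avg = grd_c = np.zeros((H, W))

data = np.stack([grd_a, lo, hi, avg, grd_c], axis=2)   # (642, 642, 5)
data = np.expand_dims(data, axis=0)                    # (1, 642, 642, 5): batch of one
print(data.shape)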
@@ -858,9 +817,9 @@ def run_evaluate_static(in_file, out_file, ckpt_dir):
     nn = SRCNN()
     out_sr = nn.run_evaluate(data, ckpt_dir)
     if out_file is not None:
-        np.save(out_file, (out_sr[0, :, :, 0], hr_grd_a, hr_grd_b, hr_grd_c))
+        np.save(out_file, (out_sr[0, :, :, 0], grd_a, avg, grd_c))
     else:
-        return out_sr, hr_grd_a, hr_grd_b, hr_grd_c
+        return out_sr, grd_a, avg, grd_c
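The save/return paths now hand back the model output together with the normalized, sliced inputs grd_a, avg, and grd_c instead of the old hr_grd_* arrays. A hypothetical reader for the saved file (the file name is a placeholder; allow_pickle=True covers the case where the four arrays differ in shape and NumPy stores them as an object array):

import numpy as np

# Hypothetical: load the tuple written by run_evaluate_static above.
out_sr, grd_a, avg, grd_c = np.load('srcnn_eval.npy', allow_pickle=True)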
def analyze2(nda_m, nda_i):