Commit 30d6ebd0 authored by tomrink

snapshot...

parent 9d174842
...
@@ -134,6 +134,13 @@ def upsample(tmp):
     return tmp
 
 
+def upsample_one(tmp):
+    tmp = tmp[slc_y_2, slc_x_2]
+    tmp = resample_2d_linear_one(x_2, y_2, tmp, t, s)
+    tmp = tmp[y_k, x_k]
+    return tmp
+
+
 class SRCNN:
 
     def __init__(self):
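
Note: upsample_one relies on module-level slc_y_2, slc_x_2, x_2, y_2, t, s, y_k and x_k that match the per-call definitions this commit removes from run_evaluate_static below (N_X = N_Y = 10, 128-pixel tiles with a small border). A minimal sketch of its effect, assuming resample_2d_linear_one is a plain bilinear regridder (scipy's RegularGridInterpolator stands in for it here):

import numpy as np
from scipy.interpolate import RegularGridInterpolator

# Constants copied from the block removed from run_evaluate_static below.
N_X = N_Y = 10
slc_x_2 = slice(1, N_X*128 + 6, 2)   # every other column of the padded tile
slc_y_2 = slice(1, N_Y*128 + 6, 2)   # every other row
x_2 = np.arange(int((N_X*128)/2) + 3)           # source grid, half resolution
y_2 = np.arange(int((N_Y*128)/2) + 3)
t = np.arange(0, int((N_X*128)/2) + 3, 0.5)     # target grid, twice as dense
s = np.arange(0, int((N_Y*128)/2) + 3, 0.5)
x_k = slice(1, N_X*128 + 3)          # trim the interpolation border
y_k = slice(1, N_Y*128 + 3)

def upsample_one_sketch(tmp):
    # (1290, 1290) padded tile -> (643, 643) half-resolution sampling
    tmp = tmp[slc_y_2, slc_x_2]
    # Bilinear regridding onto the 2x denser target grid; assumed to be
    # equivalent to resample_2d_linear_one(x_2, y_2, tmp, t, s).
    interp = RegularGridInterpolator((y_2, x_2), tmp, method='linear',
                                     bounds_error=False, fill_value=None)
    rows, cols = np.meshgrid(s, t, indexing='ij')
    tmp = interp(np.stack([rows, cols], axis=-1))   # -> (1286, 1286)
    # Keep the interior that lines up with the N_Y*128 x N_X*128 tiles.
    return tmp[y_k, x_k]                            # -> (1282, 1282)
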
...
@@ -676,11 +683,6 @@ class SRCNN:
         pred = self.model([data], training=False)
         self.test_probs = pred
 
-        pred = pred.numpy()
-        if label_param != 'cloud_probability':
-            pred = denormalize(pred, label_param, mean_std_dct)
-        return pred
-
 
     def run(self, directory, ckpt_dir=None, num_data_samples=50000):
         train_data_files = glob.glob(directory+'data_train_*.npy')
...
@@ -718,54 +720,40 @@ def run_restore_static(directory, ckpt_dir, out_file=None):
 
 
 def run_evaluate_static(in_file, out_file, ckpt_dir):
-    N_X = N_Y = 10
-
-    slc_x = slice(2, N_X*128 + 4)
-    slc_y = slice(2, N_Y*128 + 4)
-    slc_x_2 = slice(1, N_X*128 + 6, 2)
-    slc_y_2 = slice(1, N_Y*128 + 6, 2)
-    x_2 = np.arange(int((N_X*128)/2) + 3)
-    y_2 = np.arange(int((N_Y*128)/2) + 3)
-    t = np.arange(0, int((N_X*128)/2) + 3, 0.5)
-    s = np.arange(0, int((N_Y*128)/2) + 3, 0.5)
-    x_k = slice(1, N_X*128 + 3)
-    y_k = slice(1, N_Y*128 + 3)
-    x_128 = slice(3, N_X*128 + 3)
-    y_128 = slice(3, N_Y*128 + 3)
-
     sub_y, sub_x = (N_Y * 128) + 10, (N_X * 128) + 10
-    y_0, x_0, = 3232 - int(sub_y/2), 3200 - int(sub_x/2)
+    y_0, x_0, = 3232 - int(sub_y/2), 1100 - int(sub_x/2)
 
     h5f = h5py.File(in_file, 'r')
 
-    grd_a = get_grid_values_all(h5f, 'temp_11_0um_nom')
+    grd_a = get_grid_values_all(h5f, 'super/temp_11_0um')
     grd_a = grd_a[y_0:y_0+sub_y, x_0:x_0+sub_x]
     grd_a = np.where(np.isnan(grd_a), 0, grd_a)
    hr_grd_a = grd_a.copy()
-    grd_a = upsample(grd_a)
-    grd_a = normalize(grd_a, 'temp_11_0um_nom', mean_std_dct)
+    grd_a = upsample_one(grd_a)
+    grd_a = normalize(grd_a, 'super/temp_11_0um', mean_std_dct)
     hr_grd_a = hr_grd_a[y_128, x_128]
     # ------------------------------------------------------
-    grd_b = get_grid_values_all(h5f, 'refl_0_65um_nom')
+    grd_b = get_grid_values_all(h5f, 'super/refl_0_65um')
     grd_b = grd_b[y_0:y_0+sub_y, x_0:x_0+sub_x]
     grd_b = np.where(np.isnan(grd_b), 0, grd_b)
     hr_grd_b = grd_b.copy()
     hr_grd_b = hr_grd_b[y_128, x_128]
     # Full res:
-    grd_b = grd_b[:, slc_y, slc_x]
-    grd_b = normalize(grd_b, 'refl_0_65um_nom', mean_std_dct)
+    grd_b = grd_b[slc_y, slc_x]
+    grd_b = normalize(grd_b, 'super/refl_0_65um', mean_std_dct)
 
-    grd_c = get_grid_values_all(h5f, label_param)
+    grd_c = get_grid_values_all(h5f, 'super/'+label_param)
     grd_c = grd_c[y_0:y_0+sub_y, x_0:x_0+sub_x]
     hr_grd_c = grd_c.copy()
     hr_grd_c = np.where(np.isnan(hr_grd_c), 0, grd_c)
     hr_grd_c = hr_grd_c[y_128, x_128]
     grd_c = np.where(np.isnan(grd_c), 0, grd_c)
-    grd_c = upsample(grd_c)
+    grd_c = upsample_one(grd_c)
     if label_param != 'cloud_probability':
-        grd_c = normalize(grd_c, label_param, mean_std_dct)
+        grd_c = normalize(grd_c, 'super/'+label_param, mean_std_dct)
 
     data = np.stack([grd_a, grd_b, grd_c], axis=2)
     data = np.expand_dims(data, axis=0)
...
@@ -773,7 +761,12 @@ def run_evaluate_static(in_file, out_file, ckpt_dir):
     h5f.close()
 
     nn = SRCNN()
-    out_sr = nn.run_evaluate(data, ckpt_dir)
+    nn.run_evaluate(data, ckpt_dir)
+    out_sr = nn.test_probs
+    out_sr = out_sr.numpy()
+    if label_param != 'cloud_probability':
+        out_sr = denormalize(out_sr, label_param, mean_std_dct)
+
     if out_file is not None:
         np.save(out_file, (out_sr[0, :, :, 0], hr_grd_a, hr_grd_b, hr_grd_c))
     else:
...
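
A quick sanity check on the evaluation window and shapes implied by the hunks above (the values restate the diff; N_X = N_Y = 10 is assumed to live at module level now that the in-function copy is gone):

N_X = N_Y = 10
sub_y = sub_x = N_Y*128 + 10        # 1290: ten 128-pixel tiles plus a 10-pixel pad
y_0 = 3232 - sub_y // 2             # 2587
x_0 = 1100 - sub_x // 2             # 455  (the crop center moves from x=3200 to x=1100)
# hr_grd_* references are trimmed with slice(3, N_*128 + 3) -> 1280 x 1280,
# while the network input channels come out of upsample_one / slc_* at 1282 x 1282.

Note also that denormalization of the prediction now happens at this call site, on nn.test_probs, rather than inside SRCNN.run_evaluate.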