Commit 4fe6621e authored by tomrink

snapshot...

parent e577c8fa
@@ -62,9 +62,11 @@ IMG_DEPTH = 1
 label_param = 'cloud_probability'
 params = ['temp_11_0um_nom', 'refl_0_65um_nom', label_param]
+params_i = ['refl_0_65um_nom', label_param]
 data_params_half = ['temp_11_0um_nom']
 data_params_full = ['refl_0_65um_nom']
+label_idx_i = params_i.index(label_param)
 label_idx = params.index(label_param)
 print('data_params_half: ', data_params_half)
@@ -75,8 +77,8 @@ KERNEL_SIZE = 3  # target size: (128, 128)
 N = 1
 if KERNEL_SIZE == 3:
-    slc_x = slice(2, N*128 + 4)
-    slc_y = slice(2, N*128 + 4)
+    # slc_x = slice(2, N*128 + 4)
+    # slc_y = slice(2, N*128 + 4)
     slc_x_2 = slice(1, N*128 + 6, 2)
     slc_y_2 = slice(1, N*128 + 6, 2)
     x_2 = np.arange(int((N*128)/2) + 3)
@@ -85,6 +87,8 @@ if KERNEL_SIZE == 3:
     s = np.arange(0, int((N*128)/2) + 3, 0.5)
     x_k = slice(1, N*128 + 3)
     y_k = slice(1, N*128 + 3)
+    slc_x = slice(1, N*128 + 3)
+    slc_y = slice(1, N*128 + 3)
     x_128 = slice(2, N*128 + 2)
     y_128 = slice(2, N*128 + 2)
 elif KERNEL_SIZE == 5:
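For orientation, a minimal sketch of what the redefined KERNEL_SIZE == 3 slices select. The padded tile width used here is an assumption chosen only for illustration; the slice bounds themselves come from the hunk above.

import numpy as np

N = 1
pad_full = np.zeros((N*128 + 8, N*128 + 8))  # assumed padded full-res tile

slc_y = slice(1, N*128 + 3)
slc_x = slice(1, N*128 + 3)
y_128 = slice(2, N*128 + 2)
x_128 = slice(2, N*128 + 2)

print(pad_full[slc_y, slc_x].shape)   # (130, 130): target plus a 1-pixel halo for the 3x3 kernel
print(pad_full[y_128, x_128].shape)   # (128, 128): the target tile size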
@@ -168,7 +172,8 @@ def build_residual_block_conv2d_down2x(x_in, num_filters, activation, padding='S
 def upsample(tmp):
-    tmp = tmp[:, slc_y_2, slc_x_2]
+    # tmp = tmp[:, slc_y_2, slc_x_2]
+    tmp = tmp[:, 0:66, 0:66]
     tmp = resample_2d_linear(x_2, y_2, tmp, t, s)
     tmp = tmp[:, y_k, x_k]
     return tmp
@@ -183,6 +188,8 @@ def upsample_nearest(grd):
     up[:, 0::2, 1::2] = grd[:, 0::, 0::]
     up[:, 1::2, 1::2] = grd[:, 0::, 0::]
+    up = up[:, y_k, x_k]
     return up
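The strided assignments above are the tail of a 2x nearest-neighbour upsampling, now followed by a y_k/x_k crop. A self-contained sketch of the pattern, with assumed shapes; the real function presumably also fills the even-column pairs not visible in this hunk:

import numpy as np

def upsample_nearest_sketch(grd):
    # grd: (batch, h, w) half-resolution tile
    b, h, w = grd.shape
    up = np.zeros((b, 2*h, 2*w), dtype=grd.dtype)
    up[:, 0::2, 0::2] = grd   # copy each source pixel into a 2x2 block
    up[:, 1::2, 0::2] = grd
    up[:, 0::2, 1::2] = grd
    up[:, 1::2, 1::2] = grd
    return up

grd = np.arange(6, dtype=np.float32).reshape(1, 2, 3)
print(upsample_nearest_sketch(grd).shape)   # (1, 4, 6): each value repeated in a 2x2 block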
@@ -379,24 +386,24 @@ class SRCNN:
     def get_in_mem_data_batch(self, idxs, is_training):
         if is_training:
-            files = self.train_data_files
+            data_files = self.train_data_files
+            label_files = self.train_label_files
         else:
-            files = self.test_data_files
+            data_files = self.test_data_files
+            label_files = self.test_label_files
         data_s = []
+        label_s = []
         for k in idxs:
-            f = files[k]
-            try:
-                nda = np.load(f)
-            except Exception:
-                print(f)
-                continue
-            data_s.append(nda)
-        input_data = np.concatenate(data_s)
-        DO_ADD_NOISE = False
-        if is_training and NOISE_TRAINING:
-            DO_ADD_NOISE = True
+            f = data_files[k]
+            nda = np.load(f)
+            data_s.append(nda)
+            f = label_files[k]
+            nda = np.load(f)
+            label_s.append(nda)
+        input_data = np.concatenate(data_s)
+        input_label = np.concatenate(label_s)

         data_norm = []
         for param in data_params_half:
@@ -412,11 +419,10 @@
             data_norm.append(tmp)

         for param in data_params_full:
-            idx = params.index(param)
-            tmp = input_data[:, idx, :, :]
+            idx = params_i.index(param)
+            tmp = input_label[:, idx, :, :]
             tmp = tmp.copy()
             tmp = np.where(np.isnan(tmp), 0, tmp)
-            # Full res:
             tmp = tmp[:, slc_y, slc_x]
             tmp = normalize(tmp, param, mean_std_dct)
             data_norm.append(tmp)
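A small illustration of the channel bookkeeping this change relies on: the full-resolution reflectance channel and the training label now both come from the label (ires) arrays, indexed through params_i. The array shapes below are assumptions chosen only to show the indexing:

import numpy as np

label_param = 'cloud_probability'
params_i = ['refl_0_65um_nom', label_param]
label_idx_i = params_i.index(label_param)   # 1

# hypothetical concatenated label tiles: (batch, channel, y, x)
input_label = np.zeros((4, len(params_i), 136, 136), dtype=np.float32)

refl = input_label[:, params_i.index('refl_0_65um_nom'), :, :]   # full-res input channel
label = input_label[:, label_idx_i, :, :]                        # training target channel
print(refl.shape, label.shape)   # (4, 136, 136) (4, 136, 136)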
@@ -427,9 +433,7 @@
         if DO_ESPCN:
             tmp = tmp[:, slc_y_2, slc_x_2]
         else:  # Half res upsampled to full res:
-            # tmp = upsample(tmp)
-            tmp = upsample_mean(tmp)
-            tmp = tmp[:, slc_y, slc_x]
+            tmp = upsample_nearest(tmp)
         if label_param != 'cloud_probability':
             tmp = normalize(tmp, label_param, mean_std_dct)
         data_norm.append(tmp)
@@ -438,7 +442,7 @@
         data = data.astype(np.float32)
         # -----------------------------------------------------
         # -----------------------------------------------------
-        label = input_data[:, label_idx, :, :]
+        label = input_label[:, label_idx_i, :, :]
         label = label.copy()
         label = label[:, y_128, x_128]
         if NumClasses == 5:
@@ -504,10 +508,11 @@
         dataset = dataset.cache()
         self.test_dataset = dataset

-    def setup_pipeline(self, train_data_files, test_data_files, num_train_samples):
+    def setup_pipeline(self, train_data_files, train_label_files, test_data_files, test_label_files, num_train_samples):
         self.train_data_files = train_data_files
+        self.train_label_files = train_label_files
         self.test_data_files = test_data_files
+        self.test_label_files = test_label_files

         trn_idxs = np.arange(len(train_data_files))
         np.random.shuffle(trn_idxs)
@@ -526,8 +531,10 @@
         print('num test samples: ', tst_idxs.shape[0])
         print('setup_pipeline: Done')

-    def setup_test_pipeline(self, test_data_files):
+    def setup_test_pipeline(self, test_data_files, test_label_files):
         self.test_data_files = test_data_files
+        self.test_label_files = test_label_files
         tst_idxs = np.arange(len(test_data_files))
         self.get_test_dataset(tst_idxs)
         print('setup_test_pipeline: Done')
@@ -839,19 +846,24 @@
         return pred

     def run(self, directory, ckpt_dir=None, num_data_samples=50000):
-        train_data_files = glob.glob(directory+'data_train_*.npy')
-        valid_data_files = glob.glob(directory+'data_valid_*.npy')
-        self.setup_pipeline(train_data_files, valid_data_files, num_data_samples)
+        train_data_files = glob.glob(directory+'train*mres*.npy')
+        valid_data_files = glob.glob(directory+'valid*mres*.npy')
+        train_label_files = glob.glob(directory+'train*ires*.npy')
+        valid_label_files = glob.glob(directory+'valid*ires*.npy')
+        self.setup_pipeline(train_data_files, train_label_files, valid_data_files, valid_label_files, num_data_samples)
         self.build_model()
         self.build_training()
         self.build_evaluation()
         self.do_training(ckpt_dir=ckpt_dir)

     def run_restore(self, directory, ckpt_dir):
-        valid_data_files = glob.glob(directory + 'data_valid*.npy')
         self.num_data_samples = 1000
-        self.setup_test_pipeline(valid_data_files)
+        valid_data_files = glob.glob(directory + 'valid*mres*.npy')
+        valid_label_files = glob.glob(directory + 'valid*ires*.npy')
+        self.setup_test_pipeline(valid_data_files, valid_label_files)
         self.build_model()
         self.build_training()
         self.build_evaluation()
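For reference, a minimal usage sketch of the updated entry points. The directory path is hypothetical and the sorted() calls are not in the diff; they are one way to keep the mres data files and ires label files index-aligned, which get_in_mem_data_batch implicitly assumes when it pairs data_files[k] with label_files[k]:

import glob

directory = '/path/to/tiles/'   # hypothetical location of the .npy tiles
train_data_files = sorted(glob.glob(directory + 'train*mres*.npy'))
train_label_files = sorted(glob.glob(directory + 'train*ires*.npy'))
valid_data_files = sorted(glob.glob(directory + 'valid*mres*.npy'))
valid_label_files = sorted(glob.glob(directory + 'valid*ires*.npy'))

nn = SRCNN()   # assumes a no-argument constructor
nn.setup_pipeline(train_data_files, train_label_files,
                  valid_data_files, valid_label_files,
                  num_train_samples=50000)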