Skip to content
Snippets Groups Projects
Commit 92bddedb authored by tomrink's avatar tomrink
Browse files

snapshot...

parent c48d91b8
No related branches found
No related tags found
No related merge requests found
...@@ -602,10 +602,10 @@ class SRCNN: ...@@ -602,10 +602,10 @@ class SRCNN:
self.test_loss(t_loss) self.test_loss(t_loss)
self.test_accuracy(labels, pred) self.test_accuracy(labels, pred)
def predict(self, mini_batch): @tf.function(input_signature=[tf.TensorSpec(None, tf.float32), tf.TensorSpec(None, tf.float32)])
inputs = [mini_batch[0]] def predict(self, inputs, labels):
labels = mini_batch[1] labels = tf.squeeze(labels)
pred = self.model(inputs, training=False) pred = self.model([inputs], training=False)
t_loss = self.loss(labels, pred) t_loss = self.loss(labels, pred)
self.test_labels.append(labels) self.test_labels.append(labels)
...@@ -746,7 +746,7 @@ class SRCNN: ...@@ -746,7 +746,7 @@ class SRCNN:
ds = tf.data.Dataset.from_tensor_slices((data, label)) ds = tf.data.Dataset.from_tensor_slices((data, label))
ds = ds.batch(BATCH_SIZE) ds = ds.batch(BATCH_SIZE)
for mini_batch_test in ds: for mini_batch_test in ds:
self.predict(mini_batch_test) self.predict(mini_batch_test[0], mini_batch_test[1])
print('loss, acc: ', self.test_loss.result().numpy(), self.test_accuracy.result().numpy()) print('loss, acc: ', self.test_loss.result().numpy(), self.test_accuracy.result().numpy())
...@@ -754,10 +754,6 @@ class SRCNN: ...@@ -754,10 +754,6 @@ class SRCNN:
preds = np.concatenate(self.test_preds) preds = np.concatenate(self.test_preds)
print(labels.shape, preds.shape) print(labels.shape, preds.shape)
# if label_param != 'cloud_probability':
# labels_denorm = denormalize(labels, label_param, mean_std_dct)
# preds_denorm = denormalize(preds, label_param, mean_std_dct)
return labels, preds return labels, preds
def do_evaluate(self, data, ckpt_dir): def do_evaluate(self, data, ckpt_dir):
......
Loading...
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment