diff --git a/modules/deeplearning/cloud_fraction_fcn_abi.py b/modules/deeplearning/cloud_fraction_fcn_abi.py
index 9046bd584ffcaf3aeab6798da1c033665239c6f0..3f86ac83a2dcb1ea1aa511398595a0f293aa8e94 100644
--- a/modules/deeplearning/cloud_fraction_fcn_abi.py
+++ b/modules/deeplearning/cloud_fraction_fcn_abi.py
@@ -604,7 +604,7 @@ class SRCNN:
 
         step = 0
         total_time = 0
-        best_test_loss = np.finfo(dtype=np.float).max
+        best_test_loss = np.finfo(dtype=np.float64).max
 
         if EARLY_STOP:
             es = EarlyStop()
@@ -629,7 +629,7 @@ class SRCNN:
 
                 with self.writer_train.as_default():
                     tf.summary.scalar('loss_trn', loss.numpy(), step=step)
-                    tf.summary.scalar('learning_rate', self.optimizer._decayed_lr('float32').numpy(), step=step)
+                    tf.summary.scalar('learning_rate', self.optimizer.lr.numpy(), step=step)
                     tf.summary.scalar('num_train_steps', step, step=step)
                     tf.summary.scalar('num_epochs', epoch, step=step)
 
@@ -649,7 +649,7 @@ class SRCNN:
                     tf.summary.scalar('loss_val', self.test_loss.result(), step=step)
                     print('****** test loss, acc, lr: ', self.test_loss.result().numpy(), self.test_accuracy.result().numpy(),
-                          self.optimizer._decayed_lr('float32').numpy())
+                          self.optimizer.lr.numpy())
                     step += 1
 
                 print('train loss: ', loss.numpy())