Commit 8b5f563a authored by tomrink

snapshot...

parent d4ec91af
@@ -489,8 +489,8 @@ class IcingIntensityNN:
             proc_batch_cnt = 0
             n_samples = 0
-            for abi, temp, lbfp in self.train_dataset:
-                trn_ds = tf.data.Dataset.from_tensor_slices((abi, temp, lbfp))
+            for data0, data1, label in self.train_dataset:
+                trn_ds = tf.data.Dataset.from_tensor_slices((data0, data1, label))
                 trn_ds = trn_ds.batch(BATCH_SIZE)
                 for mini_batch in trn_ds:
                     if self.learningRateSchedule is not None:
@@ -506,8 +506,8 @@ class IcingIntensityNN:
                         self.test_loss.reset_states()
                         self.test_accuracy.reset_states()
-                        for abi_tst, temp_tst, lbfp_tst in self.test_dataset:
-                            tst_ds = tf.data.Dataset.from_tensor_slices((abi_tst, temp_tst, lbfp_tst))
+                        for data0_tst, data1_tst, label_tst in self.test_dataset:
+                            tst_ds = tf.data.Dataset.from_tensor_slices((data0_tst, data1_tst, label_tst))
                             tst_ds = tst_ds.batch(BATCH_SIZE)
                             for mini_batch_test in tst_ds:
                                 self.test_step(mini_batch_test)
@@ -524,7 +524,7 @@ class IcingIntensityNN:
                     print('train loss: ', loss.numpy())
                     proc_batch_cnt += 1
-                n_samples += abi.shape[0]
+                n_samples += data0.shape[0]
                 print('proc_batch_cnt: ', proc_batch_cnt, n_samples)
             t1 = datetime.datetime.now().timestamp()
@@ -533,8 +533,8 @@ class IcingIntensityNN:
         self.test_loss.reset_states()
         self.test_accuracy.reset_states()
-        for abi, temp, lbfp in self.test_dataset:
-            ds = tf.data.Dataset.from_tensor_slices((abi, temp, lbfp))
+        for data0, data1, label in self.test_dataset:
+            ds = tf.data.Dataset.from_tensor_slices((data0, data1, label))
             ds = ds.batch(BATCH_SIZE)
             for mini_batch in ds:
                 self.test_step(mini_batch)
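
The rename does not change behavior: on both the train and test sides, each (data0, data1, label) chunk yielded by the outer dataset is re-sliced along its first axis with from_tensor_slices and regrouped into mini-batches of BATCH_SIZE. A minimal, self-contained sketch of that chunk-then-batch pattern follows; the tensor shapes, dtypes, and BATCH_SIZE value are illustrative placeholders, not values taken from this commit.

import numpy as np
import tensorflow as tf

BATCH_SIZE = 4  # placeholder; the real value is defined elsewhere in the module

# Stand-ins for one (data0, data1, label) chunk yielded by self.train_dataset.
data0 = np.random.rand(10, 8).astype(np.float32)
data1 = np.random.rand(10, 8).astype(np.float32)
label = np.random.randint(0, 2, size=(10,)).astype(np.int32)

# Slice the chunk along its leading axis and regroup into mini-batches,
# mirroring the trn_ds / tst_ds construction in the diff above.
ds = tf.data.Dataset.from_tensor_slices((data0, data1, label)).batch(BATCH_SIZE)

for mini_batch in ds:
    b0, b1, lbl = mini_batch  # each tensor has a leading dimension <= BATCH_SIZE
    print(b0.shape, b1.shape, lbl.shape)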