Skip to content
Snippets Groups Projects
Commit d89b3892 authored by tomrink's avatar tomrink
Browse files

snapshot...

parent 84556761
No related branches found
No related tags found
No related merge requests found
# --- Training / preprocessing configuration ---
CACHE_DATA_IN_MEM = True          # cache (data, label) tuples in memory after first read
PROC_BATCH_SIZE = 2046            # records per preprocessing batch
PROC_BATCH_BUFFER_SIZE = 50000    # shuffle/prefetch buffer size
NumClasses = 3                    # 2 = icing / no-icing, 3 = no-icing / light / moderate+
NumLogits = 1                     # output units of the binary (sigmoid) head
BATCH_SIZE = 256
NUM_EPOCHS = 50
...@@ -210,8 +211,12 @@ class IcingIntensityNN: ...@@ -210,8 +211,12 @@ class IcingIntensityNN:
label = np.where(label == -1, 0, label)  # fold "missing" (-1) into class 0
if NumClasses == 2:
    # Binary: any non-zero icing intensity becomes class 1; column vector
    # to match the single-unit sigmoid head / BinaryCrossentropy.
    label = np.where(label != 0, 1, label)
    label = label.reshape((label.shape[0], 1))
elif NumClasses == 3:
    # BUG FIX: the original wrote `label == 1 | label == 2`, which Python parses
    # as the chained comparison `label == (1 | label) == 2` (bitwise | binds
    # tighter than ==); on ndarrays the implicit `and` raises "truth value of an
    # array is ambiguous". Each elementwise comparison must be parenthesized.
    label = np.where((label == 1) | (label == 2), 1, label)
    label = np.where(np.isin(label, (3, 4, 5, 6)), 2, label)
    # NOTE(review): no reshape here, unlike the binary path —
    # SparseCategoricalCrossentropy accepts shape (N,), but confirm downstream
    # code does not expect (N, 1).
if CACHE_DATA_IN_MEM:
    self.in_mem_data_cache[key] = (data, label)
...@@ -379,14 +384,17 @@ class IcingIntensityNN: ...@@ -379,14 +384,17 @@ class IcingIntensityNN:
# Output head. "logits" is historical naming — an activation is applied here,
# so these are probabilities, matching the from_logits=False losses in
# build_training.
if NumClasses == 2:
    activation = tf.nn.sigmoid   # binary head: single probability
    n_units = NumLogits
else:
    # BUG FIX(review): the original always built Dense(NumLogits=1, sigmoid),
    # which cannot feed SparseCategoricalCrossentropy when NumClasses == 3;
    # a multi-class head needs one softmax probability per class.
    activation = tf.nn.softmax
    n_units = NumClasses
logits = tf.keras.layers.Dense(n_units, activation=activation)(fc)
print(logits.shape)
self.logits = logits
def build_training(self):
    # Loss selection tracks NumClasses. The model head already applies its
    # activation (sigmoid/softmax), hence from_logits=False in both cases.
    if NumClasses == 2:
        self.loss = tf.keras.losses.BinaryCrossentropy(from_logits=False)
    else:
        self.loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False)

    # decayed_learning_rate = learning_rate * decay_rate ^ (global_step / decay_steps)
    initial_learning_rate = 0.002
...@@ -411,14 +419,22 @@ class IcingIntensityNN: ...@@ -411,14 +419,22 @@ class IcingIntensityNN:
self.initial_learning_rate = initial_learning_rate self.initial_learning_rate = initial_learning_rate
def build_evaluation(self):
    """Create the train/test metrics; the accuracy metric depends on NumClasses."""
    self.train_loss = tf.keras.metrics.Mean(name='train_loss')
    self.test_loss = tf.keras.metrics.Mean(name='test_loss')

    # Only the accuracy metric differs between the two regimes.
    if NumClasses == 2:
        accuracy_cls = tf.keras.metrics.BinaryAccuracy
    else:
        accuracy_cls = tf.keras.metrics.SparseCategoricalAccuracy
    self.train_accuracy = accuracy_cls(name='train_accuracy')
    self.test_accuracy = accuracy_cls(name='test_accuracy')

    # Hoisted out of both branches — the original duplicated these three
    # lines identically in the if and the else.
    # NOTE(review): AUC/Recall/Precision are binary metrics; for NumClasses == 3
    # they need one-vs-rest handling or per-class instances — confirm usage.
    self.test_auc = tf.keras.metrics.AUC(name='test_auc')
    self.test_recall = tf.keras.metrics.Recall(name='test_recall')
    self.test_precision = tf.keras.metrics.Precision(name='test_precision')
def build_predict(self):
    """Derive the predicted class index from the network output."""
    # NOTE(review): with a single sigmoid output unit, top_k always yields
    # index 0; this is meaningful only when the head emits one score per class.
    top_values, top_indices = tf.nn.top_k(self.logits)
    self.pred_class = top_indices
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment