Commit 82a04e31 authored by tomrink

minor

parent b828a3d7
@@ -217,17 +217,17 @@ class IcingIntensityNN:
        tf.debugging.set_log_device_placement(LOG_DEVICE_PLACEMENT)
        gpus = tf.config.experimental.list_physical_devices('GPU')
        if gpus:
            try:
                # Currently, memory growth needs to be the same across GPUs
                for gpu in gpus:
                    tf.config.experimental.set_memory_growth(gpu, True)
                logical_gpus = tf.config.experimental.list_logical_devices('GPU')
                print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs")
            except RuntimeError as e:
                # Memory growth must be set before GPUs have been initialized
                print(e)
        # gpus = tf.config.experimental.list_physical_devices('GPU')
        # if gpus:
        #     try:
        #         # Currently, memory growth needs to be the same across GPUs
        #         for gpu in gpus:
        #             tf.config.experimental.set_memory_growth(gpu, True)
        #         logical_gpus = tf.config.experimental.list_logical_devices('GPU')
        #         print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs")
        #     except RuntimeError as e:
        #         # Memory growth must be set before GPUs have been initialized
        #         print(e)
    def get_in_mem_data_batch(self, idxs, is_training):
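For reference, the block being toggled in this commit follows TensorFlow's standard pattern for enabling GPU memory growth, which makes TensorFlow allocate GPU memory on demand instead of reserving all of it at process start. A minimal standalone sketch of that pattern is shown below; the helper name enable_gpu_memory_growth is hypothetical and not part of this repository, and TensorFlow 2.x is assumed.

import tensorflow as tf

def enable_gpu_memory_growth():
    # Hypothetical helper (assumes TensorFlow 2.x): configure every visible GPU
    # to allocate memory on demand rather than claiming it all up front.
    gpus = tf.config.experimental.list_physical_devices('GPU')
    if not gpus:
        return
    try:
        # Memory growth must be set the same way for all GPUs, and before
        # any GPU has been initialized.
        for gpu in gpus:
            tf.config.experimental.set_memory_growth(gpu, True)
        logical_gpus = tf.config.experimental.list_logical_devices('GPU')
        print(len(gpus), "Physical GPUs,", len(logical_gpus), "Logical GPUs")
    except RuntimeError as e:
        # Raised if the GPUs were already initialized when this ran.
        print(e)

if __name__ == '__main__':
    enable_gpu_memory_growth()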