Commit 18584809 authored by tomrink

snapshot...

parent 50558178
@@ -217,21 +217,30 @@ class Trainer(object):
with tf.GradientTape() as gen_tape, tf.GradientTape() as disc_tape:
    fake = generator.unsigned_call(image_lr)
    logging.debug("Fetched Generator Fake")
    fake = utils.preprocess_input(fake)
    image_lr = utils.preprocess_input(image_lr)
    image_hr = utils.preprocess_input(image_hr)
    # TDR, not sure about these...
    # fake = utils.preprocess_input(fake)
    # image_lr = utils.preprocess_input(image_lr)
    # image_hr = utils.preprocess_input(image_hr)
    # ------------------------------------------------
    # TDR, not using perceptual loss with CLD OPD
    # percep_loss = tf.reduce_mean(perceptual_loss(image_hr, fake))
    # logging.debug("Calculated Perceptual Loss")
    l1_loss = utils.pixel_loss(image_hr, fake)
    logging.debug("Calculated Pixel Loss")
    loss_RaG = ra_gen(image_hr, fake)
    logging.debug("Calculated Relativistic "
                  "Average (RA) Loss for Generator")
    disc_loss = ra_disc(image_hr, fake)
    logging.debug("Calculated RA Loss Discriminator")
    # TDR, we don't have percep_loss
    # gen_loss = percep_loss + lambda_ * loss_RaG + eta * l1_loss
    gen_loss = lambda_ * loss_RaG + eta * l1_loss
    logging.debug("Calculated Generator Loss")
    disc_metric(disc_loss)
    gen_metric(gen_loss)
    gen_loss = gen_loss * (1.0 / self.batch_size)
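For context, the generator objective assembled in this hunk is the relativistic average (RA) adversarial term weighted by lambda_ plus an L1 pixel term weighted by eta, with the perceptual (feature-space) term dropped for the CLD OPD data. The sketch below shows one standard way those terms are computed. It is illustrative only: ra_gen and ra_disc in the hunk take the HR and generated images (and presumably evaluate the discriminator internally), whereas this sketch takes discriminator logits directly, and the weight defaults here are placeholders standing in for the hunk's lambda_ and eta, not the repository's settings.

import tensorflow as tf

# Binary cross-entropy on raw logits, used for both RA terms below.
bce = tf.keras.losses.BinaryCrossentropy(from_logits=True)


def pixel_loss(image_hr, fake):
    # L1 distance between the high-resolution target and the generated image
    # (stand-in for utils.pixel_loss in the hunk).
    return tf.reduce_mean(tf.abs(image_hr - fake))


def relativistic_average_losses(real_logits, fake_logits):
    # Relativistic average GAN: each logit is judged relative to the mean
    # logit of the opposite class.
    real_rel = real_logits - tf.reduce_mean(fake_logits)
    fake_rel = fake_logits - tf.reduce_mean(real_logits)
    # Generator term: relativistic fake logits should look real, real ones fake.
    loss_RaG = (bce(tf.ones_like(fake_rel), fake_rel)
                + bce(tf.zeros_like(real_rel), real_rel))
    # Discriminator term: the opposite assignment.
    disc_loss = (bce(tf.ones_like(real_rel), real_rel)
                 + bce(tf.zeros_like(fake_rel), fake_rel))
    return loss_RaG, disc_loss


def generator_loss(image_hr, fake, real_logits, fake_logits,
                   lambda_=0.005, eta=0.01, batch_size=4):
    # Combined generator loss as in the commit: no perceptual term,
    # only the weighted RA adversarial term plus the weighted L1 pixel term.
    loss_RaG, _ = relativistic_average_losses(real_logits, fake_logits)
    l1_loss = pixel_loss(image_hr, fake)
    gen_loss = lambda_ * loss_RaG + eta * l1_loss
    return gen_loss * (1.0 / batch_size)

The final 1.0 / batch_size scaling mirrors the last line of the hunk, which averages the loss over the batch before gradients are taken from the tape.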