From fd3b4ad8933ad3e47f2ac998d8c1a2050ad665cb Mon Sep 17 00:00:00 2001
From: jh
Date: Sat, 13 Mar 2021 17:25:18 -0800
Subject: [PATCH] uncomment lines

---
 models/Model_SAEHD/Model.py | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/models/Model_SAEHD/Model.py b/models/Model_SAEHD/Model.py
index 1d88fbe..afdba77 100644
--- a/models/Model_SAEHD/Model.py
+++ b/models/Model_SAEHD/Model.py
@@ -448,10 +448,10 @@ Examples: df, liae, df-d, df-ud, liae-ud, ...
                         else:
                             gpu_src_loss += tf.reduce_mean ( 300*tf.abs ( gpu_target_src*gpu_target_part_mask - gpu_pred_src_src*gpu_target_part_mask ), axis=[1,2,3])
 
-                    # if self.options['ms_ssim_loss']:
-                    #     gpu_src_loss += tf.reduce_mean ( 10*nn.MsSsim(resolution)(gpu_target_srcm, gpu_pred_src_srcm, max_val=1.0))
-                    # else:
-                    gpu_src_loss += tf.reduce_mean ( 10*tf.square( gpu_target_srcm - gpu_pred_src_srcm ),axis=[1,2,3] )
+                    if self.options['ms_ssim_loss']:
+                        gpu_src_loss += tf.reduce_mean ( 10*nn.MsSsim(resolution)(gpu_target_srcm, gpu_pred_src_srcm, max_val=1.0))
+                    else:
+                        gpu_src_loss += tf.reduce_mean ( 10*tf.square( gpu_target_srcm - gpu_pred_src_srcm ),axis=[1,2,3] )
 
                     face_style_power = self.options['face_style_power'] / 100.0
                     if face_style_power != 0 and not self.pretrain:
@@ -486,10 +486,10 @@ Examples: df, liae, df-d, df-ud, liae-ud, ...
                         else:
                             gpu_dst_loss += tf.reduce_mean ( 300*tf.abs ( gpu_target_dst*gpu_target_part_mask - gpu_pred_dst_dst*gpu_target_part_mask ), axis=[1,2,3])
 
-                    # if self.options['ms_ssim_loss']:
-                    #     gpu_dst_loss += tf.reduce_mean ( 10*nn.MsSsim(resolution)(gpu_target_dstm, gpu_pred_dst_dstm, max_val=1.0))
-                    # else:
-                    gpu_dst_loss += tf.reduce_mean ( 10*tf.square( gpu_target_dstm - gpu_pred_dst_dstm ),axis=[1,2,3] )
+                    if self.options['ms_ssim_loss']:
+                        gpu_dst_loss += tf.reduce_mean ( 10*nn.MsSsim(resolution)(gpu_target_dstm, gpu_pred_dst_dstm, max_val=1.0))
+                    else:
+                        gpu_dst_loss += tf.reduce_mean ( 10*tf.square( gpu_target_dstm - gpu_pred_dst_dstm ),axis=[1,2,3] )
 
                     gpu_src_losses += [gpu_src_loss]
                     gpu_dst_losses += [gpu_dst_loss]
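
Notes (not part of the commit): the uncommented branch switches the mask loss term
between an MS-SSIM term (DeepFaceLab's own nn.MsSsim wrapper) and the plain
per-pixel L2 term. The following is a rough standalone sketch of that switch; it
uses TensorFlow's built-in tf.image.ssim_multiscale instead of nn.MsSsim, assumes
NHWC layout, and assumes a (1 - similarity) conversion, since the MS-SSIM value is
added directly to gpu_src_loss / gpu_dst_loss in the patch. It is for illustration
only, not the repo's exact implementation.

import tensorflow as tf

def mask_loss(target_mask, pred_mask, use_ms_ssim):
    # Per-sample mask loss, mirroring the branch enabled by this patch.
    # target_mask, pred_mask: float32, shape (N, H, W, 1), values in [0, 1].
    if use_ms_ssim:
        # ssim_multiscale returns similarity in [0, 1]; convert it to a loss so
        # a perfect prediction scores 0. (Assumption: nn.MsSsim behaves the same
        # way, since its output is added directly to the loss.)
        return 10.0 * (1.0 - tf.image.ssim_multiscale(target_mask, pred_mask,
                                                      max_val=1.0))
    # Plain per-pixel L2, averaged over H, W, C (the else branch in the patch).
    return tf.reduce_mean(10.0 * tf.square(target_mask - pred_mask),
                          axis=[1, 2, 3])

# Usage: the mask resolution must survive the four downsamplings of the default
# five MS-SSIM scales (>= 11 * 2**4 = 176 px), so e.g. 256x256 masks are fine.
t = tf.random.uniform((4, 256, 256, 1))
p = tf.random.uniform((4, 256, 256, 1))
print(mask_loss(t, p, use_ms_ssim=True))
print(mask_loss(t, p, use_ms_ssim=False))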