Mirror of https://github.com/iperov/DeepFaceLab.git (synced 2025-07-12 16:13:51 -07:00)
remove lr_dropout for plaidml backend
parent d3e6b435aa
commit b8182ae42b
2 changed files with 9 additions and 5 deletions
@@ -65,9 +65,12 @@ class SAEHDModel(ModelBase):
         default_bg_style_power = self.options.get('bg_style_power', 0.0)
 
         if is_first_run or ask_override:
-            default_lr_dropout = self.options.get('lr_dropout', False)
-            self.options['lr_dropout'] = io.input_bool ( f"Use learning rate dropout? (y/n, ?:help skip:{yn_str[default_lr_dropout]} ) : ", default_lr_dropout, help_message="When the face is trained enough, you can enable this option to get extra sharpness for less amount of iterations.")
+            if nnlib.device.backend != 'plaidML':
+                default_lr_dropout = self.options.get('lr_dropout', False)
+                self.options['lr_dropout'] = io.input_bool ( f"Use learning rate dropout? (y/n, ?:help skip:{yn_str[default_lr_dropout]} ) : ", default_lr_dropout, help_message="When the face is trained enough, you can enable this option to get extra sharpness for less amount of iterations.")
+            else:
+                self.options['lr_dropout'] = False
 
             default_random_warp = self.options.get('random_warp', True)
             self.options['random_warp'] = io.input_bool (f"Enable random warp of samples? ( y/n, ?:help skip:{yn_str[default_random_warp]}) : ", default_random_warp, help_message="Random warp is required to generalize facial expressions of both faces. When the face is trained enough, you can disable it to get extra sharpness for less amount of iterations.")
 
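For context, the change gates the lr_dropout prompt on the active backend: when the backend is not plaidML the user is asked as before, and on plaidML the option is forced off instead of being offered, presumably because the learning-rate-dropout code path is not supported on that backend. The snippet below is a minimal, self-contained sketch of that gating pattern only; resolve_lr_dropout and ask_bool are hypothetical stand-ins for DeepFaceLab's option handling and io.input_bool, not code from the repository.

# Minimal sketch of the backend-gated option pattern introduced in this commit.
# `backend` stands in for nnlib.device.backend; `ask_bool` stands in for
# io.input_bool. Both are simplified, hypothetical replacements.

def resolve_lr_dropout(options: dict, backend: str, ask_bool) -> dict:
    """Only prompt for lr_dropout when the backend supports it."""
    if backend != 'plaidML':
        default_lr_dropout = options.get('lr_dropout', False)
        options['lr_dropout'] = ask_bool(
            "Use learning rate dropout? (y/n) : ", default_lr_dropout)
    else:
        # plaidML backend: skip the prompt and force the feature off.
        options['lr_dropout'] = False
    return options

# Usage example: on a plaidML device the prompt is skipped entirely.
opts = resolve_lr_dropout({}, 'plaidML', ask_bool=lambda msg, default: default)
assert opts['lr_dropout'] is False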