Mirror of https://github.com/iperov/DeepFaceLab.git
This commit is contained in:
parent 1bfd65abe5
commit 77b390c04b

4 changed files with 150 additions and 25 deletions
nnlib/nnlib.py

@@ -92,6 +92,7 @@ Model = keras.models.Model
 Adam = nnlib.Adam
 RMSprop = nnlib.RMSprop
+LookaheadOptimizer = nnlib.LookaheadOptimizer
 modelify = nnlib.modelify
 gaussian_blur = nnlib.gaussian_blur
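The alias line matters because nnlib both attaches symbols to itself (nnlib.LookaheadOptimizer, assigned at the end of the next hunk) and lists them in the code string that model files exec to pull the names into their own namespace. A minimal usage sketch, assuming the repo's exec-string convention; import_all() and the Adam arguments are assumptions for illustration, not taken from this diff:

# hypothetical usage inside a model file
from nnlib import nnlib

exec( nnlib.import_all(), locals(), globals() )   # binds Adam, LookaheadOptimizer, ...

# wrap any inner keras optimizer; sync_period/slow_step defaults match the class below
opt = LookaheadOptimizer( Adam(lr=5e-5, beta_1=0.5, beta_2=0.999),
                          sync_period=5, slow_step=0.5 )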
@@ -936,7 +937,85 @@ NLayerDiscriminator = nnlib.NLayerDiscriminator
             base_config = super(Adam, self).get_config()
             return dict(list(base_config.items()) + list(config.items()))
     nnlib.Adam = Adam
 
+    class LookaheadOptimizer(keras.optimizers.Optimizer):
+        def __init__(self, optimizer, sync_period=5, slow_step=0.5, tf_cpu_mode=0, **kwargs):
+            super(LookaheadOptimizer, self).__init__(**kwargs)
+            self.optimizer = optimizer
+            self.tf_cpu_mode = tf_cpu_mode
+
+            with K.name_scope(self.__class__.__name__):
+                self.sync_period = K.variable(sync_period, dtype='int64', name='sync_period')
+                self.slow_step = K.variable(slow_step, name='slow_step')
+
+        @property
+        def lr(self):
+            return self.optimizer.lr
+
+        @lr.setter
+        def lr(self, lr):
+            self.optimizer.lr = lr
+
+        @property
+        def learning_rate(self):
+            return self.optimizer.learning_rate
+
+        @learning_rate.setter
+        def learning_rate(self, learning_rate):
+            self.optimizer.learning_rate = learning_rate
+
+        @property
+        def iterations(self):
+            return self.optimizer.iterations
+
+        def get_updates(self, loss, params):
+            sync_cond = K.equal((self.iterations + 1) // self.sync_period * self.sync_period, (self.iterations + 1))
+
+            e = K.tf.device("/cpu:0") if self.tf_cpu_mode > 0 else None
+            if e: e.__enter__()
+            slow_params = [K.variable(K.get_value(p), name='sp_{}'.format(i)) for i, p in enumerate(params)]
+            if e: e.__exit__(None, None, None)
+
+            self.updates = self.optimizer.get_updates(loss, params)
+            slow_updates = []
+            for p, sp in zip(params, slow_params):
+                e = K.tf.device("/cpu:0") if self.tf_cpu_mode == 2 else None
+                if e: e.__enter__()
+                sp_t = sp + self.slow_step * (p - sp)
+                if e: e.__exit__(None, None, None)
+
+                slow_updates.append(K.update(sp, K.switch(
+                    sync_cond,
+                    sp_t,
+                    sp,
+                )))
+                slow_updates.append(K.update_add(p, K.switch(
+                    sync_cond,
+                    sp_t - p,
+                    K.zeros_like(p),
+                )))
+
+            self.updates += slow_updates
+            self.weights = self.optimizer.weights + slow_params
+            return self.updates
+
+        def get_config(self):
+            config = {
+                'optimizer': keras.optimizers.serialize(self.optimizer),
+                'sync_period': int(K.get_value(self.sync_period)),
+                'slow_step': float(K.get_value(self.slow_step)),
+            }
+            base_config = super(LookaheadOptimizer, self).get_config()
+            return dict(list(base_config.items()) + list(config.items()))
+
+        @classmethod
+        def from_config(cls, config):
+            optimizer = keras.optimizers.deserialize(config.pop('optimizer'))
+            return cls(optimizer, **config)
+    nnlib.LookaheadOptimizer = LookaheadOptimizer
 
     class DenseMaxout(keras.layers.Layer):
         """A dense maxout layer.
         A `MaxoutDense` layer takes the element-wise maximum of
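The wrapper implements the Lookahead scheme (Zhang et al., 2019): the inner optimizer advances the "fast" weights every step, while a "slow" copy is pulled toward them once every sync_period steps, after which the fast weights are reset to the new slow values. sync_cond tests divisibility without a modulo op: (i+1)//p*p == i+1 holds exactly when p divides i+1. tf_cpu_mode places the slow-weight variables (and, at mode 2, the interpolation) on the CPU to save GPU memory. A pure-NumPy sketch of the same update rule, with plain SGD standing in for the wrapped optimizer and all names illustrative:

import numpy as np

def lookahead_sgd(grad_fn, w, lr=0.1, sync_period=5, slow_step=0.5, steps=100):
    slow = w.copy()                         # mirrors slow_params
    for it in range(steps):
        w -= lr * grad_fn(w)                # inner 'fast' optimizer step
        if (it + 1) % sync_period == 0:     # same test as sync_cond
            slow += slow_step * (w - slow)  # sp_t = sp + slow_step * (p - sp)
            w[:] = slow                     # K.update_add(p, sp_t - p): fast snaps to new slow
    return w

w = lookahead_sgd(lambda w: 2.0 * w, np.ones(3))   # toy run: minimize ||w||^2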
samplelib/SampleGeneratorFace.py

@@ -24,8 +24,8 @@ class SampleGeneratorFace(SampleGeneratorBase):
                         random_ct_samples_path=None,
                         sample_process_options=SampleProcessor.Options(),
                         output_sample_types=[],
-                        person_id_mode=False,
                         add_sample_idx=False,
+                        use_caching=False,
                         generators_count=2,
                         generators_random_seed=None,
                         **kwargs):
@@ -34,7 +34,6 @@ class SampleGeneratorFace(SampleGeneratorBase):
         self.sample_process_options = sample_process_options
         self.output_sample_types = output_sample_types
         self.add_sample_idx = add_sample_idx
-        self.person_id_mode = person_id_mode
 
         if sort_by_yaw_target_samples_path is not None:
             self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET
@@ -48,7 +47,7 @@ class SampleGeneratorFace(SampleGeneratorBase):
         self.generators_random_seed = generators_random_seed
 
-        samples = SampleLoader.load (self.sample_type, self.samples_path, sort_by_yaw_target_samples_path, person_id_mode=person_id_mode)
+        samples = SampleLoader.load (self.sample_type, self.samples_path, sort_by_yaw_target_samples_path, use_caching=use_caching)
         np.random.shuffle(samples)
         self.samples_len = len(samples)
@@ -149,19 +148,12 @@ class SampleGeneratorFace(SampleGeneratorBase):
             if self.add_sample_idx:
                 batches += [ [] ]
                 i_sample_idx = len(batches)-1
 
-            if self.person_id_mode:
-                batches += [ [] ]
-                i_person_id = len(batches)-1
-
             for i in range(len(x)):
                 batches[i].append ( x[i] )
 
             if self.add_sample_idx:
                 batches[i_sample_idx].append (idx)
 
-            if self.person_id_mode:
-                batches[i_person_id].append ( np.array([sample.person_id]) )
-
             break
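The loop this hunk trims builds batches as one list per output array in the tuple x, plus an optional trailing slot for sample indices; the person-id slot is dropped here because that mode now lives entirely in SampleGeneratorFacePerson. A self-contained sketch of the surviving assembly pattern, with hypothetical names:

import numpy as np

def assemble_batch(sample_tuples, add_sample_idx=False):
    # sample_tuples: iterable of (sample index, tuple of per-output arrays)
    batches = None
    for idx, x in sample_tuples:
        if batches is None:
            batches = [ [] for _ in range(len(x)) ]
            if add_sample_idx:
                batches += [ [] ]
                i_sample_idx = len(batches)-1
        for i in range(len(x)):
            batches[i].append( x[i] )
        if add_sample_idx:
            batches[i_sample_idx].append( idx )
    return [ np.array(b) for b in batches ]   # one array per network output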
samplelib/SampleGeneratorFacePerson.py

@@ -22,8 +22,9 @@ class SampleGeneratorFacePerson(SampleGeneratorBase):
                         sample_process_options=SampleProcessor.Options(),
                         output_sample_types=[],
                         person_id_mode=1,
+                        use_caching=False,
                         generators_count=2,
                         generators_random_seed=None,
                         **kwargs):
 
         super().__init__(samples_path, debug, batch_size)
@@ -35,15 +36,28 @@ class SampleGeneratorFacePerson(SampleGeneratorBase):
             raise ValueError("len(generators_random_seed) != generators_count")
         self.generators_random_seed = generators_random_seed
 
-        samples = SampleLoader.load (SampleType.FACE, self.samples_path, person_id_mode=True)
+        samples = SampleLoader.load (SampleType.FACE, self.samples_path, person_id_mode=True, use_caching=use_caching)
 
         if person_id_mode==1:
-            new_samples = []
-            for s in samples:
-                new_samples += s
-            samples = new_samples
             np.random.shuffle(samples)
+
+            new_samples = []
+            while len(samples) > 0:
+                for i in range( len(samples)-1, -1, -1):
+                    sample = samples[i]
+
+                    if len(sample) > 0:
+                        new_samples.append(sample.pop(0))
+
+                    if len(sample) == 0:
+                        samples.pop(i)
+            samples = new_samples
+            #new_samples = []
+            #for s in samples:
+            #    new_samples += s
+            #samples = new_samples
+            #np.random.shuffle(samples)
 
         self.samples_len = len(samples)
 
         if self.samples_len == 0:
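The flattening changes from plain concatenation (kept as the commented-out lines) to a round-robin interleave: SampleLoader now returns one list of samples per person, the per-person lists are shuffled as units, and the while/for pair repeatedly takes the head of each remaining list until all are drained. The inner loop runs backwards so samples.pop(i) cannot skip an entry. Consecutive training samples therefore cycle through identities instead of grouping them. A toy run of the same loop:

people = [ ['a1','a2','a3'], ['b1'], ['c1','c2'] ]   # per-person sample lists

new_samples = []
while len(people) > 0:
    for i in range( len(people)-1, -1, -1 ):   # backwards: pop(i) stays safe
        person = people[i]
        if len(person) > 0:
            new_samples.append( person.pop(0) )
        if len(person) == 0:
            people.pop(i)

print(new_samples)   # ['c1', 'b1', 'a1', 'c2', 'a2', 'a3']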
@@ -116,7 +130,7 @@ class SampleGeneratorFacePerson(SampleGeneratorBase):
             if self.person_id_mode==1:
                 if len(shuffle_idxs) == 0:
                     shuffle_idxs = samples_idxs.copy()
-                    np.random.shuffle(shuffle_idxs)
+                    #np.random.shuffle(shuffle_idxs)
 
                 idx = shuffle_idxs.pop()
                 sample = samples[ idx ]
samplelib/SampleLoader.py

@@ -1,4 +1,5 @@
 import operator
+import pickle
 import traceback
 from enum import IntEnum
 from pathlib import Path
@@ -23,7 +24,7 @@ class SampleLoader:
         return len ( Path_utils.get_all_dir_names(samples_path) )
 
     @staticmethod
-    def load(sample_type, samples_path, target_samples_path=None, person_id_mode=False):
+    def load(sample_type, samples_path, target_samples_path=None, person_id_mode=True, use_caching=False):
         cache = SampleLoader.cache
 
         if str(samples_path) not in cache.keys():
@@ -36,15 +37,54 @@ class SampleLoader:
             datas[sample_type] = [ Sample(filename=filename) for filename in io.progress_bar_generator( Path_utils.get_image_paths(samples_path), "Loading") ]
         elif sample_type == SampleType.FACE:
             if datas[sample_type] is None:
-                if person_id_mode:
-                    dir_names = Path_utils.get_all_dir_names(samples_path)
-                    all_samples = []
-                    for i, dir_name in io.progress_bar_generator( [*enumerate(dir_names)] , "Loading"):
-                        all_samples += SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename, person_id=i) for filename in Path_utils.get_image_paths( samples_path / dir_name ) ], silent=True )
-                    datas[sample_type] = all_samples
-                else:
+                if not use_caching:
                     datas[sample_type] = SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename) for filename in Path_utils.get_image_paths(samples_path) ] )
+                else:
+                    samples_dat = samples_path / 'samples.dat'
+                    if samples_dat.exists():
+                        io.log_info (f"Using saved samples info from '{samples_dat}' ")
+
+                        all_samples = pickle.loads(samples_dat.read_bytes())
+
+                        if person_id_mode:
+                            for samples in all_samples:
+                                for sample in samples:
+                                    sample.filename = str( samples_path / Path(sample.filename) )
+                        else:
+                            for sample in all_samples:
+                                sample.filename = str( samples_path / Path(sample.filename) )
+
+                        datas[sample_type] = all_samples
+                    else:
+                        if person_id_mode:
+                            dir_names = Path_utils.get_all_dir_names(samples_path)
+                            all_samples = []
+                            for i, dir_name in io.progress_bar_generator( [*enumerate(dir_names)] , "Loading"):
+                                all_samples += [ SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename, person_id=i) for filename in Path_utils.get_image_paths( samples_path / dir_name ) ], silent=True ) ]
+                            datas[sample_type] = all_samples
+                        else:
+                            datas[sample_type] = all_samples = SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename) for filename in Path_utils.get_image_paths(samples_path) ] )
+
+                        if person_id_mode:
+                            for samples in all_samples:
+                                for sample in samples:
+                                    sample.filename = str(Path(sample.filename).relative_to(samples_path))
+                        else:
+                            for sample in all_samples:
+                                sample.filename = str(Path(sample.filename).relative_to(samples_path))
+
+                        samples_dat.write_bytes (pickle.dumps(all_samples))
+
+                        if person_id_mode:
+                            for samples in all_samples:
+                                for sample in samples:
+                                    sample.filename = str( samples_path / Path(sample.filename) )
+                        else:
+                            for sample in all_samples:
+                                sample.filename = str( samples_path / Path(sample.filename) )
+
 elif sample_type == SampleType.FACE_TEMPORAL_SORTED:
             if datas[sample_type] is None:
                 datas[sample_type] = SampleLoader.upgradeToFaceTemporalSortedSamples( SampleLoader.load(SampleType.FACE, samples_path) )
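With use_caching=True, the expensive metadata scan runs once and its result is pickled to samples.dat inside the samples directory. Filenames are rewritten relative to samples_path before pickling, so the cache survives the directory being moved, then rebased back to absolute paths for the caller; the post-write rebase is needed because the relative-path rewrite mutated the in-memory samples. Note that nothing in this hunk invalidates an existing samples.dat, so a stale cache presumably has to be deleted by hand when the faceset changes. A condensed sketch of the pattern, flat-list case only, with scan_fn as a hypothetical stand-in for upgradeToFaceSamples:

import pickle
from pathlib import Path

def load_with_cache(samples_dir, scan_fn):
    samples_dat = samples_dir / 'samples.dat'
    if samples_dat.exists():
        samples = pickle.loads( samples_dat.read_bytes() )
        for s in samples:                    # relative -> absolute for the caller
            s.filename = str( samples_dir / Path(s.filename) )
        return samples

    samples = scan_fn(samples_dir)           # slow scan; yields absolute .filename paths
    for s in samples:                        # absolute -> relative, for portability
        s.filename = str( Path(s.filename).relative_to(samples_dir) )
    samples_dat.write_bytes( pickle.dumps(samples) )
    for s in samples:                        # back to absolute; list was mutated in place
        s.filename = str( samples_dir / Path(s.filename) )
    return samples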