SAE: WARNING, RETRAIN IS REQUIRED!

Fixed model sizes from the previous update.
Worked around a bug in the ML framework (Keras) that forced the model to be trained on random noise.

Converter: added blur on the same keys as sharpness

Added new model 'TrueFace'. This is a GAN model ported from https://github.com/NVlabs/FUNIT
The model produces near-zero morphing and a high level of facial detail.
It has a higher failure rate than other models.
Keep the src and dst facesets in the same lighting conditions.
Colombo 2019-09-19 11:13:56 +04:00
parent 201b762541
commit dc11ec32be
26 changed files with 1308 additions and 250 deletions


@@ -18,11 +18,23 @@ output_sample_types = [
 ]
 '''
 class SampleGeneratorFace(SampleGeneratorBase):
-    def __init__ (self, samples_path, debug, batch_size, sort_by_yaw=False, sort_by_yaw_target_samples_path=None, random_ct_samples_path=None, sample_process_options=SampleProcessor.Options(), output_sample_types=[], add_sample_idx=False, generators_count=2, generators_random_seed=None, **kwargs):
+    def __init__ (self, samples_path, debug=False, batch_size=1,
+                        sort_by_yaw=False,
+                        sort_by_yaw_target_samples_path=None,
+                        random_ct_samples_path=None,
+                        sample_process_options=SampleProcessor.Options(),
+                        output_sample_types=[],
+                        person_id_mode=False,
+                        add_sample_idx=False,
+                        generators_count=2,
+                        generators_random_seed=None,
+                        **kwargs):
         super().__init__(samples_path, debug, batch_size)
         self.sample_process_options = sample_process_options
         self.output_sample_types = output_sample_types
         self.add_sample_idx = add_sample_idx
+        self.person_id_mode = person_id_mode
         if sort_by_yaw_target_samples_path is not None:
             self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET
@@ -35,9 +47,10 @@ class SampleGeneratorFace(SampleGeneratorBase):
             raise ValueError("len(generators_random_seed) != generators_count")
         self.generators_random_seed = generators_random_seed
-        samples = SampleLoader.load (self.sample_type, self.samples_path, sort_by_yaw_target_samples_path)
+        samples = SampleLoader.load (self.sample_type, self.samples_path, sort_by_yaw_target_samples_path, person_id_mode=person_id_mode)
+        self.total_samples_count = len(samples)
         ct_samples = SampleLoader.load (SampleType.FACE, random_ct_samples_path) if random_ct_samples_path is not None else None
         self.random_ct_sample_chance = 100
@@ -49,7 +62,11 @@ class SampleGeneratorFace(SampleGeneratorBase):
             self.generators = [iter_utils.SubprocessGenerator ( self.batch_func, (i, samples[i::self.generators_count], ct_samples ) ) for i in range(self.generators_count) ]
         self.generator_counter = -1
+    #overridable
+    def get_total_sample_count(self):
+        return self.total_samples_count
     def __iter__(self):
         return self
@@ -58,7 +75,7 @@ class SampleGeneratorFace(SampleGeneratorBase):
         generator = self.generators[self.generator_counter % len(self.generators) ]
         return next(generator)
-    def batch_func(self, param ):
+    def batch_func(self, param ):
         generator_id, samples, ct_samples = param
         if self.generators_random_seed is not None:
@@ -82,7 +99,7 @@ class SampleGeneratorFace(SampleGeneratorBase):
         shuffle_idxs = []
         shuffle_idxs_2D = [[]]*samples_len
-        while True:
+        while True:
             batches = None
             for n_batch in range(self.batch_size):
                 while True:
@@ -131,12 +148,24 @@ class SampleGeneratorFace(SampleGeneratorBase):
                         if self.add_sample_idx:
                             batches += [ [] ]
                             i_sample_idx = len(batches)-1
+                        if self.person_id_mode:
+                            batches += [ [] ]
+                            i_person_id = len(batches)-1
                     for i in range(len(x)):
                         batches[i].append ( x[i] )
                     if self.add_sample_idx:
                         batches[i_sample_idx].append (idx)
+                    if self.person_id_mode:
+                        batches[i_person_id].append ( np.array([sample.person_id]) )
                     break
             yield [ np.array(batch) for batch in batches]
+    @staticmethod
+    def get_person_id_max_count(samples_path):
+        return SampleLoader.get_person_id_max_count(samples_path)
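
Not part of this commit: a minimal trainer-side sketch of how the new person_id_mode flag and get_person_id_max_count helper might be consumed. The import path, faceset path and output_sample_types are placeholders, not the project's actual trainer code.

from pathlib import Path
from samplelib import SampleGeneratorFace, SampleProcessor   # import path assumed

faceset_path = Path('workspace/data_src/aligned')             # placeholder faceset location

# Number of distinct identities in the faceset, e.g. to size a classification head.
persons_count = SampleGeneratorFace.get_person_id_max_count(faceset_path)

generator = SampleGeneratorFace(faceset_path, batch_size=8,
                                sample_process_options=SampleProcessor.Options(),
                                output_sample_types=[],        # placeholder; a real trainer passes its own list
                                person_id_mode=True)

# With person_id_mode=True the generator appends one extra array per batch,
# built from np.array([sample.person_id]) for every sample in the batch.
*feature_batches, person_ids = next(generator)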