class LookaheadOptimizer(keras.optimizers.Optimizer):
    """Lookahead wrapper around another Keras optimizer.

    Maintains a second, slowly-moving copy of every trainable variable and,
    once every `sync_period` iterations, pulls the fast weights toward that
    slow copy by a factor of `slow_step` ("k steps forward, 1 step back").

    tf_cpu_mode: 0 = everything on the default device,
                 1 = the slow-weight variables are allocated on CPU,
                 2 = the interpolation math is placed on CPU as well.
    """

    def __init__(self, optimizer, sync_period=5, slow_step=0.5, tf_cpu_mode=0, **kwargs):
        super(LookaheadOptimizer, self).__init__(**kwargs)
        self.optimizer = optimizer
        self.tf_cpu_mode = tf_cpu_mode
        with K.name_scope(self.__class__.__name__):
            # Stored as backend variables so they serialize with the graph.
            self.sync_period = K.variable(sync_period, dtype='int64', name='sync_period')
            self.slow_step = K.variable(slow_step, name='slow_step')

    # --- learning-rate interface is delegated to the wrapped optimizer ---

    @property
    def lr(self):
        return self.optimizer.lr

    @lr.setter
    def lr(self, lr):
        self.optimizer.lr = lr

    @property
    def learning_rate(self):
        return self.optimizer.learning_rate

    @learning_rate.setter
    def learning_rate(self, learning_rate):
        self.optimizer.learning_rate = learning_rate

    @property
    def iterations(self):
        return self.optimizer.iterations

    def get_updates(self, loss, params):
        # True exactly on every sync_period-th iteration.
        step = self.iterations + 1
        sync_cond = K.equal(step // self.sync_period * self.sync_period, step)

        # Slow copies of the fast weights; optionally pinned to CPU memory.
        scope = K.tf.device("/cpu:0") if self.tf_cpu_mode > 0 else None
        if scope is not None:
            scope.__enter__()
        slow_params = [K.variable(K.get_value(p), name='sp_{}'.format(i))
                       for i, p in enumerate(params)]
        if scope is not None:
            scope.__exit__(None, None, None)

        # Inner optimizer produces the "fast" update ops first.
        self.updates = self.optimizer.get_updates(loss, params)

        slow_updates = []
        for fast, slow in zip(params, slow_params):
            scope = K.tf.device("/cpu:0") if self.tf_cpu_mode == 2 else None
            if scope is not None:
                scope.__enter__()
            # Interpolated value: slow + slow_step * (fast - slow).
            blended = slow + self.slow_step * (fast - slow)
            if scope is not None:
                scope.__exit__(None, None, None)

            # On sync steps both the slow and the fast weights jump to the
            # interpolated value; on all other steps they are left unchanged.
            slow_updates.append(K.update(slow, K.switch(sync_cond, blended, slow)))
            slow_updates.append(K.update_add(fast, K.switch(sync_cond,
                                                            blended - fast,
                                                            K.zeros_like(fast))))

        self.updates += slow_updates
        # Expose inner-optimizer state plus the slow copies for (de)serialization.
        self.weights = self.optimizer.weights + slow_params
        return self.updates

    def get_config(self):
        config = {
            'optimizer': keras.optimizers.serialize(self.optimizer),
            'sync_period': int(K.get_value(self.sync_period)),
            'slow_step': float(K.get_value(self.slow_step)),
        }
        base_config = super(LookaheadOptimizer, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))

    @classmethod
    def from_config(cls, config):
        # Rebuild the wrapped optimizer first, then this wrapper around it.
        optimizer = keras.optimizers.deserialize(config.pop('optimizer'))
        return cls(optimizer, **config)
nnlib.LookaheadOptimizer = LookaheadOptimizer
A `MaxoutDense` layer takes the element-wise maximum of diff --git a/samplelib/SampleGeneratorFace.py b/samplelib/SampleGeneratorFace.py index eecd3bc..81cca20 100644 --- a/samplelib/SampleGeneratorFace.py +++ b/samplelib/SampleGeneratorFace.py @@ -24,8 +24,8 @@ class SampleGeneratorFace(SampleGeneratorBase): random_ct_samples_path=None, sample_process_options=SampleProcessor.Options(), output_sample_types=[], - person_id_mode=False, add_sample_idx=False, + use_caching=False, generators_count=2, generators_random_seed=None, **kwargs): @@ -34,7 +34,6 @@ class SampleGeneratorFace(SampleGeneratorBase): self.sample_process_options = sample_process_options self.output_sample_types = output_sample_types self.add_sample_idx = add_sample_idx - self.person_id_mode = person_id_mode if sort_by_yaw_target_samples_path is not None: self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET @@ -48,7 +47,7 @@ class SampleGeneratorFace(SampleGeneratorBase): self.generators_random_seed = generators_random_seed - samples = SampleLoader.load (self.sample_type, self.samples_path, sort_by_yaw_target_samples_path, person_id_mode=person_id_mode) + samples = SampleLoader.load (self.sample_type, self.samples_path, sort_by_yaw_target_samples_path, use_caching=use_caching) np.random.shuffle(samples) self.samples_len = len(samples) @@ -149,19 +148,12 @@ class SampleGeneratorFace(SampleGeneratorBase): if self.add_sample_idx: batches += [ [] ] i_sample_idx = len(batches)-1 - - if self.person_id_mode: - batches += [ [] ] - i_person_id = len(batches)-1 for i in range(len(x)): batches[i].append ( x[i] ) if self.add_sample_idx: batches[i_sample_idx].append (idx) - - if self.person_id_mode: - batches[i_person_id].append ( np.array([sample.person_id]) ) break diff --git a/samplelib/SampleGeneratorFacePerson.py b/samplelib/SampleGeneratorFacePerson.py index 2b038dd..aa77feb 100644 --- a/samplelib/SampleGeneratorFacePerson.py +++ b/samplelib/SampleGeneratorFacePerson.py @@ -22,8 +22,9 @@ class 
SampleGeneratorFacePerson(SampleGeneratorBase): sample_process_options=SampleProcessor.Options(), output_sample_types=[], person_id_mode=1, + use_caching=False, generators_count=2, - generators_random_seed=None, + generators_random_seed=None, **kwargs): super().__init__(samples_path, debug, batch_size) @@ -35,15 +36,28 @@ class SampleGeneratorFacePerson(SampleGeneratorBase): raise ValueError("len(generators_random_seed) != generators_count") self.generators_random_seed = generators_random_seed - samples = SampleLoader.load (SampleType.FACE, self.samples_path, person_id_mode=True) + samples = SampleLoader.load (SampleType.FACE, self.samples_path, person_id_mode=True, use_caching=use_caching) if person_id_mode==1: - new_samples = [] - for s in samples: - new_samples += s - samples = new_samples np.random.shuffle(samples) + new_samples = [] + while len(samples) > 0: + for i in range( len(samples)-1, -1, -1): + sample = samples[i] + + if len(sample) > 0: + new_samples.append(sample.pop(0)) + + if len(sample) == 0: + samples.pop(i) + samples = new_samples + #new_samples = [] + #for s in samples: + # new_samples += s + #samples = new_samples + #np.random.shuffle(samples) + self.samples_len = len(samples) if self.samples_len == 0: @@ -116,7 +130,7 @@ class SampleGeneratorFacePerson(SampleGeneratorBase): if self.person_id_mode==1: if len(shuffle_idxs) == 0: shuffle_idxs = samples_idxs.copy() - np.random.shuffle(shuffle_idxs) + #np.random.shuffle(shuffle_idxs) idx = shuffle_idxs.pop() sample = samples[ idx ] diff --git a/samplelib/SampleLoader.py b/samplelib/SampleLoader.py index c86b0e3..3350411 100644 --- a/samplelib/SampleLoader.py +++ b/samplelib/SampleLoader.py @@ -1,4 +1,5 @@ import operator +import pickle import traceback from enum import IntEnum from pathlib import Path @@ -23,7 +24,7 @@ class SampleLoader: return len ( Path_utils.get_all_dir_names(samples_path) ) @staticmethod - def load(sample_type, samples_path, target_samples_path=None, person_id_mode=False): + 
def load(sample_type, samples_path, target_samples_path=None, person_id_mode=False, use_caching=False):
         cache = SampleLoader.cache
 
         if str(samples_path) not in cache.keys():
@@ -36,15 +37,54 @@ class SampleLoader:
             datas[sample_type] = [ Sample(filename=filename) for filename in io.progress_bar_generator( Path_utils.get_image_paths(samples_path), "Loading") ]
         elif sample_type == SampleType.FACE:
             if datas[sample_type] is None:
-                if person_id_mode:
-                    dir_names = Path_utils.get_all_dir_names(samples_path)
-                    all_samples = []
-                    for i, dir_name in io.progress_bar_generator( [*enumerate(dir_names)] , "Loading"):
-                        all_samples += SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename, person_id=i) for filename in Path_utils.get_image_paths( samples_path / dir_name ) ], silent=True )
-                    datas[sample_type] = all_samples
-                else:
+
+                if not use_caching:
                     datas[sample_type] = SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename) for filename in Path_utils.get_image_paths(samples_path) ] )
+                else:
+                    samples_dat = samples_path / 'samples.dat'
+                    if samples_dat.exists():
+                        io.log_info (f"Using saved samples info from '{samples_dat}' ")
+
+                        all_samples = pickle.loads(samples_dat.read_bytes())
+
+                        if person_id_mode:
+                            for samples in all_samples:
+                                for sample in samples:
+                                    sample.filename = str( samples_path / Path(sample.filename) )
+                        else:
+                            for sample in all_samples:
+                                sample.filename = str( samples_path / Path(sample.filename) )
+
+                        datas[sample_type] = all_samples
+
+                    else:
+                        if person_id_mode:
+                            dir_names = Path_utils.get_all_dir_names(samples_path)
+                            all_samples = []
+                            for i, dir_name in io.progress_bar_generator( [*enumerate(dir_names)] , "Loading"):
+                                all_samples += [ SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename, person_id=i) for filename in Path_utils.get_image_paths( samples_path / dir_name ) ], silent=True ) ]
+                            datas[sample_type] = all_samples
+                        else:
+                            datas[sample_type] = all_samples = SampleLoader.upgradeToFaceSamples( [ 
Sample(filename=filename) for filename in Path_utils.get_image_paths(samples_path) ] ) + if person_id_mode: + for samples in all_samples: + for sample in samples: + sample.filename = str(Path(sample.filename).relative_to(samples_path)) + else: + for sample in all_samples: + sample.filename = str(Path(sample.filename).relative_to(samples_path)) + + samples_dat.write_bytes (pickle.dumps(all_samples)) + + if person_id_mode: + for samples in all_samples: + for sample in samples: + sample.filename = str( samples_path / Path(sample.filename) ) + else: + for sample in all_samples: + sample.filename = str( samples_path / Path(sample.filename) ) + elif sample_type == SampleType.FACE_TEMPORAL_SORTED: if datas[sample_type] is None: datas[sample_type] = SampleLoader.upgradeToFaceTemporalSortedSamples( SampleLoader.load(SampleType.FACE, samples_path) )