Mirror of https://github.com/iperov/DeepFaceLab.git (synced 2025-08-14 02:37:00 -07:00)

Commit 38b85108b3 ("DFL-2.0 initial branch commit"), parent 52a67a61b3
154 changed files with 5251 additions and 9414 deletions
samplelib/PackedFaceset.py
@@ -4,9 +4,9 @@ import struct
 from pathlib import Path

 import samplelib.SampleHost
-from interact import interact as io
+from core.interact import interact as io
 from samplelib import Sample
-from utils import Path_utils
+from core import pathex

 packed_faceset_filename = 'faceset.pak'

@@ -19,20 +19,20 @@ class PackedFaceset():
         if samples_dat_path.exists():
             io.log_info(f"{samples_dat_path} : file already exists !")
-            io.input_bool("Press enter to continue and overwrite.", False)
+            io.input("Press enter to continue and overwrite.")

         as_person_faceset = False
-        dir_names = Path_utils.get_all_dir_names(samples_path)
+        dir_names = pathex.get_all_dir_names(samples_path)
         if len(dir_names) != 0:
-            as_person_faceset = io.input_bool(f"{len(dir_names)} subdirectories found, process as person faceset? (y/n) skip:y : ", True)
+            as_person_faceset = io.input_bool(f"{len(dir_names)} subdirectories found, process as person faceset?", True)

         if as_person_faceset:
             image_paths = []

             for dir_name in dir_names:
-                image_paths += Path_utils.get_image_paths(samples_path / dir_name)
+                image_paths += pathex.get_image_paths(samples_path / dir_name)
         else:
-            image_paths = Path_utils.get_image_paths(samples_path)
+            image_paths = pathex.get_image_paths(samples_path)

         samples = samplelib.SampleHost.load_face_samples(image_paths)
         samples_len = len(samples)
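Note: the change repeated throughout this commit moves shared helpers out of top-level modules (utils.Path_utils, interact, joblib, mp_utils, imagelib) into a core package (core.pathex, core.interact, core.joblib, core.mplib, core.imagelib), usually with a rename but the same API. Below is a minimal sketch of a compatibility shim for code that must import against either layout; the try/except fallback is illustrative and not part of this commit.

    # Hypothetical shim, not part of this commit: try the DFL-2.0 layout
    # first, then fall back to the 1.0 names, which expose the same helpers.
    try:
        from core import pathex
        from core.interact import interact as io
    except ImportError:
        from utils import Path_utils as pathex
        from interact import interact as io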
samplelib/Sample.py
@@ -4,10 +4,10 @@ from pathlib import Path
 import cv2
 import numpy as np

-from utils.cv2_utils import *
+from core.cv2ex import *
 from DFLIMG import *
 from facelib import LandmarksProcessor
-from imagelib import IEPolys
+from core.imagelib import IEPolys

 class SampleType(IntEnum):
     IMAGE = 0 #raw image
samplelib/SampleGeneratorBase.py
@@ -15,20 +15,16 @@ class SampleGeneratorBase(object):
         self.batch_size = 1 if self.debug else batch_size
         self.last_generation = None
         self.active = True

     def set_active(self, is_active):
         self.active = is_active

     def generate_next(self):
         if not self.active and self.last_generation is not None:
             return self.last_generation
         self.last_generation = next(self)
         return self.last_generation

     #overridable
     def get_total_sample_count(self):
         return 0

     #overridable
     def __iter__(self):
         #implement your own iterator
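Note: set_active / generate_next implement a freeze switch: while a generator is inactive, generate_next keeps returning the cached last_generation instead of advancing the underlying iterator. A runnable sketch of the same pattern; CountingGenerator is a hypothetical stand-in for a real sample generator.

    class CountingGenerator:
        def __init__(self):
            self.active = True
            self.last_generation = None
            self.n = 0

        def set_active(self, is_active):
            self.active = is_active

        def generate_next(self):
            # While inactive, replay the cached batch instead of computing a new one.
            if not self.active and self.last_generation is not None:
                return self.last_generation
            self.last_generation = next(self)
            return self.last_generation

        def __iter__(self):
            return self

        def __next__(self):
            self.n += 1
            return self.n

    g = CountingGenerator()
    assert g.generate_next() == 1
    g.set_active(False)
    assert g.generate_next() == 1  # frozen: the same batch is replayed
    g.set_active(True)
    assert g.generate_next() == 2  # advancing again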
samplelib/SampleGeneratorFace.py
@@ -1,13 +1,16 @@
 import multiprocessing
-import traceback
 import pickle
+import time
+import traceback

 import cv2
 import numpy as np
-import time

+from core import mplib
+from core.joblib import SubprocessGenerator, ThisThreadGenerator
 from facelib import LandmarksProcessor
 from samplelib import (SampleGeneratorBase, SampleHost, SampleProcessor,
                        SampleType)
-from utils import iter_utils, mp_utils

 '''

@@ -34,37 +37,33 @@ class SampleGeneratorFace(SampleGeneratorBase):
         if self.debug:
             self.generators_count = 1
         else:
-            self.generators_count = np.clip(multiprocessing.cpu_count(), 2, generators_count)
+            self.generators_count = max(1, generators_count)

         samples = SampleHost.load (SampleType.FACE, self.samples_path)
         self.samples_len = len(samples)

         if self.samples_len == 0:
             raise ValueError('No training data provided.')

-        index_host = mp_utils.IndexHost(self.samples_len)
+        index_host = mplib.IndexHost(self.samples_len)

         if random_ct_samples_path is not None:
             ct_samples = SampleHost.load (SampleType.FACE, random_ct_samples_path)
-            ct_index_host = mp_utils.IndexHost( len(ct_samples) )
+            ct_index_host = mplib.IndexHost( len(ct_samples) )
         else:
             ct_samples = None
             ct_index_host = None

         pickled_samples = pickle.dumps(samples, 4)
         ct_pickled_samples = pickle.dumps(ct_samples, 4) if ct_samples is not None else None

         if self.debug:
-            self.generators = [iter_utils.ThisThreadGenerator ( self.batch_func, (pickled_samples, index_host.create_cli(), ct_pickled_samples, ct_index_host.create_cli() if ct_index_host is not None else None) )]
+            self.generators = [ThisThreadGenerator ( self.batch_func, (pickled_samples, index_host.create_cli(), ct_pickled_samples, ct_index_host.create_cli() if ct_index_host is not None else None) )]
         else:
-            self.generators = [iter_utils.SubprocessGenerator ( self.batch_func, (pickled_samples, index_host.create_cli(), ct_pickled_samples, ct_index_host.create_cli() if ct_index_host is not None else None), start_now=True ) for i in range(self.generators_count) ]
+            self.generators = [SubprocessGenerator ( self.batch_func, (pickled_samples, index_host.create_cli(), ct_pickled_samples, ct_index_host.create_cli() if ct_index_host is not None else None), start_now=True ) for i in range(self.generators_count) ]

         self.generator_counter = -1

     #overridable
     def get_total_sample_count(self):
         return self.samples_len

     def __iter__(self):
         return self

@@ -75,8 +74,8 @@ class SampleGeneratorFace(SampleGeneratorBase):
     def batch_func(self, param ):
         pickled_samples, index_host, ct_pickled_samples, ct_index_host = param

         samples = pickle.loads(pickled_samples)
         ct_samples = pickle.loads(ct_pickled_samples) if ct_pickled_samples is not None else None

         bs = self.batch_size

@@ -89,9 +88,9 @@ class SampleGeneratorFace(SampleGeneratorBase):
             t = time.time()
             for n_batch in range(bs):
                 sample_idx = indexes[n_batch]
                 sample = samples[sample_idx]

                 ct_sample = None
                 if ct_samples is not None:
                     ct_sample = ct_samples[ct_indexes[n_batch]]
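Note: the rewritten __init__ serializes the sample list once with pickle.dumps(samples, 4) and hands the same bytes to every generator, which calls pickle.loads inside batch_func, so each worker process deserializes its own private copy instead of sharing live objects. A library-free sketch of that shape, using multiprocessing.Queue in place of DFL's SubprocessGenerator; the worker function is illustrative.

    import multiprocessing
    import pickle

    def worker(pickled_samples, out_q):
        samples = pickle.loads(pickled_samples)  # each process gets its own copy
        out_q.put(len(samples))

    if __name__ == '__main__':
        samples = [{'filename': f'face_{i}.jpg'} for i in range(100)]  # stand-in samples
        pickled = pickle.dumps(samples, 4)  # protocol 4, as in the diff above

        q = multiprocessing.Queue()
        procs = [multiprocessing.Process(target=worker, args=(pickled, q)) for _ in range(2)]
        for p in procs: p.start()
        for p in procs: p.join()
        assert q.get() == 100 and q.get() == 100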
samplelib/SampleGeneratorFacePerson.py
@@ -5,10 +5,11 @@ import traceback
 import cv2
 import numpy as np

+from core import mplib
+from core.joblib import SubprocessGenerator, ThisThreadGenerator
 from facelib import LandmarksProcessor
 from samplelib import (SampleGeneratorBase, SampleHost, SampleProcessor,
                        SampleType)
-from utils import iter_utils, mp_utils

 '''

@@ -19,12 +20,12 @@ output_sample_types = [
 ]
 '''
 class SampleGeneratorFacePerson(SampleGeneratorBase):
     def __init__ (self, samples_path, debug=False, batch_size=1,
                         sample_process_options=SampleProcessor.Options(),
                         output_sample_types=[],
                         person_id_mode=1,
                         **kwargs):

         super().__init__(samples_path, debug, batch_size)
         self.sample_process_options = sample_process_options
         self.output_sample_types = output_sample_types

@@ -39,13 +40,13 @@ class SampleGeneratorFacePerson(SampleGeneratorBase):
         if self.samples_len == 0:
             raise ValueError('No training data provided.')

         unique_person_names = { sample.person_name for sample in samples }
         persons_name_idxs = { person_name : [] for person_name in unique_person_names }
         for i,sample in enumerate(samples):
             persons_name_idxs[sample.person_name].append (i)
         indexes2D = [ persons_name_idxs[person_name] for person_name in unique_person_names ]
-        index2d_host = mp_utils.Index2DHost(indexes2D)
+        index2d_host = mplib.Index2DHost(indexes2D)

         if self.debug:
             self.generators_count = 1
             self.generators = [iter_utils.ThisThreadGenerator ( self.batch_func, (samples_host.create_cli(), index2d_host.create_cli(),) )]

@@ -54,11 +55,7 @@ class SampleGeneratorFacePerson(SampleGeneratorBase):
             self.generators = [iter_utils.SubprocessGenerator ( self.batch_func, (samples_host.create_cli(), index2d_host.create_cli(),), start_now=True ) for i in range(self.generators_count) ]

         self.generator_counter = -1

-    #overridable
-    def get_total_sample_count(self):
-        return self.samples_len
-
     def __iter__(self):
         return self

@@ -67,14 +64,14 @@ class SampleGeneratorFacePerson(SampleGeneratorBase):
         generator = self.generators[self.generator_counter % len(self.generators) ]
         return next(generator)

     def batch_func(self, param ):
         samples, index2d_host, = param
         bs = self.batch_size

         while True:
             person_idxs = index2d_host.get_1D(bs)
             samples_idxs = index2d_host.get_2D(person_idxs, 1)

             batches = None
             for n_batch in range(bs):
                 person_id = person_idxs[n_batch]

@@ -85,10 +82,10 @@ class SampleGeneratorFacePerson(SampleGeneratorBase):
                     x, = SampleProcessor.process ([sample], self.sample_process_options, self.output_sample_types, self.debug)
                 except:
                     raise Exception ("Exception occured in sample %s. Error: %s" % (sample.filename, traceback.format_exc() ) )

                 if batches is None:
                     batches = [ [] for _ in range(len(x)) ]

                     batches += [ [] ]
                     i_person_id = len(batches)-1

@@ -96,9 +93,9 @@ class SampleGeneratorFacePerson(SampleGeneratorBase):
                     batches[i].append ( x[i] )

                 batches[i_person_id].append ( np.array([person_id]) )

             yield [ np.array(batch) for batch in batches]

     @staticmethod
     def get_person_id_max_count(samples_path):
         return SampleHost.get_person_id_max_count(samples_path)

@@ -110,43 +107,43 @@ if self.person_id_mode==1:
     shuffle_idxs = []
 elif self.person_id_mode==2:
     persons_count = len(samples)

     person_idxs = []
     for j in range(persons_count):
         for i in range(j+1,persons_count):
             person_idxs += [ [i,j] ]

     shuffle_person_idxs = []

     samples_idxs = [None]*persons_count
     shuffle_idxs = [None]*persons_count

     for i in range(persons_count):
         samples_idxs[i] = [*range(len(samples[i]))]
         shuffle_idxs[i] = []
 elif self.person_id_mode==3:
     persons_count = len(samples)

     person_idxs = [ *range(persons_count) ]
     shuffle_person_idxs = []

     samples_idxs = [None]*persons_count
     shuffle_idxs = [None]*persons_count

     for i in range(persons_count):
         samples_idxs[i] = [*range(len(samples[i]))]
         shuffle_idxs[i] = []

 if self.person_id_mode==2:
     if len(shuffle_person_idxs) == 0:
         shuffle_person_idxs = person_idxs.copy()
         np.random.shuffle(shuffle_person_idxs)
     person_ids = shuffle_person_idxs.pop()

 batches = None
 for n_batch in range(self.batch_size):

     if self.person_id_mode==1:
         if len(shuffle_idxs) == 0:
             shuffle_idxs = samples_idxs.copy()

@@ -154,7 +151,7 @@ if self.person_id_mode==2:
         idx = shuffle_idxs.pop()
         sample = samples[ idx ]

         try:
             x, = SampleProcessor.process ([sample], self.sample_process_options, self.output_sample_types, self.debug)
         except:

@@ -165,7 +162,7 @@ if self.person_id_mode==2:
         if batches is None:
             batches = [ [] for _ in range(len(x)) ]

             batches += [ [] ]
             i_person_id = len(batches)-1

@@ -174,30 +171,30 @@ if self.person_id_mode==2:
         batches[i_person_id].append ( np.array([sample.person_id]) )

     elif self.person_id_mode==2:
         person_id1, person_id2 = person_ids

         if len(shuffle_idxs[person_id1]) == 0:
             shuffle_idxs[person_id1] = samples_idxs[person_id1].copy()
             np.random.shuffle(shuffle_idxs[person_id1])

         idx = shuffle_idxs[person_id1].pop()
         sample1 = samples[person_id1][idx]

         if len(shuffle_idxs[person_id2]) == 0:
             shuffle_idxs[person_id2] = samples_idxs[person_id2].copy()
             np.random.shuffle(shuffle_idxs[person_id2])

         idx = shuffle_idxs[person_id2].pop()
         sample2 = samples[person_id2][idx]

         if sample1 is not None and sample2 is not None:
             try:
                 x1, = SampleProcessor.process ([sample1], self.sample_process_options, self.output_sample_types, self.debug)
             except:
                 raise Exception ("Exception occured in sample %s. Error: %s" % (sample1.filename, traceback.format_exc() ) )

             try:
                 x2, = SampleProcessor.process ([sample2], self.sample_process_options, self.output_sample_types, self.debug)
             except:

@@ -205,50 +202,50 @@ if self.person_id_mode==2:
             x1_len = len(x1)
             if batches is None:
                 batches = [ [] for _ in range(x1_len) ]
                 batches += [ [] ]
                 i_person_id1 = len(batches)-1

                 batches += [ [] for _ in range(len(x2)) ]
                 batches += [ [] ]
                 i_person_id2 = len(batches)-1

             for i in range(x1_len):
                 batches[i].append ( x1[i] )

             for i in range(len(x2)):
                 batches[x1_len+1+i].append ( x2[i] )

             batches[i_person_id1].append ( np.array([sample1.person_id]) )

             batches[i_person_id2].append ( np.array([sample2.person_id]) )

     elif self.person_id_mode==3:
         if len(shuffle_person_idxs) == 0:
             shuffle_person_idxs = person_idxs.copy()
             np.random.shuffle(shuffle_person_idxs)
         person_id = shuffle_person_idxs.pop()

         if len(shuffle_idxs[person_id]) == 0:
             shuffle_idxs[person_id] = samples_idxs[person_id].copy()
             np.random.shuffle(shuffle_idxs[person_id])

         idx = shuffle_idxs[person_id].pop()
         sample1 = samples[person_id][idx]

         if len(shuffle_idxs[person_id]) == 0:
             shuffle_idxs[person_id] = samples_idxs[person_id].copy()
             np.random.shuffle(shuffle_idxs[person_id])

         idx = shuffle_idxs[person_id].pop()
         sample2 = samples[person_id][idx]

         if sample1 is not None and sample2 is not None:
             try:
                 x1, = SampleProcessor.process ([sample1], self.sample_process_options, self.output_sample_types, self.debug)
             except:
                 raise Exception ("Exception occured in sample %s. Error: %s" % (sample1.filename, traceback.format_exc() ) )

             try:
                 x2, = SampleProcessor.process ([sample2], self.sample_process_options, self.output_sample_types, self.debug)
             except:

@@ -256,21 +253,21 @@ if self.person_id_mode==2:
             x1_len = len(x1)
             if batches is None:
                 batches = [ [] for _ in range(x1_len) ]
                 batches += [ [] ]
                 i_person_id1 = len(batches)-1

                 batches += [ [] for _ in range(len(x2)) ]
                 batches += [ [] ]
                 i_person_id2 = len(batches)-1

             for i in range(x1_len):
                 batches[i].append ( x1[i] )

             for i in range(len(x2)):
                 batches[x1_len+1+i].append ( x2[i] )

             batches[i_person_id1].append ( np.array([sample1.person_id]) )

             batches[i_person_id2].append ( np.array([sample2.person_id]) )
 """
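Note: the person-faceset path groups flat sample indices by person_name and then samples in two stages, first a person, then one of that person's samples, which is what Index2DHost's get_1D/get_2D pair serves from a host process. A self-contained sketch of the grouping and the two-stage draw (Index2DHost itself lives in core.mplib and is not reproduced here):

    import random

    # Stand-in samples; in DFL each Sample carries a person_name attribute.
    samples = [
        {'person_name': 'alice'}, {'person_name': 'bob'},
        {'person_name': 'alice'}, {'person_name': 'bob'},
        {'person_name': 'alice'},
    ]

    # Group flat sample indices by person, as in the hunk above.
    unique_person_names = {s['person_name'] for s in samples}
    persons_name_idxs = {name: [] for name in unique_person_names}
    for i, s in enumerate(samples):
        persons_name_idxs[s['person_name']].append(i)
    indexes2D = [persons_name_idxs[name] for name in unique_person_names]

    # Two-stage draw: pick a person, then one of that person's sample indices.
    person_id = random.randrange(len(indexes2D))
    sample_idx = random.choice(indexes2D[person_id])
    print(person_id, sample_idx, samples[sample_idx]['person_name'])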
samplelib/SampleGeneratorFaceTemporal.py
@@ -1,91 +0,0 @@
-import pickle
-import traceback
-
-import cv2
-import numpy as np
-
-from samplelib import (SampleGeneratorBase, SampleHost, SampleProcessor,
-                       SampleType)
-from utils import iter_utils
-
-
-'''
-output_sample_types = [
-                        [SampleProcessor.TypeFlags, size, (optional) {} opts ] ,
-                        ...
-                      ]
-'''
-class SampleGeneratorFaceTemporal(SampleGeneratorBase):
-    def __init__ (self, samples_path, debug, batch_size, temporal_image_count, sample_process_options=SampleProcessor.Options(), output_sample_types=[], generators_count=2, **kwargs):
-        super().__init__(samples_path, debug, batch_size)
-
-        self.temporal_image_count = temporal_image_count
-        self.sample_process_options = sample_process_options
-        self.output_sample_types = output_sample_types
-
-        if self.debug:
-            self.generators_count = 1
-        else:
-            self.generators_count = generators_count
-
-        samples = SampleHost.load (SampleType.FACE_TEMPORAL_SORTED, self.samples_path)
-        samples_len = len(samples)
-        if samples_len == 0:
-            raise ValueError('No training data provided.')
-
-        pickled_samples = pickle.dumps(samples, 4)
-        if self.debug:
-            self.generators = [iter_utils.ThisThreadGenerator ( self.batch_func, (0, pickled_samples) )]
-        else:
-            self.generators = [iter_utils.SubprocessGenerator ( self.batch_func, (i, pickled_samples) ) for i in range(self.generators_count) ]
-
-        self.generator_counter = -1
-
-    def __iter__(self):
-        return self
-
-    def __next__(self):
-        self.generator_counter += 1
-        generator = self.generators[self.generator_counter % len(self.generators) ]
-        return next(generator)
-
-    def batch_func(self, param):
-        generator_id, pickled_samples = param
-        samples = pickle.loads(pickled_samples)
-        samples_len = len(samples)
-
-        mult_max = 1
-        l = samples_len - ( (self.temporal_image_count)*mult_max - (mult_max-1) )
-
-        samples_idxs = [ *range(l+1) ]
-
-        if len(samples_idxs) - self.temporal_image_count < 0:
-            raise ValueError('Not enough samples to fit temporal line.')
-
-        shuffle_idxs = []
-
-        while True:
-            batches = None
-            for n_batch in range(self.batch_size):
-                if len(shuffle_idxs) == 0:
-                    shuffle_idxs = samples_idxs.copy()
-                    np.random.shuffle (shuffle_idxs)
-
-                idx = shuffle_idxs.pop()
-
-                temporal_samples = []
-                mult = np.random.randint(mult_max)+1
-                for i in range( self.temporal_image_count ):
-                    sample = samples[ idx+i*mult ]
-                    try:
-                        temporal_samples += SampleProcessor.process ([sample], self.sample_process_options, self.output_sample_types, self.debug)[0]
-                    except:
-                        raise Exception ("Exception occured in sample %s. Error: %s" % (sample.filename, traceback.format_exc() ) )
-
-                if batches is None:
-                    batches = [ [] for _ in range(len(temporal_samples)) ]
-
-                for i in range(len(temporal_samples)):
-                    batches[i].append ( temporal_samples[i] )
-
-            yield [ np.array(batch) for batch in batches]
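Note: batch_func in this file draws a start index and takes temporal_image_count consecutive frames at stride mult (samples[idx + i*mult]), which is why the samples must be temporally sorted and why the start range is clamped to leave room for a full window. A small sketch of that index arithmetic:

    samples_len = 10
    temporal_image_count = 3
    mult_max = 1

    # Highest valid start index leaves room for a full window at maximum stride.
    l = samples_len - (temporal_image_count * mult_max - (mult_max - 1))
    samples_idxs = [*range(l + 1)]

    for idx in (0, l):
        mult = 1  # np.random.randint(mult_max) + 1 is always 1 when mult_max == 1
        window = [idx + i * mult for i in range(temporal_image_count)]
        assert window[-1] < samples_len
        print(idx, window)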
samplelib/SampleGeneratorImageTemporal.py
@@ -1,10 +1,12 @@
 import traceback
-import numpy as np

 import cv2
+import numpy as np

-from utils import iter_utils
+from core.joblib import SubprocessGenerator, ThisThreadGenerator
+from samplelib import (SampleGeneratorBase, SampleHost, SampleProcessor,
+                       SampleType)
-from samplelib import SampleType, SampleProcessor, SampleHost, SampleGeneratorBase

 '''
 output_sample_types = [
samplelib/SampleHost.py
@@ -1,14 +1,15 @@
 import multiprocessing
 import operator
+import pickle
 import traceback
 from pathlib import Path
-import pickle

 import samplelib.PackedFaceset
+from core import pathex
+from core.interact import interact as io
+from core.joblib import Subprocessor
 from DFLIMG import *
 from facelib import FaceType, LandmarksProcessor
-from interact import interact as io
-from joblib import Subprocessor
-from utils import Path_utils, mp_utils

 from .Sample import Sample, SampleType

@@ -45,7 +46,7 @@ class SampleHost:
         if sample_type == SampleType.IMAGE:
             if samples[sample_type] is None:
-                samples[sample_type] = [ Sample(filename=filename) for filename in io.progress_bar_generator( Path_utils.get_image_paths(samples_path), "Loading") ]
+                samples[sample_type] = [ Sample(filename=filename) for filename in io.progress_bar_generator( pathex.get_image_paths(samples_path), "Loading") ]

         elif sample_type == SampleType.FACE:
             if samples[sample_type] is None:

@@ -58,7 +59,7 @@ class SampleHost:
                 io.log_info (f"Loaded {len(result)} packed faces from {samples_path}")

             if result is None:
-                result = SampleHost.load_face_samples( Path_utils.get_image_paths(samples_path) )
+                result = SampleHost.load_face_samples( pathex.get_image_paths(samples_path) )
             samples[sample_type] = result

         elif sample_type == SampleType.FACE_TEMPORAL_SORTED:

@@ -68,6 +69,31 @@ class SampleHost:
         return samples[sample_type]

+    @staticmethod
+    def load_face_samples ( image_paths):
+        result = FaceSamplesLoaderSubprocessor(image_paths).run()
+        sample_list = []
+
+        for filename, \
+            ( face_type,
+              shape,
+              landmarks,
+              ie_polys,
+              eyebrows_expand_mod,
+              source_filename,
+            ) in result:
+            sample_list.append( Sample(filename=filename,
+                                       sample_type=SampleType.FACE,
+                                       face_type=FaceType.fromString (face_type),
+                                       shape=shape,
+                                       landmarks=landmarks,
+                                       ie_polys=ie_polys,
+                                       eyebrows_expand_mod=eyebrows_expand_mod,
+                                       source_filename=source_filename,
+                                ))
+        return sample_list
+
+    """
     @staticmethod
     def load_face_samples ( image_paths):
         sample_list = []

@@ -87,10 +113,80 @@ class SampleHost:
                            source_filename=dflimg.get_source_filename(),
                            ))
         return sample_list
+    """

     @staticmethod
     def upgradeToFaceTemporalSortedSamples( samples ):
         new_s = [ (s, s.source_filename) for s in samples]
         new_s = sorted(new_s, key=operator.itemgetter(1))

         return [ s[0] for s in new_s]

+
+class FaceSamplesLoaderSubprocessor(Subprocessor):
+    #override
+    def __init__(self, image_paths ):
+        self.image_paths = image_paths
+        self.image_paths_len = len(image_paths)
+        self.idxs = [*range(self.image_paths_len)]
+        self.result = [None]*self.image_paths_len
+        super().__init__('FaceSamplesLoader', FaceSamplesLoaderSubprocessor.Cli, 60)
+
+    #override
+    def on_clients_initialized(self):
+        io.progress_bar ("Loading samples", len (self.image_paths))
+
+    #override
+    def on_clients_finalized(self):
+        io.progress_bar_close()
+
+    #override
+    def process_info_generator(self):
+        for i in range(min(multiprocessing.cpu_count(), 8) ):
+            yield 'CPU%d' % (i), {}, {}
+
+    #override
+    def get_data(self, host_dict):
+        if len (self.idxs) > 0:
+            idx = self.idxs.pop(0)
+            return idx, self.image_paths[idx]
+
+        return None
+
+    #override
+    def on_data_return (self, host_dict, data):
+        self.idxs.insert(0, data[0])
+
+    #override
+    def on_result (self, host_dict, data, result):
+        idx, dflimg = result
+        self.result[idx] = (self.image_paths[idx], dflimg)
+        io.progress_bar_inc(1)
+
+    #override
+    def get_result(self):
+        return self.result
+
+    class Cli(Subprocessor.Cli):
+        #override
+        def process_data(self, data):
+            idx, filename = data
+            dflimg = DFLIMG.load (Path(filename))
+
+            if dflimg is None:
+                self.log_err (f"FaceSamplesLoader: {filename} is not a dfl image file.")
+                data = None
+            else:
+                data = (dflimg.get_face_type(),
+                        dflimg.get_shape(),
+                        dflimg.get_landmarks(),
+                        dflimg.get_ie_polys(),
+                        dflimg.get_eyebrows_expand_mod(),
+                        dflimg.get_source_filename() )
+
+            return idx, data
+
+        #override
+        def get_data_name (self, data):
+            #return string identificator of your data
+            return data[1]
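Note: the new FaceSamplesLoaderSubprocessor follows the Subprocessor contract: the host hands out (idx, path) work items from get_data, workers return (idx, metadata) from process_data, and on_result writes each result back into a preallocated list by index, so output order matches input order no matter which worker finishes first. Below is a sketch of the same index-keyed fan-out using multiprocessing.Pool instead of core.joblib.Subprocessor; the metadata dict is a stand-in for the DFLIMG fields.

    import multiprocessing

    def process_data(item):
        idx, filename = item
        return idx, {'source': filename}  # hypothetical metadata extraction

    if __name__ == '__main__':
        image_paths = [f'face_{i}.jpg' for i in range(5)]
        result = [None] * len(image_paths)  # preallocated, as in self.result

        with multiprocessing.Pool(min(multiprocessing.cpu_count(), 8)) as pool:
            for idx, meta in pool.imap_unordered(process_data, enumerate(image_paths)):
                result[idx] = (image_paths[idx], meta)  # slot back in by index

        assert all(r is not None for r in result)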
samplelib/SampleProcessor.py
@@ -4,7 +4,7 @@ from enum import IntEnum
 import cv2
 import numpy as np

-import imagelib
+from core import imagelib
 from facelib import FaceType, LandmarksProcessor

@@ -154,9 +154,9 @@ class SampleProcessor(object):
                     yaw = -yaw

                 if img_type == SPTF.IMG_PITCH_YAW_ROLL_SIGMOID:
-                    pitch = (pitch+1.0) / 2.0
-                    yaw = (yaw+1.0) / 2.0
-                    roll = (roll+1.0) / 2.0
+                    pitch = np.clip( (pitch / math.pi) / 2.0 + 1.0, 0, 1)
+                    yaw = np.clip( (yaw / math.pi) / 2.0 + 1.0, 0, 1)
+                    roll = np.clip( (roll / math.pi) / 2.0 + 1.0, 0, 1)

                 img = (pitch, yaw, roll)
             else:
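Note: the sigmoid branch previously assumed angles already normalized to [-1, 1]; the new lines rescale raw radians and clip to [0, 1]. A quick numeric check of the replacement formula as transcribed (to_sigmoid_range is a hypothetical helper wrapping the expression from the hunk):

    import math
    import numpy as np

    def to_sigmoid_range(angle_rad):
        return np.clip((angle_rad / math.pi) / 2.0 + 1.0, 0, 1)

    for a in (-math.pi, -math.pi / 2, 0.0, math.pi / 2, math.pi):
        print(f"{a:+.3f} rad -> {to_sigmoid_range(a):.3f}")
    # With the +1.0 offset shown above, [-pi, pi] lands in [0.5, 1.5] before
    # clipping, so positive angles saturate at 1.0; an offset of +0.5 would
    # spread the full range across [0, 1].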
samplelib/__init__.py
@@ -5,6 +5,5 @@ from .SampleProcessor import SampleProcessor
 from .SampleGeneratorBase import SampleGeneratorBase
 from .SampleGeneratorFace import SampleGeneratorFace
 from .SampleGeneratorFacePerson import SampleGeneratorFacePerson
 from .SampleGeneratorFaceTemporal import SampleGeneratorFaceTemporal
 from .SampleGeneratorImageTemporal import SampleGeneratorImageTemporal
 from .PackedFaceset import PackedFaceset