Mirror of https://github.com/iperov/DeepFaceLab.git (synced 2025-08-14 02:37:00 -07:00)
all models: removed the options 'src_scale_mod' and 'sort samples by yaw as target'.
If you want, you can manually remove unnecessary angles from the src faceset after sorting by yaw.

Optimized sample generators (CPU workers): they now consume less RAM and work faster.

Added 4.2.other) data_src/dst util faceset pack.bat: packs /aligned/ samples into a single /aligned/samples.pak file; after packing, the individual face files are deleted.
Added 4.2.other) data_src/dst util faceset unpack.bat: unpacks faces from /aligned/samples.pak back into the /aligned/ dir; after unpacking, samples.pak is deleted.
A packed faceset loads and works faster.
parent 8866dce22e
commit 50f892d57d
26 changed files with 577 additions and 433 deletions
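For context on the pack/unpack feature described above, here is a minimal sketch of the idea, assuming a trivial container format (a pickled name-to-bytes dict). This is illustration only; the actual samples.pak format written by the new .bat scripts is not shown on this commit page.

import pickle
from pathlib import Path

def pack_faceset(aligned_dir, pak_path):
    # Read every aligned sample into memory and store them as one name -> bytes dict.
    samples = { p.name: p.read_bytes() for p in Path(aligned_dir).glob("*.jpg") }
    with open(pak_path, "wb") as f:
        pickle.dump(samples, f)
    # pack.bat additionally deletes the individual face files after packing.

def unpack_faceset(pak_path, aligned_dir):
    # Restore every sample from the container back into the aligned dir.
    with open(pak_path, "rb") as f:
        samples = pickle.load(f)
    for name, data in samples.items():
        (Path(aligned_dir) / name).write_bytes(data)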
@@ -18,10 +18,13 @@ class DFLJPG(object):
         self.shape = (0,0,0)
 
     @staticmethod
-    def load_raw(filename):
+    def load_raw(filename, loader_func=None):
         try:
-            with open(filename, "rb") as f:
-                data = f.read()
+            if loader_func is not None:
+                data = loader_func(filename)
+            else:
+                with open(filename, "rb") as f:
+                    data = f.read()
         except:
             raise FileNotFoundError(filename)
 
@@ -116,9 +119,9 @@ class DFLJPG(object):
             raise Exception ("Corrupted JPG file: %s" % (str(e)))
 
     @staticmethod
-    def load(filename):
+    def load(filename, loader_func=None):
         try:
-            inst = DFLJPG.load_raw (filename)
+            inst = DFLJPG.load_raw (filename, loader_func=loader_func)
             inst.dfl_dict = None
 
             for chunk in inst.chunks:
@@ -225,10 +225,13 @@ class DFLPNG(object):
         self.dfl_dict = None
 
     @staticmethod
-    def load_raw(filename):
+    def load_raw(filename, loader_func=None):
         try:
-            with open(filename, "rb") as f:
-                data = f.read()
+            if loader_func is not None:
+                data = loader_func(filename)
+            else:
+                with open(filename, "rb") as f:
+                    data = f.read()
         except:
             raise FileNotFoundError(filename)
 
@@ -252,9 +255,9 @@ class DFLPNG(object):
         return inst
 
     @staticmethod
-    def load(filename):
+    def load(filename, loader_func=None):
         try:
-            inst = DFLPNG.load_raw (filename)
+            inst = DFLPNG.load_raw (filename, loader_func=loader_func)
             inst.dfl_dict = inst.getDFLDictData()
 
             if inst.dfl_dict is not None:
@@ -3,12 +3,15 @@ import numpy as np
 from pathlib import Path
 
 #allows to open non-english characters path
-def cv2_imread(filename, flags=cv2.IMREAD_UNCHANGED):
+def cv2_imread(filename, flags=cv2.IMREAD_UNCHANGED, loader_func=None):
     try:
-        with open(filename, "rb") as stream:
-            bytes = bytearray(stream.read())
-        numpyarray = np.asarray(bytes, dtype=np.uint8)
-        return cv2.imdecode(numpyarray, flags)
+        if loader_func is not None:
+            bytes = bytearray(loader_func(filename))
+        else:
+            with open(filename, "rb") as stream:
+                bytes = bytearray(stream.read())
+        numpyarray = np.asarray(bytes, dtype=np.uint8)
+        return cv2.imdecode(numpyarray, flags)
     except:
         return None
 
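A short sketch of how the new loader_func hook can be exercised, assuming the image bytes come from somewhere other than the filesystem (for example, a packed faceset). The in-memory dict and file name below are placeholders for illustration only.

import cv2
import numpy as np

# Build some fake "packed" sample bytes so the example is self-contained.
ok, buf = cv2.imencode(".jpg", np.zeros((8, 8, 3), np.uint8))
packed_samples = { "00001.jpg": buf.tobytes() }   # stand-in for a samples.pak lookup

def mem_loader(filename):
    # loader_func receives the sample's filename and must return its raw bytes.
    return packed_samples[filename]

# With the patch above, cv2_imread decodes whatever bytes loader_func returns
# instead of opening the path on disk:
# img = cv2_imread("00001.jpg", loader_func=mem_loader)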
utils/mp_utils.py (new file, 179 lines added)
import multiprocessing
import threading
import time

import numpy as np


class IndexHost():
    """
    Provides random shuffled indexes for multiprocesses
    """
    def __init__(self, indexes_count):
        self.sq = multiprocessing.Queue()
        self.cqs = []
        self.clis = []
        self.thread = threading.Thread(target=self.host_thread, args=(indexes_count,) )
        self.thread.daemon = True
        self.thread.start()

    def host_thread(self, indexes_count):
        idxs = [*range(indexes_count)]
        shuffle_idxs = []
        sq = self.sq

        while True:
            while not sq.empty():
                obj = sq.get()
                cq_id, count = obj[0], obj[1]

                result = []
                for i in range(count):
                    if len(shuffle_idxs) == 0:
                        shuffle_idxs = idxs.copy()
                        np.random.shuffle(shuffle_idxs)
                    result.append(shuffle_idxs.pop())
                self.cqs[cq_id].put (result)

            time.sleep(0.005)

    def create_cli(self):
        cq = multiprocessing.Queue()
        self.cqs.append ( cq )
        cq_id = len(self.cqs)-1
        return IndexHost.Cli(self.sq, cq, cq_id)

    # disable pickling
    def __getstate__(self):
        return dict()
    def __setstate__(self, d):
        self.__dict__.update(d)

    class Cli():
        def __init__(self, sq, cq, cq_id):
            self.sq = sq
            self.cq = cq
            self.cq_id = cq_id

        def get(self, count):
            self.sq.put ( (self.cq_id,count) )

            while True:
                if not self.cq.empty():
                    return self.cq.get()
                time.sleep(0.001)

class ListHost():
    def __init__(self, list_):
        self.sq = multiprocessing.Queue()
        self.cqs = []
        self.clis = []
        self.list_ = list_
        self.thread = threading.Thread(target=self.host_thread)
        self.thread.daemon = True
        self.thread.start()

    def host_thread(self):
        sq = self.sq
        while True:
            while not sq.empty():
                obj = sq.get()
                cq_id, cmd = obj[0], obj[1]
                if cmd == 0:
                    item = self.list_[ obj[2] ]
                    self.cqs[cq_id].put ( item )

                elif cmd == 1:
                    self.cqs[cq_id].put ( len(self.list_) )
            time.sleep(0.005)

    def create_cli(self):
        cq = multiprocessing.Queue()
        self.cqs.append ( cq )
        cq_id = len(self.cqs)-1
        return ListHost.Cli(self.sq, cq, cq_id)

    def __len__(self):
        return len(self.list_)

    # disable pickling
    def __getstate__(self):
        return dict()
    def __setstate__(self, d):
        self.__dict__.update(d)

    class Cli():
        def __init__(self, sq, cq, cq_id):
            self.sq = sq
            self.cq = cq
            self.cq_id = cq_id

        def __getitem__(self, key):
            self.sq.put ( (self.cq_id,0,key) )

            while True:
                if not self.cq.empty():
                    return self.cq.get()
                time.sleep(0.001)

        def __len__(self):
            self.sq.put ( (self.cq_id,1) )

            while True:
                if not self.cq.empty():
                    return self.cq.get()
                time.sleep(0.001)

class DictHost():
    def __init__(self, d, num_users):
        self.sqs = [ multiprocessing.Queue() for _ in range(num_users) ]
        self.cqs = [ multiprocessing.Queue() for _ in range(num_users) ]

        self.thread = threading.Thread(target=self.host_thread, args=(d,) )
        self.thread.daemon = True
        self.thread.start()

        self.clis = [ DictHostCli(sq,cq) for sq, cq in zip(self.sqs, self.cqs) ]

    def host_thread(self, d):
        while True:
            for sq, cq in zip(self.sqs, self.cqs):
                if not sq.empty():
                    obj = sq.get()
                    cmd = obj[0]
                    if cmd == 0:
                        cq.put (d[ obj[1] ])
                    elif cmd == 1:
                        cq.put ( list(d.keys()) )

            time.sleep(0.005)

    def get_cli(self, n_user):
        return self.clis[n_user]

    # disable pickling
    def __getstate__(self):
        return dict()
    def __setstate__(self, d):
        self.__dict__.update(d)

class DictHostCli():
    def __init__(self, sq, cq):
        self.sq = sq
        self.cq = cq

    def __getitem__(self, key):
        self.sq.put ( (0,key) )

        while True:
            if not self.cq.empty():
                return self.cq.get()
            time.sleep(0.001)

    def keys(self):
        self.sq.put ( (1,) )
        while True:
            if not self.cq.empty():
                return self.cq.get()
            time.sleep(0.001)
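A brief usage sketch for the hosts above, assuming they are created in the main process before workers are spawned: each worker is handed only a small Cli object (the hosts themselves disable pickling), and every request travels over the queues back to the host thread in the parent. The worker function, worker count, and sample list below are illustrative.

import multiprocessing
from utils.mp_utils import IndexHost, ListHost

def worker(index_cli, list_cli):
    # Ask the parent process for 4 shuffled sample indexes...
    idxs = index_cli.get(4)
    # ...then fetch the corresponding items from the list living in the parent.
    print([ list_cli[i] for i in idxs ])

if __name__ == "__main__":
    samples = [ "sample_%05d.jpg" % i for i in range(100) ]   # illustrative data
    index_host = IndexHost(len(samples))
    list_host = ListHost(samples)

    procs = [ multiprocessing.Process(target=worker,
                                      args=(index_host.create_cli(), list_host.create_cli()))
              for _ in range(2) ]
    for p in procs: p.start()
    for p in procs: p.join()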