update == 04.20.2019 == (#242)

* substantially improved FANSeg

* added a FANSeg extractor for src and dst faces so its masks can be used in training (a sketch of the idea appears just before the file diffs below)

* updated the 'partial' function

* trained FANSeg_256_full_face.h5; added new experimental models: AVATAR and RecycleGAN

* fixed TCC-mode cards (Tesla): resolved a conflict with plaidML initialization

* updated manuals

iperov 2019-04-20 08:23:08 +04:00 committed by GitHub
commit 046649e6be
32 changed files with 1152 additions and 329 deletions
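
Taken together, the diffs below thread a face-segmentation mask through the whole sample pipeline: an extractor pass writes a FANSeg-predicted mask into each aligned face's DFL metadata, SampleLoader records whether such a mask exists, and SampleProcessor prefers it over the landmark hull at training time. A hedged sketch of the extractor idea follows; it is not the repo's actual extractor code, and `segment_face` and `store_mask` are hypothetical stand-ins for the real model call and the metadata write.

```python
# Hedged sketch, not the repo's extractor: run a segmentation model over every
# aligned face and keep the predicted mask next to the face's metadata so the
# trainer can use it instead of the landmark hull.
from pathlib import Path

import cv2
import numpy as np

def extract_fanseg_masks(aligned_dir, segment_face, store_mask):
    # segment_face: hypothetical callable, BGR float image -> (H, W, 1) soft mask in [0,1]
    # store_mask:   hypothetical callable that embeds the mask into the file's DFL metadata
    for filepath in sorted(Path(aligned_dir).glob('*.jpg')):
        bgr = cv2.imread(str(filepath)).astype(np.float32) / 255.0
        mask = segment_face(bgr)
        store_mask(filepath, mask)
```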

View file

@@ -1,7 +1,13 @@
from enum import IntEnum
from pathlib import Path
import cv2
import numpy as np
from utils.cv2_utils import *
from utils.DFLJPG import DFLJPG
from utils.DFLPNG import DFLPNG
class SampleType(IntEnum):
IMAGE = 0 #raw image
@@ -10,13 +16,12 @@ class SampleType(IntEnum):
FACE = 1 #aligned face unsorted
FACE_YAW_SORTED = 2 #sorted by yaw
FACE_YAW_SORTED_AS_TARGET = 3 #sorted by yaw and included only yaws which exist in TARGET also automatic mirrored
FACE_WITH_CLOSE_TO_SELF = 4
FACE_END = 4
FACE_END = 3
QTY = 5
QTY = 4
class Sample(object):
def __init__(self, sample_type=None, filename=None, face_type=None, shape=None, landmarks=None, ie_polys=None, pitch=None, yaw=None, mirror=None, close_target_list=None):
def __init__(self, sample_type=None, filename=None, face_type=None, shape=None, landmarks=None, ie_polys=None, pitch=None, yaw=None, source_filename=None, mirror=None, close_target_list=None, fanseg_mask_exist=False):
self.sample_type = sample_type if sample_type is not None else SampleType.IMAGE
self.filename = filename
self.face_type = face_type
@@ -25,10 +30,12 @@ class Sample(object):
self.ie_polys = ie_polys
self.pitch = pitch
self.yaw = yaw
self.source_filename = source_filename
self.mirror = mirror
self.close_target_list = close_target_list
self.fanseg_mask_exist = fanseg_mask_exist
def copy_and_set(self, sample_type=None, filename=None, face_type=None, shape=None, landmarks=None, ie_polys=None, pitch=None, yaw=None, mirror=None, close_target_list=None):
def copy_and_set(self, sample_type=None, filename=None, face_type=None, shape=None, landmarks=None, ie_polys=None, pitch=None, yaw=None, source_filename=None, mirror=None, close_target_list=None, fanseg_mask=None, fanseg_mask_exist=None):
return Sample(
sample_type=sample_type if sample_type is not None else self.sample_type,
filename=filename if filename is not None else self.filename,
@@ -38,8 +45,10 @@ class Sample(object):
ie_polys=ie_polys if ie_polys is not None else self.ie_polys,
pitch=pitch if pitch is not None else self.pitch,
yaw=yaw if yaw is not None else self.yaw,
source_filename=source_filename if source_filename is not None else self.source_filename,
mirror=mirror if mirror is not None else self.mirror,
close_target_list=close_target_list if close_target_list is not None else self.close_target_list)
close_target_list=close_target_list if close_target_list is not None else self.close_target_list,
fanseg_mask_exist=fanseg_mask_exist if fanseg_mask_exist is not None else self.fanseg_mask_exist)
def load_bgr(self):
img = cv2_imread (self.filename).astype(np.float32) / 255.0
@@ -47,6 +56,19 @@
img = img[:,::-1].copy()
return img
def load_fanseg_mask(self):
if self.fanseg_mask_exist:
filepath = Path(self.filename)
if filepath.suffix == '.png':
dflimg = DFLPNG.load ( str(filepath) )
elif filepath.suffix == '.jpg':
dflimg = DFLJPG.load ( str(filepath) )
else:
dflimg = None
return dflimg.get_fanseg_mask()
return None
def get_random_close_target_sample(self):
if self.close_target_list is None:
return None
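
A short usage sketch of the API added above: `fanseg_mask_exist` is a cheap flag recorded once at load time, while `load_fanseg_mask()` re-reads the DFLJPG/DFLPNG metadata on demand, so the full-resolution mask never has to stay resident in the sample list. `iter_training_masks` is an illustrative caller, and `samples` is assumed to be the list produced by SampleLoader.

```python
# Hypothetical caller of the new Sample API above.
def iter_training_masks(samples):
    for s in samples:
        if s.fanseg_mask_exist:                      # flag set once by SampleLoader
            yield s.filename, s.load_fanseg_mask()   # mask re-read from file metadata on demand
```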

View file

@@ -15,7 +15,7 @@ output_sample_types = [
]
'''
class SampleGeneratorFace(SampleGeneratorBase):
def __init__ (self, samples_path, debug, batch_size, sort_by_yaw=False, sort_by_yaw_target_samples_path=None, with_close_to_self=False, sample_process_options=SampleProcessor.Options(), output_sample_types=[], add_sample_idx=False, add_pitch=False, add_yaw=False, generators_count=2, generators_random_seed=None, **kwargs):
def __init__ (self, samples_path, debug, batch_size, sort_by_yaw=False, sort_by_yaw_target_samples_path=None, sample_process_options=SampleProcessor.Options(), output_sample_types=[], add_sample_idx=False, add_pitch=False, add_yaw=False, generators_count=2, generators_random_seed=None, **kwargs):
super().__init__(samples_path, debug, batch_size)
self.sample_process_options = sample_process_options
self.output_sample_types = output_sample_types
@@ -27,8 +27,6 @@ class SampleGeneratorFace(SampleGeneratorBase):
self.sample_type = SampleType.FACE_YAW_SORTED_AS_TARGET
elif sort_by_yaw:
self.sample_type = SampleType.FACE_YAW_SORTED
elif with_close_to_self:
self.sample_type = SampleType.FACE_WITH_CLOSE_TO_SELF
else:
self.sample_type = SampleType.FACE
@@ -82,7 +80,7 @@ class SampleGeneratorFace(SampleGeneratorBase):
if all ( [ samples[idx] == None for idx in samples_idxs] ):
raise ValueError('Not enough training data. Gather more faces!')
if self.sample_type == SampleType.FACE or self.sample_type == SampleType.FACE_WITH_CLOSE_TO_SELF:
if self.sample_type == SampleType.FACE:
shuffle_idxs = []
elif self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
shuffle_idxs = []
@@ -102,12 +100,12 @@
if len(repeat_samples_idxs) > 0:
idx = repeat_samples_idxs.pop()
if self.sample_type == SampleType.FACE or self.sample_type == SampleType.FACE_WITH_CLOSE_TO_SELF:
if self.sample_type == SampleType.FACE:
sample = samples[idx]
elif self.sample_type == SampleType.FACE_YAW_SORTED or self.sample_type == SampleType.FACE_YAW_SORTED_AS_TARGET:
sample = samples[(idx >> 16) & 0xFFFF][idx & 0xFFFF]
else:
if self.sample_type == SampleType.FACE or self.sample_type == SampleType.FACE_WITH_CLOSE_TO_SELF:
if self.sample_type == SampleType.FACE:
if len(shuffle_idxs) == 0:
shuffle_idxs = samples_idxs.copy()
np.random.shuffle(shuffle_idxs)
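
For the yaw-sorted sample types, the generator addresses a sample with a single integer that packs the yaw bucket into the high 16 bits and the position inside the bucket into the low 16 bits, which is what the `(idx >> 16) & 0xFFFF` / `idx & 0xFFFF` pair above decodes. A minimal sketch of that encoding (`pack_idx` and `unpack_idx` are illustrative names, not functions from the repo):

```python
# Sketch of the 16-bit packed index used for yaw-sorted samples:
# high half = yaw bucket, low half = index inside that bucket.
def pack_idx(yaw_bucket, sample_idx):
    assert 0 <= yaw_bucket <= 0xFFFF and 0 <= sample_idx <= 0xFFFF
    return (yaw_bucket << 16) | sample_idx

def unpack_idx(idx):
    return (idx >> 16) & 0xFFFF, idx & 0xFFFF

assert unpack_idx(pack_idx(3, 42)) == (3, 42)
```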

View file

@@ -1,19 +1,19 @@
import operator
import traceback
from enum import IntEnum
import cv2
import numpy as np
from pathlib import Path
from utils import Path_utils
from utils.DFLPNG import DFLPNG
from utils.DFLJPG import DFLJPG
import cv2
import numpy as np
from .Sample import Sample
from .Sample import SampleType
from facelib import FaceType
from facelib import LandmarksProcessor
from facelib import FaceType, LandmarksProcessor
from interact import interact as io
from utils import Path_utils
from utils.DFLJPG import DFLJPG
from utils.DFLPNG import DFLPNG
from .Sample import Sample, SampleType
class SampleLoader:
cache = dict()
@@ -35,6 +35,10 @@ class SampleLoader:
if datas[sample_type] is None:
datas[sample_type] = SampleLoader.upgradeToFaceSamples( [ Sample(filename=filename) for filename in Path_utils.get_image_paths(samples_path) ] )
# elif sample_type == SampleType.FACE_TEMPORAL_SORTED:
# if datas[sample_type] is None:
# datas[sample_type] = SampleLoader.upgradeToFaceTemporalSortedSamples( SampleLoader.load(SampleType.FACE, samples_path) )
elif sample_type == SampleType.FACE_YAW_SORTED:
if datas[sample_type] is None:
datas[sample_type] = SampleLoader.upgradeToFaceYawSortedSamples( SampleLoader.load(SampleType.FACE, samples_path) )
@@ -44,10 +48,6 @@ class SampleLoader:
if target_samples_path is None:
raise Exception('target_samples_path is None for FACE_YAW_SORTED_AS_TARGET')
datas[sample_type] = SampleLoader.upgradeToFaceYawSortedAsTargetSamples( SampleLoader.load(SampleType.FACE_YAW_SORTED, samples_path), SampleLoader.load(SampleType.FACE_YAW_SORTED, target_samples_path) )
elif sample_type == SampleType.FACE_WITH_CLOSE_TO_SELF:
if datas[sample_type] is None:
datas[sample_type] = SampleLoader.upgradeToFaceCloseToSelfSamples( SampleLoader.load(SampleType.FACE, samples_path) )
return datas[sample_type]
@@ -77,44 +77,20 @@ class SampleLoader:
landmarks=dflimg.get_landmarks(),
ie_polys=dflimg.get_ie_polys(),
pitch=pitch,
yaw=yaw) )
yaw=yaw,
source_filename=dflimg.get_source_filename(),
fanseg_mask_exist=dflimg.get_fanseg_mask() is not None, ) )
except:
print ("Unable to load %s , error: %s" % (str(s_filename_path), traceback.format_exc() ) )
return sample_list
@staticmethod
def upgradeToFaceCloseToSelfSamples (samples):
yaw_samples = SampleLoader.upgradeToFaceYawSortedSamples(samples)
yaw_samples_len = len(yaw_samples)
# @staticmethod
# def upgradeToFaceTemporalSortedSamples( samples ):
# new_s = [ (s, s.source_filename) for s in samples]
# new_s = sorted(new_s, key=operator.itemgetter(1))
sample_list = []
for i in io.progress_bar_generator( range(yaw_samples_len), "Sorting"):
if yaw_samples[i] is not None:
for s in yaw_samples[i]:
s_t = []
for n in range(2000):
yaw_idx = np.clip ( i-10 +np.random.randint(20), 0, yaw_samples_len-1 )
if yaw_samples[yaw_idx] is None:
continue
yaw_idx_samples_len = len(yaw_samples[yaw_idx])
yaw_idx_sample = yaw_samples[yaw_idx][ np.random.randint(yaw_idx_samples_len) ]
if s.filename == yaw_idx_sample.filename:
continue
s_t.append ( yaw_idx_sample )
if len(s_t) >= 50:
break
if len(s_t) == 0:
s_t = [s]
sample_list.append( s.copy_and_set(close_target_list = s_t) )
return sample_list
# return [ s[0] for s in new_s]
@staticmethod
def upgradeToFaceYawSortedSamples( samples ):
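
The commented-out upgradeToFaceTemporalSortedSamples above hints at why source_filename is now stored on every Sample: it lets a loader re-order aligned faces by the frame they were extracted from. A small sketch of that sort, assuming source_filename holds the original frame's filename (`sort_temporally` is an illustrative name):

```python
import operator

# Mirrors the commented-out upgradeToFaceTemporalSortedSamples above:
# order samples by the source frame they were cut from.
def sort_temporally(samples):
    pairs = sorted(((s, s.source_filename) for s in samples), key=operator.itemgetter(1))
    return [s for s, _ in pairs]
```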

View file

@@ -13,18 +13,18 @@ class SampleProcessor(object):
WARPED_TRANSFORMED = 0x00000004,
TRANSFORMED = 0x00000008,
LANDMARKS_ARRAY = 0x00000010, #currently unused
RANDOM_CLOSE = 0x00000020, #currently unused
MORPH_TO_RANDOM_CLOSE = 0x00000040, #currently unused
FACE_TYPE_HALF = 0x00000100,
FACE_TYPE_FULL = 0x00000200,
FACE_TYPE_HEAD = 0x00000400, #currently unused
FACE_TYPE_AVATAR = 0x00000800, #currently unused
FACE_MASK_FULL = 0x00001000,
FACE_MASK_EYES = 0x00002000, #currently unused
MODE_BGR = 0x00010000, #BGR
MODE_G = 0x00020000, #Grayscale
MODE_GGG = 0x00040000, #3xGrayscale
@@ -35,7 +35,7 @@ class SampleProcessor(object):
class Options(object):
#motion_blur = [chance_int, range] - chance 0..100 to apply to face (not mask), and range [1..3] where 3 is highest power of motion blur
def __init__(self, random_flip = True, normalize_tanh = False, rotation_range=[-10,10], scale_range=[-0.05, 0.05], tx_range=[-0.05, 0.05], ty_range=[-0.05, 0.05], motion_blur=None ):
self.random_flip = random_flip
self.normalize_tanh = normalize_tanh
@@ -49,11 +49,11 @@ class SampleProcessor(object):
chance = np.clip(chance, 0, 100)
range = [3,5,7,9][ : np.clip(range, 0, 3)+1 ]
self.motion_blur = (chance, range)
@staticmethod
def process (sample, sample_process_options, output_sample_types, debug):
SPTF = SampleProcessor.TypeFlags
sample_bgr = sample.load_bgr()
h,w,c = sample_bgr.shape
@@ -113,7 +113,7 @@
target_face_type = FaceType.HEAD
elif f & SPTF.FACE_TYPE_AVATAR != 0:
target_face_type = FaceType.AVATAR
apply_motion_blur = f & SPTF.OPT_APPLY_MOTION_BLUR != 0
if img_type == 4:
@@ -170,9 +170,16 @@
if np.random.randint(100) < chance :
dim = mb_range[ np.random.randint(len(mb_range) ) ]
img = imagelib.LinearMotionBlur (img, dim, np.random.randint(180) )
if face_mask_type == 1:
mask = LandmarksProcessor.get_image_hull_mask (img.shape, cur_sample.landmarks, cur_sample.ie_polys)
mask = cur_sample.load_fanseg_mask() #using fanseg_mask if exist
if mask is None:
mask = LandmarksProcessor.get_image_hull_mask (img.shape, cur_sample.landmarks)
if cur_sample.ie_polys is not None:
cur_sample.ie_polys.overlay_mask(mask)
img = np.concatenate( (img, mask ), -1 )
elif face_mask_type == 2:
mask = LandmarksProcessor.get_image_eye_mask (img.shape, cur_sample.landmarks)
@@ -202,7 +209,7 @@
img_mask = img[...,3:4]
if f & SPTF.MODE_BGR != 0:
img = img
img = img_bgr
elif f & SPTF.MODE_BGR_SHUFFLE != 0:
img_bgr = np.take (img_bgr, np.random.permutation(img_bgr.shape[-1]), axis=-1)
img = np.concatenate ( (img_bgr,img_mask) , -1 )
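
Pulling the new mask selection out of the diff context: for the full-face mask branch (face_mask_type == 1 above) the processor now prefers the embedded FANSeg mask, falls back to the convex hull of the landmarks, and finally overlays any manually edited include/exclude polygons. A condensed, standalone restatement (`build_full_face_mask` is a hypothetical wrapper; the body follows the diff above):

```python
from facelib import LandmarksProcessor

def build_full_face_mask(img, sample):
    mask = sample.load_fanseg_mask()              # 1) embedded FANSeg mask, if any
    if mask is None:                              # 2) otherwise the 68-landmark hull
        mask = LandmarksProcessor.get_image_hull_mask(img.shape, sample.landmarks)
    if sample.ie_polys is not None:               # 3) manual include/exclude polygons last
        sample.ie_polys.overlay_mask(mask)
    return mask
```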