cuda_cache_max_size

This commit is contained in:
Colombo 2019-10-02 20:17:56 +04:00
parent 09a990852f
commit 353bcdf80f

View file

@ -139,6 +139,8 @@ NLayerDiscriminator = nnlib.NLayerDiscriminator
# GPU/TensorFlow environment tuning — must run before TF initializes CUDA.
# Remove any CUDA_VISIBLE_DEVICES restriction so all GPUs are visible.
# pop(..., None) is a no-op when the variable is unset, replacing the
# original check-then-pop pair with one lookup.
os.environ.pop('CUDA_VISIBLE_DEVICES', None)
os.environ['CUDA_CACHE_MAXSIZE'] = '536870912'  # 512Mb (32mb default)
# NOTE(review): presumably lowers TF's minimum streaming-multiprocessor
# requirement so weak GPUs are usable — confirm against TF docs.
os.environ['TF_MIN_GPU_MULTIPROCESSOR_COUNT'] = '2'
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'  # tf log errors only