From 8759a202154dd6967e2833ea37c97e9750b99b7b Mon Sep 17 00:00:00 2001
From: Brigham Lysenko
Date: Wed, 14 Aug 2019 18:58:42 -0600
Subject: [PATCH] tweaked prompt

---
 models/ModelBase.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/models/ModelBase.py b/models/ModelBase.py
index 66b3090..5fd91c1 100644
--- a/models/ModelBase.py
+++ b/models/ModelBase.py
@@ -139,8 +139,7 @@ class ModelBase(object):
 
         if ask_batch_size and (self.iter == 0 or ask_override):
             default_batch_size = 0 if self.iter == 0 else self.options.get('batch_size', 0)
-            self.options['batch_cap'] = max(0, io.input_int("Batch_size (?:help skip:%d) : " % 0,
-                                                            0,
+            self.options['batch_cap'] = max(0, io.input_int("Batch_size (?:help skip:%d) : " % self.options.get('batch_cap', 16),
                                                             help_message="Larger batch size is better for NN's"
                                                                          " generalization, but it can cause Out of"
                                                                          " Memory error. Tune this value for your"
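
The substance of the patch is the prompt's advertised skip value: it changes from a hard-coded 0 to the previously stored 'batch_cap' option, falling back to 16 when no value has been saved. Below is a minimal, non-interactive sketch of that formatting change only; 'options' is a hypothetical stand-in for self.options, and io.input_int itself (whose full signature is not shown in the hunk) is not reproduced:

    # Illustration only: how the prompt text differs before and after the patch.
    # 'options' is a hypothetical stand-in for self.options in ModelBase.
    options = {'batch_cap': 8}  # value assumed saved from a previous run

    old_prompt = "Batch_size (?:help skip:%d) : " % 0
    new_prompt = "Batch_size (?:help skip:%d) : " % options.get('batch_cap', 16)

    print(old_prompt)  # Batch_size (?:help skip:0) :  -> always advertises 0
    print(new_prompt)  # Batch_size (?:help skip:8) :  -> advertises the saved cap, or 16 on a fresh model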