diff --git a/main.py b/main.py
index e5bec0a..37358b7 100644
--- a/main.py
+++ b/main.py
@@ -116,6 +116,7 @@ if __name__ == "__main__":
                   'force_model_name'    : arguments.force_model_name,
                   'force_gpu_idxs'      : [ int(x) for x in arguments.force_gpu_idxs.split(',') ] if arguments.force_gpu_idxs is not None else None,
                   'cpu_only'            : arguments.cpu_only,
+                  'silent_start'        : arguments.silent_start,
                   'execute_programs'    : [ [int(x[0]), x[1] ] for x in arguments.execute_program ],
                   'debug'               : arguments.debug,
                   }
@@ -134,6 +135,9 @@ if __name__ == "__main__":
     p.add_argument('--force-model-name', dest="force_model_name", default=None, help="Forcing to choose model name from model/ folder.")
     p.add_argument('--cpu-only', action="store_true", dest="cpu_only", default=False, help="Train on CPU.")
     p.add_argument('--force-gpu-idxs', dest="force_gpu_idxs", default=None, help="Force to choose GPU indexes separated by comma.")
+    p.add_argument('--silent-start', action="store_true", dest="silent_start", default=False, help="Silent start. Automatically chooses Best GPU and last used model.")
+
+    p.add_argument('--execute-program', dest="execute_program", default=[], action='append', nargs='+')
     p.set_defaults (func=process_train)
diff --git a/models/ModelBase.py b/models/ModelBase.py
index 0ffe0c8..09cb31b 100644
--- a/models/ModelBase.py
+++ b/models/ModelBase.py
@@ -33,6 +33,7 @@ class ModelBase(object):
                         cpu_only=False,
                         debug=False,
                         force_model_class_name=None,
+                        silent_start=False,
                         **kwargs):
         self.is_training = is_training
         self.saved_models_path = saved_models_path
@@ -60,59 +61,64 @@ class ModelBase(object):
                     # sort by modified datetime
                     saved_models_names = sorted(saved_models_names, key=operator.itemgetter(1), reverse=True )
                     saved_models_names = [ x[0] for x in saved_models_names ]
+
                     if len(saved_models_names) != 0:
-                        io.log_info ("Choose one of saved models, or enter a name to create a new model.")
-                        io.log_info ("[r] : rename")
-                        io.log_info ("[d] : delete")
-                        io.log_info ("")
-                        for i, model_name in enumerate(saved_models_names):
-                            s = f"[{i}] : {model_name} "
-                            if i == 0:
-                                s += "- latest"
-                            io.log_info (s)
+                        if silent_start:
+                            self.model_name = saved_models_names[0]
+                            io.log_info(f'Silent start: chose model "{self.model_name}"')
+                        else:
+                            io.log_info ("Choose one of saved models, or enter a name to create a new model.")
+                            io.log_info ("[r] : rename")
+                            io.log_info ("[d] : delete")
+                            io.log_info ("")
+                            for i, model_name in enumerate(saved_models_names):
+                                s = f"[{i}] : {model_name} "
+                                if i == 0:
+                                    s += "- latest"
+                                io.log_info (s)
 
-                        inp = io.input_str(f"", "0", show_default_value=False )
-                        model_idx = -1
-                        try:
-                            model_idx = np.clip ( int(inp), 0, len(saved_models_names)-1 )
-                        except:
-                            pass
+                            inp = io.input_str(f"", "0", show_default_value=False )
+                            model_idx = -1
+                            try:
+                                model_idx = np.clip ( int(inp), 0, len(saved_models_names)-1 )
+                            except:
+                                pass
 
-                        if model_idx == -1:
-                            if len(inp) == 1:
-                                is_rename = inp[0] == 'r'
-                                is_delete = inp[0] == 'd'
+                            if model_idx == -1:
+                                if len(inp) == 1:
+                                    is_rename = inp[0] == 'r'
+                                    is_delete = inp[0] == 'd'
 
-                                if is_rename or is_delete:
-                                    if len(saved_models_names) != 0:
-
-                                        if is_rename:
-                                            name = io.input_str(f"Enter the name of the model you want to rename")
-                                        elif is_delete:
-                                            name = io.input_str(f"Enter the name of the model you want to delete")
-
-                                        if name in saved_models_names:
+                                    if is_rename or is_delete:
+                                        if len(saved_models_names) != 0:
                                             if is_rename:
-                                                new_model_name = io.input_str(f"Enter new name of the model")
+                                                name = io.input_str(f"Enter the name of the model you want to rename")
+                                            elif is_delete:
+                                                name = io.input_str(f"Enter the name of the model you want to delete")
 
-                                            for filepath in pathex.get_paths(saved_models_path):
-                                                filepath_name = filepath.name
+                                            if name in saved_models_names:
 
-                                                model_filename, remain_filename = filepath_name.split('_', 1)
-                                                if model_filename == name:
+                                                if is_rename:
+                                                    new_model_name = io.input_str(f"Enter new name of the model")
 
-                                                    if is_rename:
-                                                        new_filepath = filepath.parent / ( new_model_name + '_' + remain_filename )
-                                                        filepath.rename (new_filepath)
-                                                    elif is_delete:
-                                                        filepath.unlink()
-                                    continue
+                                                for filepath in pathex.get_paths(saved_models_path):
+                                                    filepath_name = filepath.name
 
-                            self.model_name = inp
-                        else:
-                            self.model_name = saved_models_names[model_idx]
+                                                    model_filename, remain_filename = filepath_name.split('_', 1)
+                                                    if model_filename == name:
+
+                                                        if is_rename:
+                                                            new_filepath = filepath.parent / ( new_model_name + '_' + remain_filename )
+                                                            filepath.rename (new_filepath)
+                                                        elif is_delete:
+                                                            filepath.unlink()
+                                        continue
+
+                                self.model_name = inp
+                            else:
+                                self.model_name = saved_models_names[model_idx]
                     else:
                         self.model_name = io.input_str(f"No saved models found. Enter a name of a new model", "new")
@@ -144,9 +150,13 @@ class ModelBase(object):
 
         if self.is_first_run():
             io.log_info ("\nModel first run.")
-
-        self.device_config = nn.DeviceConfig.GPUIndexes( force_gpu_idxs or nn.ask_choose_device_idxs(suggest_best_multi_gpu=True)) \
-                             if not cpu_only else nn.DeviceConfig.CPU()
+
+        if silent_start:
+            self.device_config = nn.DeviceConfig.BestGPU()
+            io.log_info (f"Silent start: chose device {'CPU' if self.device_config.cpu_only else self.device_config.devices[0].name}")
+        else:
+            self.device_config = nn.DeviceConfig.GPUIndexes( force_gpu_idxs or nn.ask_choose_device_idxs(suggest_best_multi_gpu=True)) \
+                                 if not cpu_only else nn.DeviceConfig.CPU()
 
         nn.initialize(self.device_config)