From e3102e753c16c1dacc5446ba30879c76910583c1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Eren=20G=C3=B6lge?=
Date: Thu, 18 Feb 2021 17:20:36 +0000
Subject: [PATCH] enable backward compat for loading the best model

---
 TTS/utils/arguments.py | 30 ++++++++++++++++++++----------
 1 file changed, 20 insertions(+), 10 deletions(-)

diff --git a/TTS/utils/arguments.py b/TTS/utils/arguments.py
index e4983bfb..bad06262 100644
--- a/TTS/utils/arguments.py
+++ b/TTS/utils/arguments.py
@@ -86,24 +86,34 @@ def get_last_checkpoint(path):
     last_models = {}
     last_model_nums = {}
     for key in ['checkpoint', 'best_model']:
-        last_model_num = 0
+        last_model_num = None
         last_model = None
+        # pass all the checkpoint files and find
+        # the one with the largest model number suffix.
         for file_name in file_names:
-            try:
-                model_num = int(re.search(
-                    f"{key}_([0-9]+)", file_name).groups()[0])
-                if model_num > last_model_num:
+            match = re.search(f"{key}_([0-9]+)", file_name)
+            if match is not None:
+                model_num = int(match.groups()[0])
+                if last_model_num is None or model_num > last_model_num:
                     last_model_num = model_num
                     last_model = file_name
-            except AttributeError:  # if there's no match in the filename
-                continue
-        last_models[key] = last_model
-        last_model_nums[key] = last_model_num
+
+        # if there is no checkpoint found above,
+        # find the checkpoint with the latest
+        # modification date.
+        key_file_names = [fn for fn in file_names if key in fn]
+        if last_model is None and len(key_file_names) > 0:
+            last_model = max(key_file_names, key=os.path.getctime)
+            last_model_num = os.path.getctime(last_model)
+
+        if last_model is not None:
+            last_models[key] = last_model
+            last_model_nums[key] = last_model_num

     # check what models were found
     if not last_models:
         raise ValueError(f"No models found in continue path {path}!")
-    elif 'checkpoint' not in last_models:  # no checkpoint just best model
+    if 'checkpoint' not in last_models:  # no checkpoint just best model
         last_models['checkpoint'] = last_models['best_model']
     elif 'best_model' not in last_models:  # no best model
         # this shouldn't happen, but let's handle it just in case