From 116415b3fcec8415f8e6de9b23c0bbd0dbc6845b Mon Sep 17 00:00:00 2001
From: Lincoln Stein
Date: Thu, 10 Nov 2022 21:27:25 +0000
Subject: [PATCH] fix invoke.py crash if no models.yaml file present

- Script will now offer the user the ability to create a minimal
  models.yaml and then gracefully exit.
- Closes #1420
---
 ldm/invoke/readline.py |  2 ++
 scripts/invoke.py      | 38 +++++++++++++++++++++++++++++++++++++-
 2 files changed, 39 insertions(+), 1 deletion(-)

diff --git a/ldm/invoke/readline.py b/ldm/invoke/readline.py
index 7d87ede755..4e95e9b063 100644
--- a/ldm/invoke/readline.py
+++ b/ldm/invoke/readline.py
@@ -284,6 +284,7 @@ class Completer(object):
 
             switch,partial_path = match.groups()
         partial_path = partial_path.lstrip()
+
         matches = list()
         path = os.path.expanduser(partial_path)
 
@@ -321,6 +322,7 @@ class Completer(object):
                 matches.append(
                     switch+os.path.join(os.path.dirname(full_path), node)
                 )
+
         return matches
 
 class DummyCompleter(Completer):
diff --git a/scripts/invoke.py b/scripts/invoke.py
index 1e9a84295e..e60bc892ba 100755
--- a/scripts/invoke.py
+++ b/scripts/invoke.py
@@ -90,7 +90,12 @@ def main():
             safety_checker=opt.safety_checker,
             max_loaded_models=opt.max_loaded_models,
             )
-    except (FileNotFoundError, IOError, KeyError) as e:
+    except FileNotFoundError:
+        print('** You appear to be missing configs/models.yaml')
+        print('** You can either exit this script and run scripts/preload_models.py, or fix the problem now.')
+        emergency_model_create(opt)
+        sys.exit(-1)
+    except (IOError, KeyError) as e:
         print(f'{e}. Aborting.')
         sys.exit(-1)
 
@@ -482,6 +487,7 @@ def do_command(command:str, gen, opt:Args, completer) -> tuple:
         command = '-h'
     return command, operation
 
+
 def add_weights_to_config(model_path:str, gen, opt, completer):
     print(f'>> Model import in process. Please enter the values needed to configure this model:')
     print()
@@ -878,6 +884,36 @@ def write_commands(opt, file_path:str, outfilepath:str):
             f.write('\n'.join(commands))
         print(f'>> File {outfilepath} with commands created')
 
+def emergency_model_create(opt:Args):
+    completer = get_completer(opt)
+    completer.complete_extensions(('.yaml','.yml','.ckpt','.vae.pt'))
+    completer.set_default_dir('.')
+    valid_path = False
+    while not valid_path:
+        weights_file = input('Enter the path to a downloaded models file, or ^C to exit: ')
+        valid_path = os.path.exists(weights_file)
+    dir,basename = os.path.split(weights_file)
+
+    valid_name = False
+    while not valid_name:
+        name = input('Enter a short name for this model (no spaces): ')
+        name = 'unnamed model' if len(name)==0 else name
+        valid_name = ' ' not in name
+
+    description = input('Enter a description for this model: ')
+    description = 'no description' if len(description)==0 else description
+
+    with open(opt.conf, 'w', encoding='utf-8') as f:
+        f.write(f'{name}:\n')
+        f.write(f'    description: {description}\n')
+        f.write(f'    weights: {weights_file}\n')
+        f.write(f'    config: ./configs/stable-diffusion/v1-inference.yaml\n')
+        f.write(f'    width: 512\n')
+        f.write(f'    height: 512\n')
+        f.write(f'    default: true\n')
+    print(f'Config file {opt.conf} is created. This script will now exit.')
+    print(f'After restarting you may examine the entry with !models and edit it with !edit.')
+
 ######################################
 
 if __name__ == '__main__':
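
For reference, answering the three prompts in the new emergency_model_create() leaves a
configs/models.yaml shaped roughly like the sketch below; the model name, description, and
weights path here are illustrative placeholders, not values taken from the patch:

    my-model:
        description: no description
        weights: /path/to/model.ckpt
        config: ./configs/stable-diffusion/v1-inference.yaml
        width: 512
        height: 512
        default: true

Since the single entry is written with default: true, it should be the model that invoke.py
loads when the script is restarted, and the !models and !edit commands can then be used to
inspect or refine it.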