Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)
add logging support
This commit adds invokeai.backend.util.logging, which provides support for
formatted console and logfile messages that follow the status reporting
conventions of earlier InvokeAI versions. Examples:

    ### A critical error     (logging.CRITICAL)
    *** A non-fatal error    (logging.ERROR)
    ** A warning             (logging.WARNING)
    >> Informational message (logging.INFO)
       | Debugging message   (logging.DEBUG)

This style logs everything through a single logging object and is identical
to using Python's `logging` module. The commonly-used module-level logging
functions are implemented as simple pass-thrus to logging:

    import invokeai.backend.util.logging as ialog

    ialog.debug('this is a debugging message')
    ialog.info('this is an informational message')
    ialog.log(logging.CRITICAL, 'get out of dodge')
    ialog.disable(level=logging.INFO)
    ialog.basicConfig(filename='/var/log/invokeai.log')

Internally, the invokeai logging module creates a new default logger named
"invokeai" so that its logging does not interfere with other modules' use of
the vanilla logging module. So `logging.error("foo")` will go through the
regular logging path and not add the additional message decorations.

For more control, the logging module's object-oriented logging style is also
supported. The API is identical to the vanilla logging usage. In fact, the
only thing that has changed is that the getLogger() method adds a custom
formatter to the log messages:

    import logging
    from invokeai.backend.util.logging import InvokeAILogger

    logger = InvokeAILogger.getLogger(__name__)
    fh = logging.FileHandler('/var/invokeai.log')
    logger.addHandler(fh)
    logger.critical('this will be logged to both the console and the log file')
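The level-to-prefix mapping above can be expressed as a small logging.Formatter
subclass. The sketch below is illustrative only: the class name PrefixFormatter,
the exact prefix spacing, and the handler wiring are assumptions made for the
example, not the actual InvokeAILogFormatter added by this commit (see
invokeai.backend.util.logging in the diff below).

    import logging

    class PrefixFormatter(logging.Formatter):
        """Prepend an InvokeAI-style status prefix to each record (sketch only)."""

        PREFIXES = {
            logging.CRITICAL: "### ",
            logging.ERROR: "*** ",
            logging.WARNING: "** ",
            logging.INFO: ">> ",
            logging.DEBUG: "   | ",
        }

        def format(self, record: logging.LogRecord) -> str:
            # Format the record normally, then prepend the level's prefix.
            prefix = self.PREFIXES.get(record.levelno, "")
            return prefix + super().format(record)

    # Attach the formatter to a handler on a dedicated "invokeai" logger, so
    # other modules' use of the vanilla logging module is unaffected.
    logger = logging.getLogger("invokeai")
    handler = logging.StreamHandler()
    handler.setFormatter(PrefixFormatter("%(message)s"))
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)

    logger.info("Model loaded")    # prints ">> Model loaded"
    logger.warning("Low VRAM")     # prints "** Low VRAM"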
This commit is contained in:
parent 5a4765046e
commit 8917a6d99b
@@ -28,22 +28,28 @@ import invokeai.backend.util.logging as ialog
 ialog.debug('this is a debugging message')
 """
 import logging
-import sys

-def debug(msg:str):
-    InvokeAILogger.getLogger().debug(msg)
+# module level functions
+def debug(msg, *args, **kwargs):
+    InvokeAILogger.getLogger().debug(msg, *args, **kwargs)

-def info(msg:str):
-    InvokeAILogger.getLogger().info(msg)
+def info(msg, *args, **kwargs):
+    InvokeAILogger.getLogger().info(msg, *args, **kwargs)

-def warning(msg:str):
-    InvokeAILogger.getLogger().warning(msg)
+def warning(msg, *args, **kwargs):
+    InvokeAILogger.getLogger().warning(msg, *args, **kwargs)

-def error(msg:str):
-    InvokeAILogger.getLogger().error(msg)
+def error(msg, *args, **kwargs):
+    InvokeAILogger.getLogger().error(msg, *args, **kwargs)

-def critical(msg:str):
-    InvokeAILogger.getLogger().critical(msg)
+def critical(msg, *args, **kwargs):
+    InvokeAILogger.getLogger().critical(msg, *args, **kwargs)
+
+def log(level, msg, *args, **kwargs):
+    InvokeAILogger.getLogger().log(level, msg, *args, **kwargs)
+
+def disable(level=logging.CRITICAL):
+    InvokeAILogger.getLogger().disable(level)

 class InvokeAILogFormatter(logging.Formatter):
     '''
@@ -16,6 +16,7 @@ if sys.platform == "darwin":
 import pyparsing  # type: ignore

 import invokeai.version as invokeai
+import invokeai.backend.util.logging as ialog

 from ...backend import Generate, ModelManager
 from ...backend.args import Args, dream_cmd_from_png, metadata_dumps, metadata_from_png
@@ -69,7 +70,7 @@ def main():
     # run any post-install patches needed
     run_patches()

-    print(f">> Internet connectivity is {Globals.internet_available}")
+    ialog.info(f"Internet connectivity is {Globals.internet_available}")

     if not args.conf:
         config_file = os.path.join(Globals.root, "configs", "models.yaml")
@@ -78,8 +79,8 @@ def main():
             opt, FileNotFoundError(f"The file {config_file} could not be found.")
         )

-    print(f">> {invokeai.__app_name__}, version {invokeai.__version__}")
-    print(f'>> InvokeAI runtime directory is "{Globals.root}"')
+    ialog.info(f"{invokeai.__app_name__}, version {invokeai.__version__}")
+    ialog.info(f'InvokeAI runtime directory is "{Globals.root}"')

     # loading here to avoid long delays on startup
     # these two lines prevent a horrible warning message from appearing
@@ -121,7 +122,7 @@ def main():
            else:
                raise FileNotFoundError(f"{opt.infile} not found.")
        except (FileNotFoundError, IOError) as e:
-            print(f"{e}. Aborting.")
+            ialog.critical('Aborted',exc_info=True)
            sys.exit(-1)

    # creating a Generate object:
@@ -142,12 +143,12 @@ def main():
        )
    except (FileNotFoundError, TypeError, AssertionError) as e:
        report_model_error(opt, e)
-    except (IOError, KeyError) as e:
-        print(f"{e}. Aborting.")
+    except (IOError, KeyError):
+        ialog.critical("Aborted",exc_info=True)
        sys.exit(-1)

    if opt.seamless:
-        print(">> changed to seamless tiling mode")
+        ialog.info("Changed to seamless tiling mode")

    # preload the model
    try:
@@ -180,9 +181,7 @@ def main():
            f'\nGoodbye!\nYou can start InvokeAI again by running the "invoke.bat" (or "invoke.sh") script from {Globals.root}'
        )
    except Exception:
-        print(">> An error occurred:")
-        traceback.print_exc()
-
+        ialog.error("An error occurred",exc_info=True)

 # TODO: main_loop() has gotten busy. Needs to be refactored.
 def main_loop(gen, opt):
@@ -248,7 +247,7 @@ def main_loop(gen, opt):
                if not opt.prompt:
                    oldargs = metadata_from_png(opt.init_img)
                    opt.prompt = oldargs.prompt
-                    print(f'>> Retrieved old prompt "{opt.prompt}" from {opt.init_img}')
+                    ialog.info(f'Retrieved old prompt "{opt.prompt}" from {opt.init_img}')
            except (OSError, AttributeError, KeyError):
                pass

@@ -265,9 +264,9 @@ def main_loop(gen, opt):
        if opt.init_img is not None and re.match("^-\\d+$", opt.init_img):
            try:
                opt.init_img = last_results[int(opt.init_img)][0]
-                print(f">> Reusing previous image {opt.init_img}")
+                ialog.info(f"Reusing previous image {opt.init_img}")
            except IndexError:
-                print(f">> No previous initial image at position {opt.init_img} found")
+                ialog.info(f"No previous initial image at position {opt.init_img} found")
                opt.init_img = None
                continue

@@ -288,9 +287,9 @@ def main_loop(gen, opt):
        if opt.seed is not None and opt.seed < 0 and operation != "postprocess":
            try:
                opt.seed = last_results[opt.seed][1]
-                print(f">> Reusing previous seed {opt.seed}")
+                ialog.info(f"Reusing previous seed {opt.seed}")
            except IndexError:
-                print(f">> No previous seed at position {opt.seed} found")
+                ialog.info(f"No previous seed at position {opt.seed} found")
                opt.seed = None
                continue

@@ -309,7 +308,7 @@ def main_loop(gen, opt):
            subdir = subdir[: (path_max - 39 - len(os.path.abspath(opt.outdir)))]
        current_outdir = os.path.join(opt.outdir, subdir)

-        print('Writing files to directory: "' + current_outdir + '"')
+        ialog.info('Writing files to directory: "' + current_outdir + '"')

        # make sure the output directory exists
        if not os.path.exists(current_outdir):
@@ -438,15 +437,14 @@ def main_loop(gen, opt):
                    catch_interrupts=catch_ctrl_c,
                    **vars(opt),
                )
-            except (PromptParser.ParsingException, pyparsing.ParseException) as e:
-                print("** An error occurred while processing your prompt **")
-                print(f"** {str(e)} **")
+            except (PromptParser.ParsingException, pyparsing.ParseException):
+                ialog.error("An error occurred while processing your prompt",exc_info=True)
        elif operation == "postprocess":
-            print(f">> fixing {opt.prompt}")
+            ialog.info(f"fixing {opt.prompt}")
            opt.last_operation = do_postprocess(gen, opt, image_writer)

        elif operation == "mask":
-            print(f">> generating masks from {opt.prompt}")
+            ialog.info(f"generating masks from {opt.prompt}")
            do_textmask(gen, opt, image_writer)

        if opt.grid and len(grid_images) > 0:
@@ -469,12 +467,12 @@ def main_loop(gen, opt):
                )
                results = [[path, formatted_dream_prompt]]

-            except AssertionError as e:
-                print(e)
+            except AssertionError:
+                ialog.error(e)
                continue

            except OSError as e:
-                print(e)
+                ialog.error(e)
                continue

        print("Outputs:")
@@ -513,7 +511,7 @@ def do_command(command: str, gen, opt: Args, completer) -> tuple:
            gen.set_model(model_name)
            add_embedding_terms(gen, completer)
        except KeyError as e:
-            print(str(e))
+            ialog.error(e)
        except Exception as e:
            report_model_error(opt, e)
        completer.add_history(command)
@@ -527,8 +525,8 @@ def do_command(command: str, gen, opt: Args, completer) -> tuple:
    elif command.startswith("!import"):
        path = shlex.split(command)
        if len(path) < 2:
-            print(
-                "** please provide (1) a URL to a .ckpt file to import; (2) a local path to a .ckpt file; or (3) a diffusers repository id in the form stabilityai/stable-diffusion-2-1"
+            ialog.warning(
+                "please provide (1) a URL to a .ckpt file to import; (2) a local path to a .ckpt file; or (3) a diffusers repository id in the form stabilityai/stable-diffusion-2-1"
            )
        else:
            try:
@@ -541,7 +539,7 @@ def do_command(command: str, gen, opt: Args, completer) -> tuple:
    elif command.startswith(("!convert", "!optimize")):
        path = shlex.split(command)
        if len(path) < 2:
-            print("** please provide the path to a .ckpt or .safetensors model")
+            ialog.warning("please provide the path to a .ckpt or .safetensors model")
        else:
            try:
                convert_model(path[1], gen, opt, completer)
@@ -553,7 +551,7 @@ def do_command(command: str, gen, opt: Args, completer) -> tuple:
    elif command.startswith("!edit"):
        path = shlex.split(command)
        if len(path) < 2:
-            print("** please provide the name of a model")
+            ialog.warning("please provide the name of a model")
        else:
            edit_model(path[1], gen, opt, completer)
        completer.add_history(command)
@@ -562,7 +560,7 @@ def do_command(command: str, gen, opt: Args, completer) -> tuple:
    elif command.startswith("!del"):
        path = shlex.split(command)
        if len(path) < 2:
-            print("** please provide the name of a model")
+            ialog.warning("please provide the name of a model")
        else:
            del_config(path[1], gen, opt, completer)
        completer.add_history(command)
@@ -642,8 +640,8 @@ def import_model(model_path: str, gen, opt, completer):
        try:
            default_name = url_attachment_name(model_path)
            default_name = Path(default_name).stem
-        except Exception as e:
-            print(f"** URL: {str(e)}")
+        except Exception:
+            ialog.warning(f"A problem occurred while assigning the name of the downloaded model",exc_info=True)
    model_name, model_desc = _get_model_name_and_desc(
        gen.model_manager,
        completer,
@@ -664,11 +662,11 @@ def import_model(model_path: str, gen, opt, completer):
        model_config_file=config_file,
    )
    if not imported_name:
-        print("** Aborting import.")
+        ialog.error("Aborting import.")
        return

    if not _verify_load(imported_name, gen):
-        print("** model failed to load. Discarding configuration entry")
+        ialog.error("model failed to load. Discarding configuration entry")
        gen.model_manager.del_model(imported_name)
        return
    if click.confirm("Make this the default model?", default=False):
@@ -676,7 +674,7 @@ def import_model(model_path: str, gen, opt, completer):

    gen.model_manager.commit(opt.conf)
    completer.update_models(gen.model_manager.list_models())
-    print(f">> {imported_name} successfully installed")
+    ialog.info(f"{imported_name} successfully installed")

 def _pick_configuration_file(completer)->Path:
    print(
@@ -720,21 +718,21 @@ Please select the type of this model:
    return choice

 def _verify_load(model_name: str, gen) -> bool:
-    print(">> Verifying that new model loads...")
+    ialog.info("Verifying that new model loads...")
    current_model = gen.model_name
    try:
        if not gen.set_model(model_name):
            return
    except Exception as e:
-        print(f"** model failed to load: {str(e)}")
-        print(
+        ialog.warning(f"model failed to load: {str(e)}")
+        ialog.warning(
            "** note that importing 2.X checkpoints is not supported. Please use !convert_model instead."
        )
        return False
    if click.confirm("Keep model loaded?", default=True):
        gen.set_model(model_name)
    else:
-        print(">> Restoring previous model")
+        ialog.info("Restoring previous model")
        gen.set_model(current_model)
    return True

@@ -757,7 +755,7 @@ def convert_model(model_name_or_path: Union[Path, str], gen, opt, completer):
    ckpt_path = None
    original_config_file = None
    if model_name_or_path == gen.model_name:
-        print("** Can't convert the active model. !switch to another model first. **")
+        ialog.warning("Can't convert the active model. !switch to another model first. **")
        return
    elif model_info := manager.model_info(model_name_or_path):
        if "weights" in model_info:
@@ -767,7 +765,7 @@ def convert_model(model_name_or_path: Union[Path, str], gen, opt, completer):
        model_description = model_info["description"]
        vae_path = model_info.get("vae")
    else:
-        print(f"** {model_name_or_path} is not a legacy .ckpt weights file")
+        ialog.warning(f"{model_name_or_path} is not a legacy .ckpt weights file")
        return
    model_name = manager.convert_and_import(
        ckpt_path,
@@ -788,16 +786,16 @@ def convert_model(model_name_or_path: Union[Path, str], gen, opt, completer):
    manager.commit(opt.conf)
    if click.confirm(f"Delete the original .ckpt file at {ckpt_path}?", default=False):
        ckpt_path.unlink(missing_ok=True)
-        print(f"{ckpt_path} deleted")
+        ialog.warning(f"{ckpt_path} deleted")


 def del_config(model_name: str, gen, opt, completer):
    current_model = gen.model_name
    if model_name == current_model:
-        print("** Can't delete active model. !switch to another model first. **")
+        ialog.warning("Can't delete active model. !switch to another model first. **")
        return
    if model_name not in gen.model_manager.config:
-        print(f"** Unknown model {model_name}")
+        ialog.warning(f"Unknown model {model_name}")
        return

    if not click.confirm(
@@ -810,17 +808,17 @@ def del_config(model_name: str, gen, opt, completer):
    )
    gen.model_manager.del_model(model_name, delete_files=delete_completely)
    gen.model_manager.commit(opt.conf)
-    print(f"** {model_name} deleted")
+    ialog.warning(f"{model_name} deleted")
    completer.update_models(gen.model_manager.list_models())


 def edit_model(model_name: str, gen, opt, completer):
    manager = gen.model_manager
    if not (info := manager.model_info(model_name)):
-        print(f"** Unknown model {model_name}")
+        ialog.warning(f"** Unknown model {model_name}")
        return
-
-    print(f"\n>> Editing model {model_name} from configuration file {opt.conf}")
+    print()
+    ialog.info(f"Editing model {model_name} from configuration file {opt.conf}")
    new_name = _get_model_name(manager.list_models(), completer, model_name)

    for attribute in info.keys():
@@ -858,7 +856,7 @@ def edit_model(model_name: str, gen, opt, completer):
        manager.set_default_model(new_name)
    manager.commit(opt.conf)
    completer.update_models(manager.list_models())
-    print(">> Model successfully updated")
+    ialog.info("Model successfully updated")


 def _get_model_name(existing_names, completer, default_name: str = "") -> str:
@@ -869,11 +867,11 @@ def _get_model_name(existing_names, completer, default_name: str = "") -> str:
        if len(model_name) == 0:
            model_name = default_name
        if not re.match("^[\w._+:/-]+$", model_name):
-            print(
-                '** model name must contain only words, digits and the characters "._+:/-" **'
+            ialog.warning(
+                'model name must contain only words, digits and the characters "._+:/-" **'
            )
        elif model_name != default_name and model_name in existing_names:
-            print(f"** the name {model_name} is already in use. Pick another.")
+            ialog.warning(f"the name {model_name} is already in use. Pick another.")
        else:
            done = True
    return model_name
@@ -940,11 +938,10 @@ def do_postprocess(gen, opt, callback):
            opt=opt,
        )
    except OSError:
-        print(traceback.format_exc(), file=sys.stderr)
-        print(f"** {file_path}: file could not be read")
+        ialog.error(f"{file_path}: file could not be read",exc_info=True)
        return
    except (KeyError, AttributeError):
-        print(traceback.format_exc(), file=sys.stderr)
+        ialog.error(f"an error occurred while applying the {tool} postprocessor",exc_info=True)
        return
    return opt.last_operation

@@ -999,13 +996,13 @@ def prepare_image_metadata(
        try:
            filename = opt.fnformat.format(**wildcards)
        except KeyError as e:
-            print(
-                f"** The filename format contains an unknown key '{e.args[0]}'. Will use {{prefix}}.{{seed}}.png' instead"
+            ialog.error(
+                f"The filename format contains an unknown key '{e.args[0]}'. Will use {{prefix}}.{{seed}}.png' instead"
            )
            filename = f"{prefix}.{seed}.png"
        except IndexError:
-            print(
-                "** The filename format is broken or complete. Will use '{prefix}.{seed}.png' instead"
+            ialog.error(
+                "The filename format is broken or complete. Will use '{prefix}.{seed}.png' instead"
            )
            filename = f"{prefix}.{seed}.png"

@@ -1094,14 +1091,14 @@ def split_variations(variations_string) -> list:
    for part in variations_string.split(","):
        seed_and_weight = part.split(":")
        if len(seed_and_weight) != 2:
-            print(f'** Could not parse with_variation part "{part}"')
+            ialog.warning(f'Could not parse with_variation part "{part}"')
            broken = True
            break
        try:
            seed = int(seed_and_weight[0])
            weight = float(seed_and_weight[1])
        except ValueError:
-            print(f'** Could not parse with_variation part "{part}"')
+            ialog.warning(f'Could not parse with_variation part "{part}"')
            broken = True
            break
        parts.append([seed, weight])
@@ -1125,23 +1122,23 @@ def load_face_restoration(opt):
                    opt.gfpgan_model_path
                )
            else:
-                print(">> Face restoration disabled")
+                ialog.info("Face restoration disabled")
            if opt.esrgan:
                esrgan = restoration.load_esrgan(opt.esrgan_bg_tile)
            else:
-                print(">> Upscaling disabled")
+                ialog.info("Upscaling disabled")
        else:
-            print(">> Face restoration and upscaling disabled")
+            ialog.info("Face restoration and upscaling disabled")
    except (ModuleNotFoundError, ImportError):
        print(traceback.format_exc(), file=sys.stderr)
-        print(">> You may need to install the ESRGAN and/or GFPGAN modules")
+        ialog.info("You may need to install the ESRGAN and/or GFPGAN modules")
    return gfpgan, codeformer, esrgan


 def make_step_callback(gen, opt, prefix):
    destination = os.path.join(opt.outdir, "intermediates", prefix)
    os.makedirs(destination, exist_ok=True)
-    print(f">> Intermediate images will be written into {destination}")
+    ialog.info(f"Intermediate images will be written into {destination}")

    def callback(state: PipelineIntermediateState):
        latents = state.latents
@@ -1183,21 +1180,20 @@ def retrieve_dream_command(opt, command, completer):
    try:
        cmd = dream_cmd_from_png(path)
    except OSError:
-        print(f"## {tokens[0]}: file could not be read")
+        ialog.error(f"{tokens[0]}: file could not be read")
    except (KeyError, AttributeError, IndexError):
-        print(f"## {tokens[0]}: file has no metadata")
+        ialog.error(f"{tokens[0]}: file has no metadata")
    except:
-        print(f"## {tokens[0]}: file could not be processed")
+        ialog.error(f"{tokens[0]}: file could not be processed")
    if len(cmd) > 0:
        completer.set_line(cmd)

-
 def write_commands(opt, file_path: str, outfilepath: str):
    dir, basename = os.path.split(file_path)
    try:
        paths = sorted(list(Path(dir).glob(basename)))
    except ValueError:
-        print(f'## "{basename}": unacceptable pattern')
+        ialog.error(f'"{basename}": unacceptable pattern')
        return

    commands = []
@@ -1206,9 +1202,9 @@ def write_commands(opt, file_path: str, outfilepath: str):
        try:
            cmd = dream_cmd_from_png(path)
        except (KeyError, AttributeError, IndexError):
-            print(f"## {path}: file has no metadata")
+            ialog.error(f"{path}: file has no metadata")
        except:
-            print(f"## {path}: file could not be processed")
+            ialog.error(f"{path}: file could not be processed")
        if cmd:
            commands.append(f"# {path}")
            commands.append(cmd)
@@ -1218,18 +1214,18 @@ def write_commands(opt, file_path: str, outfilepath: str):
        outfilepath = os.path.join(opt.outdir, basename)
    with open(outfilepath, "w", encoding="utf-8") as f:
        f.write("\n".join(commands))
-    print(f">> File {outfilepath} with commands created")
+    ialog.info(f"File {outfilepath} with commands created")


 def report_model_error(opt: Namespace, e: Exception):
-    print(f'** An error occurred while attempting to initialize the model: "{str(e)}"')
-    print(
-        "** This can be caused by a missing or corrupted models file, and can sometimes be fixed by (re)installing the models."
+    ialog.warning(f'An error occurred while attempting to initialize the model: "{str(e)}"')
+    ialog.warning(
+        "This can be caused by a missing or corrupted models file, and can sometimes be fixed by (re)installing the models."
    )
    yes_to_all = os.environ.get("INVOKE_MODEL_RECONFIGURE")
    if yes_to_all:
-        print(
-            "** Reconfiguration is being forced by environment variable INVOKE_MODEL_RECONFIGURE"
+        ialog.warning(
+            "Reconfiguration is being forced by environment variable INVOKE_MODEL_RECONFIGURE"
        )
    else:
        if not click.confirm(
@@ -1238,7 +1234,7 @@ def report_model_error(opt: Namespace, e: Exception):
        ):
            return

-    print("invokeai-configure is launching....\n")
+    ialog.info("invokeai-configure is launching....\n")

    # Match arguments that were set on the CLI
    # only the arguments accepted by the configuration script are parsed
@@ -1255,7 +1251,7 @@ def report_model_error(opt: Namespace, e: Exception):
    from ..install import invokeai_configure

    invokeai_configure()
-    print("** InvokeAI will now restart")
+    ialog.warning("InvokeAI will now restart")
    sys.argv = previous_args
    main()  # would rather do a os.exec(), but doesn't exist?
    sys.exit(0)