diff --git a/invokeai/app/api/dependencies.py b/invokeai/app/api/dependencies.py
index c2a32010c5..9db35fb5c3 100644
--- a/invokeai/app/api/dependencies.py
+++ b/invokeai/app/api/dependencies.py
@@ -49,7 +49,7 @@ def check_internet() -> bool:
         return False


-logger = InvokeAILogger.getLogger()
+logger = InvokeAILogger.get_logger()


 class ApiDependencies:
diff --git a/invokeai/app/api_app.py b/invokeai/app/api_app.py
index c93197d1bf..682cafb5ab 100644
--- a/invokeai/app/api_app.py
+++ b/invokeai/app/api_app.py
@@ -41,7 +41,9 @@ if True:  # hack to make flake8 happy with imports coming after setting up the c
         import invokeai.backend.util.mps_fixes  # noqa: F401 (monkeypatching on import)


-logger = InvokeAILogger.getLogger(config=app_config)
+app_config = InvokeAIAppConfig.get_config()
+app_config.parse_args()
+logger = InvokeAILogger.get_logger(config=app_config)

 # fix for windows mimetypes registry entries being borked
 # see https://github.com/invoke-ai/InvokeAI/discussions/3684#discussioncomment-6391352
@@ -223,7 +225,7 @@ def invoke_api():
                 exc_info=e,
             )
         else:
-            jurigged.watch(logger=InvokeAILogger.getLogger(name="jurigged").info)
+            jurigged.watch(logger=InvokeAILogger.get_logger(name="jurigged").info)

     port = find_port(app_config.port)
     if port != app_config.port:
@@ -242,7 +244,7 @@ def invoke_api():

     # replace uvicorn's loggers with InvokeAI's for consistent appearance
     for logname in ["uvicorn.access", "uvicorn"]:
-        log = logging.getLogger(logname)
+        log = InvokeAILogger.get_logger(logname)
         log.handlers.clear()
         for ch in logger.handlers:
             log.addHandler(ch)
diff --git a/invokeai/app/cli_app.py b/invokeai/app/cli_app.py
index dc59954e9b..2f8a4d2cbd 100644
--- a/invokeai/app/cli_app.py
+++ b/invokeai/app/cli_app.py
@@ -7,8 +7,6 @@ from .services.config import InvokeAIAppConfig
 # parse_args() must be called before any other imports. if it is not called first, consumers of the config
 # which are imported/used before parse_args() is called will get the default config values instead of the
 # values from the command line or config file.
-config = InvokeAIAppConfig.get_config()
-config.parse_args()

 if True:  # hack to make flake8 happy with imports coming after setting up the config
     import argparse
@@ -61,8 +59,9 @@ if True:  # hack to make flake8 happy with imports coming after setting up the c
     if torch.backends.mps.is_available():
         import invokeai.backend.util.mps_fixes  # noqa: F401 (monkeypatching on import)

-
-logger = InvokeAILogger().getLogger(config=config)
+config = InvokeAIAppConfig.get_config()
+config.parse_args()
+logger = InvokeAILogger().get_logger(config=config)


 class CliCommand(BaseModel):
diff --git a/invokeai/backend/install/invokeai_configure.py b/invokeai/backend/install/invokeai_configure.py
index 0b3f50e3fc..ec2221e12d 100755
--- a/invokeai/backend/install/invokeai_configure.py
+++ b/invokeai/backend/install/invokeai_configure.py
@@ -93,7 +93,7 @@ INIT_FILE_PREAMBLE = """# InvokeAI initialization file
 # or renaming it and then running invokeai-configure again.
 """

-logger = InvokeAILogger.getLogger()
+logger = InvokeAILogger.get_logger()


 class DummyWidgetValue(Enum):
@@ -894,7 +894,7 @@ def main():
     if opt.full_precision:
         invoke_args.extend(["--precision", "float32"])
     config.parse_args(invoke_args)
-    logger = InvokeAILogger().getLogger(config=config)
+    logger = InvokeAILogger().get_logger(config=config)

     errors = set()

diff --git a/invokeai/backend/install/model_install_backend.py b/invokeai/backend/install/model_install_backend.py
index 667111047f..6133a26ec1 100644
--- a/invokeai/backend/install/model_install_backend.py
+++ b/invokeai/backend/install/model_install_backend.py
@@ -30,7 +30,7 @@ warnings.filterwarnings("ignore")

 # --------------------------globals-----------------------
 config = InvokeAIAppConfig.get_config()
-logger = InvokeAILogger.getLogger(name="InvokeAI")
+logger = InvokeAILogger.get_logger(name="InvokeAI")

 # the initial "configs" dir is now bundled in the `invokeai.configs` package
 Dataset_path = Path(configs.__path__[0]) / "INITIAL_MODELS.yaml"
@@ -492,7 +492,7 @@ def yes_or_no(prompt: str, default_yes=True):

 # ---------------------------------------------
 def hf_download_from_pretrained(model_class: object, model_name: str, destination: Path, **kwargs):
-    logger = InvokeAILogger.getLogger("InvokeAI")
+    logger = InvokeAILogger.get_logger("InvokeAI")
     logger.addFilter(lambda x: "fp16 is not a valid" not in x.getMessage())

     model = model_class.from_pretrained(
diff --git a/invokeai/backend/model_management/convert_ckpt_to_diffusers.py b/invokeai/backend/model_management/convert_ckpt_to_diffusers.py
index 69d32a49c7..15712c2ee8 100644
--- a/invokeai/backend/model_management/convert_ckpt_to_diffusers.py
+++ b/invokeai/backend/model_management/convert_ckpt_to_diffusers.py
@@ -74,7 +74,7 @@ if is_accelerate_available():
     from accelerate import init_empty_weights
     from accelerate.utils import set_module_tensor_to_device

-logger = InvokeAILogger.getLogger(__name__)
+logger = InvokeAILogger.get_logger(__name__)

 CONVERT_MODEL_ROOT = InvokeAIAppConfig.get_config().models_path / "core/convert"

diff --git a/invokeai/backend/util/hotfixes.py b/invokeai/backend/util/hotfixes.py
index 852d640161..fb1297996c 100644
--- a/invokeai/backend/util/hotfixes.py
+++ b/invokeai/backend/util/hotfixes.py
@@ -24,7 +24,7 @@ from invokeai.backend.util.logging import InvokeAILogger

 # Modified ControlNetModel with encoder_attention_mask argument added

-logger = InvokeAILogger.getLogger(__name__)
+logger = InvokeAILogger.get_logger(__name__)


 class ControlNetModel(ModelMixin, ConfigMixin, FromOriginalControlnetMixin):
diff --git a/invokeai/backend/util/logging.py b/invokeai/backend/util/logging.py
index accbc407f7..3c829a1a02 100644
--- a/invokeai/backend/util/logging.py
+++ b/invokeai/backend/util/logging.py
@@ -1,7 +1,6 @@
 # Copyright (c) 2023 Lincoln D. Stein and The InvokeAI Development Team
-"""
-invokeai.backend.util.logging
+"""invokeai.backend.util.logging

 Logging class for InvokeAI that produces console messages


@@ -9,9 +8,9 @@ Usage:

 from invokeai.backend.util.logging import InvokeAILogger

-logger = InvokeAILogger.getLogger(name='InvokeAI') // Initialization
+logger = InvokeAILogger.get_logger(name='InvokeAI') // Initialization
 (or)
-logger = InvokeAILogger.getLogger(__name__) // To use the filename
+logger = InvokeAILogger.get_logger(__name__) // To use the filename
 logger.configure()

 logger.critical('this is critical') // Critical Message
@@ -34,13 +33,13 @@ IAILogger.debug('this is a debugging message')
 ## Configuration

 The default configuration will print to stderr on the console. To add
-additional logging handlers, call getLogger with an initialized InvokeAIAppConfig
+additional logging handlers, call get_logger with an initialized InvokeAIAppConfig
 object:

     config = InvokeAIAppConfig.get_config()
     config.parse_args()
-    logger = InvokeAILogger.getLogger(config=config)
+    logger = InvokeAILogger.get_logger(config=config)

 ### Three command-line options control logging:

@@ -173,6 +172,7 @@ InvokeAI:
   log_level: info
   log_format: color
 ```
+
 """

 import logging.handlers
@@ -193,39 +193,35 @@ except ImportError:

 # module level functions
 def debug(msg, *args, **kwargs):
-    InvokeAILogger.getLogger().debug(msg, *args, **kwargs)
+    InvokeAILogger.get_logger().debug(msg, *args, **kwargs)


 def info(msg, *args, **kwargs):
-    InvokeAILogger.getLogger().info(msg, *args, **kwargs)
+    InvokeAILogger.get_logger().info(msg, *args, **kwargs)


 def warning(msg, *args, **kwargs):
-    InvokeAILogger.getLogger().warning(msg, *args, **kwargs)
+    InvokeAILogger.get_logger().warning(msg, *args, **kwargs)


 def error(msg, *args, **kwargs):
-    InvokeAILogger.getLogger().error(msg, *args, **kwargs)
+    InvokeAILogger.get_logger().error(msg, *args, **kwargs)


 def critical(msg, *args, **kwargs):
-    InvokeAILogger.getLogger().critical(msg, *args, **kwargs)
+    InvokeAILogger.get_logger().critical(msg, *args, **kwargs)


 def log(level, msg, *args, **kwargs):
-    InvokeAILogger.getLogger().log(level, msg, *args, **kwargs)
+    InvokeAILogger.get_logger().log(level, msg, *args, **kwargs)


 def disable(level=logging.CRITICAL):
-    InvokeAILogger.getLogger().disable(level)
+    InvokeAILogger.get_logger().disable(level)


 def basicConfig(**kwargs):
-    InvokeAILogger.getLogger().basicConfig(**kwargs)
-
-
-def getLogger(name: str = None) -> logging.Logger:
-    return InvokeAILogger.getLogger(name)
+    InvokeAILogger.get_logger().basicConfig(**kwargs)


 _FACILITY_MAP = (
@@ -351,7 +347,7 @@ class InvokeAILogger(object):
     loggers = dict()

     @classmethod
-    def getLogger(
+    def get_logger(
         cls, name: str = "InvokeAI", config: InvokeAIAppConfig = InvokeAIAppConfig.get_config()
     ) -> logging.Logger:
         if name in cls.loggers:
@@ -360,13 +356,13 @@ class InvokeAILogger(object):
         else:
             logger = logging.getLogger(name)
             logger.setLevel(config.log_level.upper())  # yes, strings work here
-            for ch in cls.getLoggers(config):
+            for ch in cls.get_loggers(config):
                 logger.addHandler(ch)
             cls.loggers[name] = logger
         return cls.loggers[name]

     @classmethod
-    def getLoggers(cls, config: InvokeAIAppConfig) -> list[logging.Handler]:
+    def get_loggers(cls, config: InvokeAIAppConfig) -> list[logging.Handler]:
         handler_strs = config.log_handlers
         handlers = list()
         for handler in handler_strs:
diff --git a/invokeai/frontend/install/model_install.py b/invokeai/frontend/install/model_install.py
index fae67df736..64ad3a7d77 100644
--- a/invokeai/frontend/install/model_install.py
+++ b/invokeai/frontend/install/model_install.py
@@ -45,7 +45,7 @@ from invokeai.frontend.install.widgets import (
 )

 config = InvokeAIAppConfig.get_config()
-logger = InvokeAILogger.getLogger()
+logger = InvokeAILogger.get_logger()

 # build a table mapping all non-printable characters to None
 # for stripping control characters
@@ -652,7 +652,7 @@ def process_and_execute(
     translator = StderrToMessage(conn_out)
     sys.stderr = translator
     sys.stdout = translator
-    logger = InvokeAILogger.getLogger()
+    logger = InvokeAILogger.get_logger()
     logger.handlers.clear()
     logger.addHandler(logging.StreamHandler(translator))

@@ -765,7 +765,7 @@ def main():
     if opt.full_precision:
         invoke_args.extend(["--precision", "float32"])
     config.parse_args(invoke_args)
-    logger = InvokeAILogger().getLogger(config=config)
+    logger = InvokeAILogger().get_logger(config=config)

     if not config.model_conf_path.exists():
         logger.info("Your InvokeAI root directory is not set up. Calling invokeai-configure.")
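For reference (not part of the patch), a minimal usage sketch of the renamed API, based on the updated docstring in invokeai/backend/util/logging.py and the call sites changed above:

    from invokeai.app.services.config import InvokeAIAppConfig
    from invokeai.backend.util.logging import InvokeAILogger

    # parse_args() must run before anything else reads the config; otherwise
    # consumers see the default values (see the comment kept in cli_app.py).
    config = InvokeAIAppConfig.get_config()
    config.parse_args()

    logger = InvokeAILogger.get_logger(config=config)    # default name "InvokeAI"
    module_logger = InvokeAILogger.get_logger(__name__)  # per-module logger
    logger.info("logging configured via get_logger()")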