resolve conflicts with get_logger() code changes from main

Lincoln Stein
2023-09-24 10:34:06 -04:00
15 changed files with 45 additions and 44 deletions
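
The merge is mechanical: every InvokeAILogger.getLogger(...) call becomes InvokeAILogger.get_logger(...) (and getLoggers becomes get_loggers). A minimal sketch of the resulting call pattern, assuming the import paths shown in the hunks below:

```
from invokeai.app.services.config import InvokeAIAppConfig  # absolute import path assumed
from invokeai.backend.util.logging import InvokeAILogger

# Parse the config first so the logger picks up log_level / log_handlers /
# log_format from the command line or the init file.
config = InvokeAIAppConfig.get_config()
config.parse_args()

# Pre-merge spelling: InvokeAILogger.getLogger(config=config)
logger = InvokeAILogger.get_logger(config=config)
logger.info("logger configured through get_logger()")
```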


@@ -49,7 +49,7 @@ def check_internet() -> bool:
return False
logger = InvokeAILogger.getLogger()
logger = InvokeAILogger.get_logger()
class ApiDependencies:


@@ -41,7 +41,9 @@ if True: # hack to make flake8 happy with imports coming after setting up the c
import invokeai.backend.util.mps_fixes # noqa: F401 (monkeypatching on import)
logger = InvokeAILogger.getLogger(config=app_config)
app_config = InvokeAIAppConfig.get_config()
app_config.parse_args()
logger = InvokeAILogger.get_logger(config=app_config)
# fix for windows mimetypes registry entries being borked
# see https://github.com/invoke-ai/InvokeAI/discussions/3684#discussioncomment-6391352
@@ -223,7 +225,7 @@ def invoke_api():
exc_info=e,
)
else:
jurigged.watch(logger=InvokeAILogger.getLogger(name="jurigged").info)
jurigged.watch(logger=InvokeAILogger.get_logger(name="jurigged").info)
port = find_port(app_config.port)
if port != app_config.port:
@@ -242,7 +244,7 @@ def invoke_api():
# replace uvicorn's loggers with InvokeAI's for consistent appearance
for logname in ["uvicorn.access", "uvicorn"]:
log = logging.getLogger(logname)
log = InvokeAILogger.get_logger(logname)
log.handlers.clear()
for ch in logger.handlers:
log.addHandler(ch)

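The last hunk above re-points uvicorn's loggers at InvokeAI's handlers so all console output shares one format. A stdlib-only restatement of that pattern ("InvokeAI" being the default logger name used throughout this commit):

```
import logging

# The application's configured logger; in InvokeAI this would come from
# InvokeAILogger.get_logger(), which registers handlers under this name.
app_logger = logging.getLogger("InvokeAI")

for logname in ["uvicorn.access", "uvicorn"]:
    lib_logger = logging.getLogger(logname)
    lib_logger.handlers.clear()            # drop the library's own handlers
    for handler in app_logger.handlers:
        lib_logger.addHandler(handler)     # reuse the application's handlers
```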

@@ -7,8 +7,6 @@ from .services.config import InvokeAIAppConfig
# parse_args() must be called before any other imports. if it is not called first, consumers of the config
# which are imported/used before parse_args() is called will get the default config values instead of the
# values from the command line or config file.
config = InvokeAIAppConfig.get_config()
config.parse_args()
if True: # hack to make flake8 happy with imports coming after setting up the config
import argparse
@@ -61,8 +59,9 @@ if True: # hack to make flake8 happy with imports coming after setting up the c
if torch.backends.mps.is_available():
import invokeai.backend.util.mps_fixes # noqa: F401 (monkeypatching on import)
logger = InvokeAILogger().getLogger(config=config)
config = InvokeAIAppConfig.get_config()
config.parse_args()
logger = InvokeAILogger().get_logger(config=config)
class CliCommand(BaseModel):


@@ -293,7 +293,7 @@ class EventServiceBase:
def emit_model_event(self, job: DownloadJobBase):
"""Emit event when the status of a download/install job changes."""
logger = InvokeAILogger.getLogger()
logger = InvokeAILogger.get_logger()
progress = 100 * (job.bytes / job.total_bytes) if job.total_bytes > 0 else 0
logger.info(f"Dispatch model_event for job {job.id}, status={job.status.value}, progress={progress:5.2f}%")
self.dispatch( # use dispatch() directly here because we are not a session event.


@@ -93,10 +93,12 @@ INIT_FILE_PREAMBLE = """# InvokeAI initialization file
# or renaming it and then running invokeai-configure again.
"""
logger = InvokeAILogger.getLogger()
logger = InvokeAILogger.get_logger()
class DummyWidgetValue(Enum):
"""Dummy widget values."""
zero = 0
true = True
false = False
@@ -182,7 +184,6 @@ class ProgressBar:
# ---------------------------------------------
def hf_download_from_pretrained(model_class: object, model_name: str, destination: Path, **kwargs):
logger = InvokeAILogger.getLogger("InvokeAIConfigure")
logger.addFilter(lambda x: "fp16 is not a valid" not in x.getMessage())
model = model_class.from_pretrained(
@@ -909,7 +910,7 @@ def main():
invoke_args.extend(["--precision", "float32"])
config.parse_args(invoke_args)
config.precision = "float32" if opt.full_precision else choose_precision(torch.device(choose_torch_device()))
logger = InvokeAILogger().getLogger(config=config)
logger = InvokeAILogger().get_logger(config=config)
errors = set()

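The addFilter line in the hunk above suppresses one known-noisy message instead of raising the log level. A stdlib-only sketch of that filtering technique:

```
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("InvokeAIConfigure")

# Since Python 3.2 a filter may be any callable taking a LogRecord and
# returning a bool; returning False drops the record.
logger.addFilter(lambda record: "fp16 is not a valid" not in record.getMessage())

logger.info("fp16 is not a valid torch_dtype")  # silently dropped
logger.info("model downloaded")                 # still emitted
```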

@@ -74,7 +74,7 @@ if is_accelerate_available():
from accelerate import init_empty_weights
from accelerate.utils import set_module_tensor_to_device
logger = InvokeAILogger.getLogger(__name__)
logger = InvokeAILogger.get_logger(__name__)
CONVERT_MODEL_ROOT = InvokeAIAppConfig.get_config().models_path / "core/convert"


@@ -105,7 +105,7 @@ class DownloadQueue(DownloadQueueBase):
self._queue = PriorityQueue()
self._worker_pool = set()
self._lock = threading.RLock()
self._logger = InvokeAILogger.getLogger(config=config)
self._logger = InvokeAILogger.get_logger(config=config)
self._event_handlers = event_handlers
self._requests = requests_session or requests.Session()
self._quiet = quiet


@@ -360,7 +360,7 @@ class ModelInstall(ModelInstallBase):
event_handlers: Optional[List[DownloadEventHandler]] = None,
): # noqa D107 - use base class docstrings
self._app_config = config or InvokeAIAppConfig.get_config()
self._logger = logger or InvokeAILogger.getLogger(config=self._app_config)
self._logger = logger or InvokeAILogger.get_logger(config=self._app_config)
self._store = store or get_config_store(self._app_config.model_conf_path)
self._download_queue = download or DownloadQueue(config=self._app_config, event_handlers=event_handlers)
self._async_installs = dict()


@@ -149,7 +149,7 @@ class ModelLoad(ModelLoadBase):
self._app_config = config
self._store = store
self._logger = InvokeAILogger.getLogger()
self._logger = InvokeAILogger.get_logger()
self._installer = ModelInstall(
store=self._store,
logger=self._logger,


@@ -606,7 +606,7 @@ class IAIOnnxRuntimeModel:
def trim_model_convert_cache(cache_path: Path, max_cache_size: int):
current_size = directory_size(cache_path)
logger = InvokeAILogger.getLogger()
logger = InvokeAILogger.get_logger()
if current_size <= max_cache_size:
return


@@ -29,7 +29,7 @@ from pydantic import BaseModel, Field
from invokeai.backend.util.logging import InvokeAILogger
default_logger = InvokeAILogger.getLogger()
default_logger = InvokeAILogger.get_logger()
class SearchStats(BaseModel):


@@ -17,7 +17,7 @@ def migrate_models_store(config: InvokeAIAppConfig):
from invokeai.backend.model_manager.storage import get_config_store
app_config = InvokeAIAppConfig.get_config()
logger = InvokeAILogger.getLogger()
logger = InvokeAILogger.get_logger()
old_file: Path = app_config.model_conf_path
new_file: Path = old_file.with_name("models3_2.yaml")


@@ -24,7 +24,7 @@ from invokeai.backend.util.logging import InvokeAILogger
# Modified ControlNetModel with encoder_attention_mask argument added
logger = InvokeAILogger.getLogger(__name__)
logger = InvokeAILogger.get_logger(__name__)
class ControlNetModel(ModelMixin, ConfigMixin, FromOriginalControlnetMixin):


@@ -1,7 +1,6 @@
# Copyright (c) 2023 Lincoln D. Stein and The InvokeAI Development Team
"""
invokeai.backend.util.logging
"""invokeai.backend.util.logging
Logging class for InvokeAI that produces console messages
@@ -9,9 +8,9 @@ Usage:
from invokeai.backend.util.logging import InvokeAILogger
logger = InvokeAILogger.getLogger(name='InvokeAI') // Initialization
logger = InvokeAILogger.get_logger(name='InvokeAI') // Initialization
(or)
logger = InvokeAILogger.getLogger(__name__) // To use the filename
logger = InvokeAILogger.get_logger(__name__) // To use the filename
logger.configure()
logger.critical('this is critical') // Critical Message
@@ -34,13 +33,13 @@ IAILogger.debug('this is a debugging message')
## Configuration
The default configuration will print to stderr on the console. To add
additional logging handlers, call getLogger with an initialized InvokeAIAppConfig
additional logging handlers, call get_logger with an initialized InvokeAIAppConfig
object:
config = InvokeAIAppConfig.get_config()
config.parse_args()
logger = InvokeAILogger.getLogger(config=config)
logger = InvokeAILogger.get_logger(config=config)
### Three command-line options control logging:
@@ -173,6 +172,7 @@ InvokeAI:
log_level: info
log_format: color
```
"""
import logging.handlers
@@ -194,39 +194,35 @@ except ImportError:
# module level functions
def debug(msg, *args, **kwargs):
InvokeAILogger.getLogger().debug(msg, *args, **kwargs)
InvokeAILogger.get_logger().debug(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
InvokeAILogger.getLogger().info(msg, *args, **kwargs)
InvokeAILogger.get_logger().info(msg, *args, **kwargs)
def warning(msg, *args, **kwargs):
InvokeAILogger.getLogger().warning(msg, *args, **kwargs)
InvokeAILogger.get_logger().warning(msg, *args, **kwargs)
def error(msg, *args, **kwargs):
InvokeAILogger.getLogger().error(msg, *args, **kwargs)
InvokeAILogger.get_logger().error(msg, *args, **kwargs)
def critical(msg, *args, **kwargs):
InvokeAILogger.getLogger().critical(msg, *args, **kwargs)
InvokeAILogger.get_logger().critical(msg, *args, **kwargs)
def log(level, msg, *args, **kwargs):
InvokeAILogger.getLogger().log(level, msg, *args, **kwargs)
InvokeAILogger.get_logger().log(level, msg, *args, **kwargs)
def disable(level=logging.CRITICAL):
InvokeAILogger.getLogger().disable(level)
InvokeAILogger.get_logger().disable(level)
def basicConfig(**kwargs):
InvokeAILogger.getLogger().basicConfig(**kwargs)
def getLogger(name: str = None) -> logging.Logger:
return InvokeAILogger.getLogger(name)
InvokeAILogger.get_logger().basicConfig(**kwargs)
_FACILITY_MAP = (
@@ -352,21 +348,24 @@ class InvokeAILogger(object):
loggers = dict()
@classmethod
def getLogger(cls, name: str = "InvokeAI", config: Optional[InvokeAIAppConfig] = None) -> logging.Logger:
config = config or InvokeAIAppConfig.get_config()
def get_logger(
cls, name: str = "InvokeAI", config: InvokeAIAppConfig = InvokeAIAppConfig.get_config()
) -> logging.Logger:
"""Return a logger appropriately configured for the current InvokeAI configuration."""
if name in cls.loggers:
logger = cls.loggers[name]
logger.handlers.clear()
else:
logger = logging.getLogger(name)
config = config or InvokeAIAppConfig.get_config() # in case None is passed
logger.setLevel(config.log_level.upper()) # yes, strings work here
for ch in cls.getLoggers(config):
for ch in cls.get_loggers(config):
logger.addHandler(ch)
cls.loggers[name] = logger
return cls.loggers[name]
@classmethod
def getLoggers(cls, config: InvokeAIAppConfig) -> list[logging.Handler]:
def get_loggers(cls, config: InvokeAIAppConfig) -> list[logging.Handler]:
handler_strs = config.log_handlers
handlers = list()
for handler in handler_strs:

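Per the get_logger hunk above, loggers are cached per name: a repeat call returns the same object with its handlers cleared and rebuilt from the active config. A short usage sketch of the renamed API as defined in this file:

```
from invokeai.backend.util.logging import InvokeAILogger

# Same name -> same cached logging.Logger instance.
a = InvokeAILogger.get_logger("InvokeAI")
b = InvokeAILogger.get_logger("InvokeAI")
assert a is b

# Per-module loggers work the same way as before the rename.
log = InvokeAILogger.get_logger(__name__)
log.info("handlers here come from InvokeAILogger.get_loggers(config)")
```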

@@ -44,7 +44,7 @@ from invokeai.frontend.install.widgets import (
)
config = InvokeAIAppConfig.get_config()
logger = InvokeAILogger.getLogger()
logger = InvokeAILogger.get_logger()
# build a table mapping all non-printable characters to None
# for stripping control characters
@@ -590,7 +590,7 @@ def main():
if opt.full_precision:
invoke_args.extend(["--precision", "float32"])
config.parse_args(invoke_args)
logger = InvokeAILogger().getLogger(config=config)
logger = InvokeAILogger().get_logger(config=config)
if not config.model_conf_path.exists():
logger.info("Your InvokeAI root directory is not set up. Calling invokeai-configure.")