From 04f9757f8d41309a319aec6bb92ee0ee6c4e7a01 Mon Sep 17 00:00:00 2001
From: Lincoln Stein
Date: Tue, 6 Jun 2023 22:57:49 -0400
Subject: [PATCH] prevent crash when trying to calculate size of missing
 safety_checker

- Also fixed up order in which logger is created in invokeai-web so that
  handlers are installed after command-line options are parsed (and not
  before!)
---
 invokeai/app/api_app.py                      | 18 ++++++++---------
 invokeai/app/cli_app.py                      |  8 ++------
 invokeai/app/invocations/latent.py           |  2 +-
 .../backend/model_management/model_cache.py  | 20 ++++++++++++-------
 invokeai/backend/util/logging.py             | 11 +++++-----
 invokeai/frontend/web/dist/index.html        |  2 +-
 invokeai/frontend/web/stats.html             |  2 +-
 7 files changed, 33 insertions(+), 30 deletions(-)

diff --git a/invokeai/app/api_app.py b/invokeai/app/api_app.py
index 96a22466b5..cdcac42191 100644
--- a/invokeai/app/api_app.py
+++ b/invokeai/app/api_app.py
@@ -3,7 +3,7 @@ import asyncio
 from inspect import signature
 
 import uvicorn
-from invokeai.backend.util.logging import InvokeAILogger
+
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 from fastapi.openapi.docs import get_redoc_html, get_swagger_ui_html
@@ -13,13 +13,18 @@ from fastapi_events.handlers.local import local_handler
 from fastapi_events.middleware import EventHandlerASGIMiddleware
 from pydantic.schema import schema
 
+# Do this early so that other modules pick up configuration
+from .services.config import InvokeAIAppConfig
+app_config = InvokeAIAppConfig.get_config()
+app_config.parse_args()
+
+from invokeai.backend.util.logging import InvokeAILogger
+logger = InvokeAILogger.getLogger()
+
 from .api.dependencies import ApiDependencies
 from .api.routers import sessions, models, images
 from .api.sockets import SocketIO
 from .invocations.baseinvocation import BaseInvocation
-from .services.config import InvokeAIAppConfig
-
-logger = InvokeAILogger.getLogger()
 
 # Create the app
 # TODO: create this all in a method so configuration/etc. can be passed in?
@@ -37,11 +42,6 @@ app.add_middleware(
 
 socket_io = SocketIO(app)
 
-# initialize config
-# this is a module global
-app_config = InvokeAIAppConfig.get_config()
-app_config.parse_args()
-
 # Add startup event to load dependencies
 @app.on_event("startup")
 async def startup_event():
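The api_app.py hunks above, together with the cli_app.py hunks that follow, reorder startup so that InvokeAIAppConfig.parse_args() runs before InvokeAILogger.getLogger() is called. Previously the logger was created at import time with default settings, so handlers requested on the command line (for example through InvokeAI's log_handlers setting) were never installed. The sketch below illustrates the same ordering principle with plain argparse and logging; the --log_handlers option name mirrors that setting, but everything else is a hypothetical stand-in, not the project's actual startup code.

```python
import argparse
import logging
import sys


def parse_config(argv):
    """Parse command-line options into a simple config object."""
    parser = argparse.ArgumentParser()
    # hypothetical option; it mirrors InvokeAI's log_handlers setting
    parser.add_argument("--log_handlers", nargs="+", default=["console"])
    return parser.parse_args(argv)


def build_logger(config) -> logging.Logger:
    """Attach handlers according to the already-parsed config."""
    logger = logging.getLogger("example")
    logger.setLevel(logging.INFO)
    for spec in config.log_handlers:
        name, _, arg = spec.partition("=")
        if name == "console":
            logger.addHandler(logging.StreamHandler(sys.stderr))
        elif name == "file":
            logger.addHandler(logging.FileHandler(arg or "example.log"))
    return logger


if __name__ == "__main__":
    config = parse_config(sys.argv[1:])  # parse options first ...
    logger = build_logger(config)        # ... then install handlers from them
    logger.info("active handlers: %s", logger.handlers)
```

Constructing the logger only after parsing is what lets a handler list supplied on the command line actually take effect.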
diff --git a/invokeai/app/cli_app.py b/invokeai/app/cli_app.py
index 62b46d4f84..82144cdca6 100644
--- a/invokeai/app/cli_app.py
+++ b/invokeai/app/cli_app.py
@@ -11,7 +11,7 @@ from typing import Union, get_type_hints
 from pydantic import BaseModel, ValidationError
 from pydantic.fields import Field
 
-import invokeai.backend.util.logging as logger
+from invokeai.backend.util.logging import InvokeAILogger
 from invokeai.app.services.image_record_storage import SqliteImageRecordStorage
 from invokeai.app.services.images import ImageService
 from invokeai.app.services.metadata import CoreMetadataService
@@ -22,7 +22,6 @@ from .cli.commands import (BaseCommand, CliContext, ExitCli,
                            SortedHelpFormatter, add_graph_parsers, add_parsers)
 from .cli.completer import set_autocompleter
 from .invocations.baseinvocation import BaseInvocation
-from .services.config import get_invokeai_config
 from .services.default_graphs import (create_system_graphs,
                                       default_text_to_image_graph_id)
 from .services.events import EventServiceBase
@@ -192,14 +191,11 @@ def invoke_all(context: CliContext):
             raise SessionError()
 
-
-logger = logger.InvokeAILogger.getLogger()
-
-
 def invoke_cli():
     # this gets the basic configuration
    config = InvokeAIAppConfig.get_config()
    config.parse_args()
+    logger = InvokeAILogger.getLogger()
 
     # get the optional list of invocations to execute on the command line
     parser = config.get_parser()
 
diff --git a/invokeai/app/invocations/latent.py b/invokeai/app/invocations/latent.py
index 797a81c721..3e4484c967 100644
--- a/invokeai/app/invocations/latent.py
+++ b/invokeai/app/invocations/latent.py
@@ -513,7 +513,7 @@ class LatentsToImageInvocation(BaseInvocation):
         return ImageOutput(
             image=ImageField(
                 image_name=image_dto.image_name,
-                image_type=image_dto.image_type,
+                image_type=image_dto.image_origin,
             ),
             width=image_dto.width,
             height=image_dto.height,
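The model_cache.py changes below are the crash fix named in the subject line. calc_model_size_by_fs() previously assumed the model directory existed and went straight to os.listdir(), which raised when an optional sub-model such as the safety checker had never been downloaded; the patch returns a size of 0 for a missing path, opens the file actually being iterated (file rather than the undefined index_file), adds the json and transformers imports the module relies on, and makes ClassifierModelInfo consistently use self.repo_id_or_path. The sketch below shows only the guard, under the simplifying assumption that a model's size can be approximated by summing its weight files on disk; it is not the project's full estimation logic.

```python
import os
from typing import Optional

WEIGHT_SUFFIXES = (".safetensors", ".bin", ".pt", ".ckpt")


def model_size_on_disk(model_path: str, subfolder: Optional[str] = None) -> int:
    """Approximate a model's size by summing its weight files on disk."""
    if subfolder is not None:
        model_path = os.path.join(model_path, subfolder)

    # the sub-model may simply be absent (e.g. the safety checker was never
    # downloaded); report zero bytes instead of letting os.listdir() raise
    if not os.path.exists(model_path):
        return 0

    total = 0
    for name in os.listdir(model_path):
        full = os.path.join(model_path, name)
        if os.path.isfile(full) and name.endswith(WEIGHT_SUFFIXES):
            total += os.path.getsize(full)
    return total


# hypothetical layout: prints 0 if models/sd-1.5/safety_checker is not on disk
print(model_size_on_disk("models/sd-1.5", subfolder="safety_checker"))
```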
diff --git a/invokeai/backend/model_management/model_cache.py b/invokeai/backend/model_management/model_cache.py
index 5f83f077e5..c38d051eb4 100644
--- a/invokeai/backend/model_management/model_cache.py
+++ b/invokeai/backend/model_management/model_cache.py
@@ -16,18 +16,19 @@ context. Use like this:
 """
 
-import contextlib
 import gc
 import os
 import sys
 import hashlib
+import json
 import warnings
 from contextlib import suppress
 from enum import Enum
 from pathlib import Path
-from typing import Dict, Sequence, Union, types, Optional, List, Type, Any
+from typing import Dict, Union, types, Optional, List, Type, Any
 
 import torch
+import transformers
 from diffusers import DiffusionPipeline, SchedulerMixin, ConfigMixin
 from diffusers import logging as diffusers_logging
@@ -63,6 +64,11 @@ def calc_model_size_by_fs(
     if subfolder is not None:
         model_path = os.path.join(model_path, subfolder)
 
+    # this can happen when, for example, the safety checker
+    # is not downloaded.
+    if not os.path.exists(model_path):
+        return 0
+
     all_files = os.listdir(model_path)
     all_files = [f for f in all_files if os.path.isfile(os.path.join(model_path, f))]
@@ -88,7 +94,7 @@ def calc_model_size_by_fs(
         if not file.endswith(index_postfix):
             continue
         try:
-            with open(os.path.join(model_path, index_file), "r") as f:
+            with open(os.path.join(model_path, file), "r") as f:
                 index_data = json.loads(f.read())
                 return int(index_data["metadata"]["total_size"])
         except:
@@ -277,7 +283,7 @@ class ClassifierModelInfo(ModelInfoBase):
         self.child_sizes: Dict[str, int] = dict()
 
         try:
-            main_config = EmptyConfigLoader.load_config(repo_id_or_path, config_name="config.json")
+            main_config = EmptyConfigLoader.load_config(self.repo_id_or_path, config_name="config.json")
             #main_config = json.loads(os.path.join(self.model_path, "config.json"))
         except:
             raise Exception("Invalid classifier model! (config.json not found or invalid)")
@@ -289,7 +295,7 @@ class ClassifierModelInfo(ModelInfoBase):
 
     def _load_tokenizer(self, main_config: dict):
         try:
-            tokenizer_config = EmptyConfigLoader.load_config(repo_id_or_path, config_name="tokenizer_config.json")
+            tokenizer_config = EmptyConfigLoader.load_config(self.repo_id_or_path, config_name="tokenizer_config.json")
             #tokenizer_config = json.loads(os.path.join(self.model_path, "tokenizer_config.json"))
         except:
             raise Exception("Invalid classifier model! (Failed to load tokenizer_config.json)")
@@ -314,13 +320,13 @@ class ClassifierModelInfo(ModelInfoBase):
             raise Exception("Invalid classifier model! (Failed to detect text_encoder type)")
 
         self.child_types[SDModelType.TextEncoder] = self._definition_to_type(["transformers", text_encoder_class_name])
-        self.child_sizes[SDModelType.TextEncoder] = calc_model_size_by_fs(repo_id_or_path)
+        self.child_sizes[SDModelType.TextEncoder] = calc_model_size_by_fs(self.repo_id_or_path)
 
     def _load_feature_extractor(self, main_config: dict):
         self.child_sizes[SDModelType.FeatureExtractor] = 0
 
         try:
-            feature_extractor_config = EmptyConfigLoader.load_config(repo_id_or_path, config_name="preprocessor_config.json")
+            feature_extractor_config = EmptyConfigLoader.load_config(self.repo_id_or_path, config_name="preprocessor_config.json")
         except:
             return  # feature extractor not passed with t5
diff --git a/invokeai/backend/util/logging.py b/invokeai/backend/util/logging.py
index 16efd56c03..e7a27b8b6f 100644
--- a/invokeai/backend/util/logging.py
+++ b/invokeai/backend/util/logging.py
@@ -195,13 +195,12 @@ class InvokeAILogger(object):
     @classmethod
     def getLoggers(cls, config: InvokeAIAppConfig) -> list[logging.Handler]:
         handler_strs = config.log_handlers
-        print(f'handler_strs={handler_strs}')
         handlers = list()
         for handler in handler_strs:
             handler_name,*args = handler.split('=',2)
             args = args[0] if len(args) > 0 else None
 
-            # console is the only handler that gets a custom formatter
+            # console and file are the only handlers that gets a custom formatter
             if handler_name=='console':
                 formatter = LOG_FORMATTERS[config.log_format]
                 ch = logging.StreamHandler()
@@ -210,14 +209,16 @@
             elif handler_name=='syslog':
                 ch = cls._parse_syslog_args(args)
-                ch.setFormatter(InvokeAISyslogFormatter())
                 handlers.append(ch)
 
             elif handler_name=='file':
-                handlers.append(cls._parse_file_args(args))
+                ch = cls._parse_file_args(args)
+                ch.setFormatter(InvokeAISyslogFormatter())
+                handlers.append(ch)
 
             elif handler_name=='http':
-                handlers.append(cls._parse_http_args(args))
+                ch = cls._parse_http_args(args)
+                handlers.append(ch)
 
         return handlers
 
     @staticmethod
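In logging.py, the hunks above remove a leftover debugging print() and move the InvokeAISyslogFormatter from the syslog handler onto the file handler, while the console handler keeps the user-selected format from config.log_format. Each log_handlers entry is a handler name with an optional argument after an equals sign; the exact argument syntax for syslog and http is handled by helper methods not shown in this hunk. The free-standing sketch below parses entries of that shape into standard-library handlers; it is an illustration of the scheme rather than a copy of InvokeAILogger.getLoggers(), and the formatter strings, default file name, and host:port syslog syntax are assumptions.

```python
import logging
import logging.handlers
from typing import List


def handlers_from_specs(specs: List[str]) -> List[logging.Handler]:
    """Turn "name" / "name=argument" strings into logging handlers."""
    handlers: List[logging.Handler] = []
    for spec in specs:
        name, _, arg = spec.partition("=")
        if name == "console":
            ch: logging.Handler = logging.StreamHandler()
            ch.setFormatter(logging.Formatter("%(levelname)s %(message)s"))
        elif name == "file":
            ch = logging.FileHandler(arg or "invokeai.log")
            ch.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
        elif name == "syslog":
            host, _, port = arg.partition(":")
            ch = logging.handlers.SysLogHandler(address=(host or "localhost", int(port or 514)))
        else:
            continue  # unknown handler names are ignored in this sketch
        handlers.append(ch)
    return handlers


# e.g. handlers_from_specs(["console", "file=/tmp/invokeai.log"])
```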
diff --git a/invokeai/frontend/web/dist/index.html b/invokeai/frontend/web/dist/index.html
index 8a982a7268..a87e4bb40d 100644
--- a/invokeai/frontend/web/dist/index.html
+++ b/invokeai/frontend/web/dist/index.html
@@ -12,7 +12,7 @@
       margin: 0;
     }
 
-
+
 
diff --git a/invokeai/frontend/web/stats.html b/invokeai/frontend/web/stats.html
index e9c4381206..3d72d1163e 100644
--- a/invokeai/frontend/web/stats.html
+++ b/invokeai/frontend/web/stats.html
@@ -6157,7 +6157,7 @@ var drawChart = (function (exports) {