From 163ece9aeeaee9b1330b9a6b3be89a5048e0ff1f Mon Sep 17 00:00:00 2001
From: Ryan Dick
Date: Mon, 11 Sep 2023 16:08:15 -0400
Subject: [PATCH] Initial skeleton for IPAdapter model management.

---
 .../backend/model_management/model_manager.py | 22 ++++----
 .../backend/model_management/model_probe.py   | 40 +++++++++-----
 .../backend/model_management/models/base.py   | 32 ++++++-----
 .../model_management/models/ip_adapter.py     | 53 +++++++++++++++++++
 4 files changed, 112 insertions(+), 35 deletions(-)
 create mode 100644 invokeai/backend/model_management/models/ip_adapter.py

diff --git a/invokeai/backend/model_management/model_manager.py b/invokeai/backend/model_management/model_manager.py
index d87bc03fb7..d746a83a9e 100644
--- a/invokeai/backend/model_management/model_manager.py
+++ b/invokeai/backend/model_management/model_manager.py
@@ -25,6 +25,7 @@ Models are described using four attributes:
         ModelType.Lora -- a LoRA or LyCORIS fine-tune
         ModelType.TextualInversion -- a textual inversion embedding
         ModelType.ControlNet -- a ControlNet model
+        ModelType.IPAdapter -- an IPAdapter model
 
   3) BaseModelType -- an enum indicating the stable diffusion base model, one of:
         BaseModelType.StableDiffusion1
@@ -234,8 +235,8 @@ import textwrap
 import types
 from dataclasses import dataclass
 from pathlib import Path
-from shutil import rmtree, move
-from typing import Optional, List, Literal, Tuple, Union, Dict, Set, Callable
+from shutil import move, rmtree
+from typing import Callable, Dict, List, Literal, Optional, Set, Tuple, Union
 
 import torch
 import yaml
@@ -246,20 +247,21 @@ from pydantic import BaseModel, Field
 import invokeai.backend.util.logging as logger
 from invokeai.app.services.config import InvokeAIAppConfig
 from invokeai.backend.util import CUDA_DEVICE, Chdir
+
 from .model_cache import ModelCache, ModelLocker
 from .model_search import ModelSearch
 from .models import (
-    BaseModelType,
-    ModelType,
-    SubModelType,
-    ModelError,
-    SchedulerPredictionType,
     MODEL_CLASSES,
-    ModelConfigBase,
-    ModelNotFoundException,
-    InvalidModelException,
+    BaseModelType,
     DuplicateModelException,
+    InvalidModelException,
     ModelBase,
+    ModelConfigBase,
+    ModelError,
+    ModelNotFoundException,
+    ModelType,
+    SchedulerPredictionType,
+    SubModelType,
 )
 
 # We are only starting to number the config file with release 3.
diff --git a/invokeai/backend/model_management/model_probe.py b/invokeai/backend/model_management/model_probe.py
index 0d6f61e145..6750e0fe6a 100644
--- a/invokeai/backend/model_management/model_probe.py
+++ b/invokeai/backend/model_management/model_probe.py
@@ -1,24 +1,23 @@
 import json
-import torch
-import safetensors.torch
-
 from dataclasses import dataclass
-
-from diffusers import ModelMixin, ConfigMixin
 from pathlib import Path
-from typing import Callable, Literal, Union, Dict, Optional
+from typing import Callable, Dict, Literal, Optional, Union
+
+import safetensors.torch
+import torch
+from diffusers import ConfigMixin, ModelMixin
 from picklescan.scanner import scan_file_path
 
 from .models import (
     BaseModelType,
+    InvalidModelException,
     ModelType,
     ModelVariantType,
     SchedulerPredictionType,
     SilenceWarnings,
-    InvalidModelException,
 )
-from .util import lora_token_vector_length
 from .models.base import read_checkpoint_meta
+from .util import lora_token_vector_length
 
 
 @dataclass
@@ -53,6 +52,7 @@ class ModelProbe(object):
         "StableDiffusionXLInpaintPipeline": ModelType.Main,
         "AutoencoderKL": ModelType.Vae,
         "ControlNetModel": ModelType.ControlNet,
+        "IPAdapterModel": ModelType.IPAdapter,
     }
 
     @classmethod
@@ -367,6 +367,11 @@ class ControlNetCheckpointProbe(CheckpointProbeBase):
         raise InvalidModelException("Unable to determine base type for {self.checkpoint_path}")
 
 
+class IPAdapterCheckpointProbe(CheckpointProbeBase):
+    def get_base_type(self) -> BaseModelType:
+        raise NotImplementedError()
+
+
 ########################################################
 # classes for probing folders
 #######################################################
@@ -486,11 +491,11 @@ class ControlNetFolderProbe(FolderProbeBase):
         base_model = (
             BaseModelType.StableDiffusion1
             if dimension == 768
-            else BaseModelType.StableDiffusion2
-            if dimension == 1024
-            else BaseModelType.StableDiffusionXL
-            if dimension == 2048
-            else None
+            else (
+                BaseModelType.StableDiffusion2
+                if dimension == 1024
+                else BaseModelType.StableDiffusionXL if dimension == 2048 else None
+            )
         )
         if not base_model:
             raise InvalidModelException(f"Unable to determine model base for {self.folder_path}")
@@ -510,15 +515,24 @@ class LoRAFolderProbe(FolderProbeBase):
         return LoRACheckpointProbe(model_file, None).get_base_type()
 
 
+class IPAdapterFolderProbe(FolderProbeBase):
+    def get_base_type(self) -> BaseModelType:
+        raise NotImplementedError()
+
+
 ############## register probe classes ######
 ModelProbe.register_probe("diffusers", ModelType.Main, PipelineFolderProbe)
 ModelProbe.register_probe("diffusers", ModelType.Vae, VaeFolderProbe)
 ModelProbe.register_probe("diffusers", ModelType.Lora, LoRAFolderProbe)
 ModelProbe.register_probe("diffusers", ModelType.TextualInversion, TextualInversionFolderProbe)
 ModelProbe.register_probe("diffusers", ModelType.ControlNet, ControlNetFolderProbe)
+ModelProbe.register_probe("diffusers", ModelType.IPAdapter, IPAdapterFolderProbe)
+
 ModelProbe.register_probe("checkpoint", ModelType.Main, PipelineCheckpointProbe)
 ModelProbe.register_probe("checkpoint", ModelType.Vae, VaeCheckpointProbe)
 ModelProbe.register_probe("checkpoint", ModelType.Lora, LoRACheckpointProbe)
 ModelProbe.register_probe("checkpoint", ModelType.TextualInversion, TextualInversionCheckpointProbe)
 ModelProbe.register_probe("checkpoint", ModelType.ControlNet, ControlNetCheckpointProbe)
+ModelProbe.register_probe("checkpoint", ModelType.IPAdapter, IPAdapterCheckpointProbe)
+
 ModelProbe.register_probe("onnx", ModelType.ONNX, ONNXFolderProbe)
diff --git a/invokeai/backend/model_management/models/base.py b/invokeai/backend/model_management/models/base.py
index ed1c2c6098..16b6bc26a6 100644
--- a/invokeai/backend/model_management/models/base.py
+++ b/invokeai/backend/model_management/models/base.py
@@ -1,29 +1,36 @@
+import inspect
 import json
 import os
 import sys
 import typing
-import inspect
 import warnings
 from abc import ABCMeta, abstractmethod
 from contextlib import suppress
 from enum import Enum
 from pathlib import Path
-from picklescan.scanner import scan_file_path
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    Generic,
+    List,
+    Literal,
+    Optional,
+    Type,
+    TypeVar,
+    Union,
+)
 
-import torch
 import numpy as np
 import onnx
 import safetensors.torch
-from diffusers import DiffusionPipeline, ConfigMixin
-from onnx import numpy_helper
-from onnxruntime import (
-    InferenceSession,
-    SessionOptions,
-    get_available_providers,
-)
-from pydantic import BaseModel, Field
-from typing import List, Dict, Optional, Type, Literal, TypeVar, Generic, Callable, Any, Union
+import torch
+from diffusers import ConfigMixin, DiffusionPipeline
 from diffusers import logging as diffusers_logging
+from onnx import numpy_helper
+from onnxruntime import InferenceSession, SessionOptions, get_available_providers
+from picklescan.scanner import scan_file_path
+from pydantic import BaseModel, Field
 from transformers import logging as transformers_logging
 
 
@@ -54,6 +61,7 @@ class ModelType(str, Enum):
     Lora = "lora"
     ControlNet = "controlnet"  # used by model_probe
    TextualInversion = "embedding"
+    IPAdapter = "ipadapter"
 
 
 class SubModelType(str, Enum):
diff --git a/invokeai/backend/model_management/models/ip_adapter.py b/invokeai/backend/model_management/models/ip_adapter.py
new file mode 100644
index 0000000000..028f358aaa
--- /dev/null
+++ b/invokeai/backend/model_management/models/ip_adapter.py
@@ -0,0 +1,53 @@
+import os
+from enum import Enum
+from typing import Any, Optional
+
+import torch
+
+from invokeai.backend.model_management.models.base import (
+    BaseModelType,
+    ModelBase,
+    ModelType,
+    SubModelType,
+    classproperty,
+)
+
+
+class IPAdapterModelFormat(Enum):
+    # The 'official' IP-Adapter model format from Tencent (i.e. https://huggingface.co/h94/IP-Adapter)
+    Tencent = "tencent"
+
+
+class IPAdapterModel(ModelBase):
+    def __init__(self, model_path: str, base_model: BaseModelType, model_type: ModelType):
+        assert model_type == ModelType.IPAdapter
+        super().__init__(model_path, base_model, model_type)
+
+        # TODO(ryand): Check correct files for model size calculation.
+        self.model_size = os.path.getsize(self.model_path)
+
+    @classmethod
+    def detect_format(cls, path: str) -> str:
+        if not os.path.exists(path):
+            raise ModuleNotFoundError(f"No IP-Adapter model at path '{path}'.")
+
+        raise NotImplementedError()
+
+    @classproperty
+    def save_to_config(cls) -> bool:
+        raise NotImplementedError()
+
+    def get_size(self, child_type: Optional[SubModelType] = None) -> int:
+        if child_type is not None:
+            raise ValueError("There are no child models in an IP-Adapter model.")
+
+        raise NotImplementedError()
+
+    def get_model(
+        self,
+        torch_dtype: Optional[torch.dtype],
+        child_type: Optional[SubModelType] = None,
+    ) -> Any:
+        if child_type is not None:
+            raise ValueError("There are no child models in an IP-Adapter model.")
+        raise NotImplementedError()
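
A possible direction for the detect_format() stub, sketched here outside the patch: the snippet below guesses the format by inspecting the folder contents. The 'ip_adapter.bin' file name and the layout check are assumptions for illustration only; the patch itself defines nothing beyond the IPAdapterModelFormat.Tencent enum value.

# Illustrative sketch, not part of the patch. Assumes a Tencent-style folder
# that keeps the adapter weights in an 'ip_adapter.bin' file (hypothetical layout).
import os

from invokeai.backend.model_management.models.ip_adapter import IPAdapterModelFormat


def detect_ip_adapter_format(path: str) -> str:
    """Return the assumed IP-Adapter format name for the model at `path`."""
    if not os.path.exists(path):
        raise ModuleNotFoundError(f"No IP-Adapter model at path '{path}'.")

    # Hypothetical check: treat a directory containing 'ip_adapter.bin' as the
    # Tencent layout; anything else is rejected.
    if os.path.isdir(path) and os.path.isfile(os.path.join(path, "ip_adapter.bin")):
        return IPAdapterModelFormat.Tencent.value

    raise ValueError(f"Unrecognized IP-Adapter model layout at '{path}'.")

Under that assumed layout, calling detect_ip_adapter_format() on a downloaded IP-Adapter folder would return "tencent"; any other layout raises an error rather than guessing.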