2023-05-11 04:09:19 +00:00
|
|
|
# Copyright (c) 2023 Lincoln D. Stein and the InvokeAI Team
|
|
|
|
|
2023-05-14 00:06:26 +00:00
|
|
|
from __future__ import annotations
|
2023-05-11 04:09:19 +00:00
|
|
|
|
|
|
|
from abc import ABC, abstractmethod
|
2023-08-01 07:55:13 +00:00
|
|
|
from logging import Logger
|
2023-05-11 04:09:19 +00:00
|
|
|
from pathlib import Path
|
2023-07-06 17:15:15 +00:00
|
|
|
from pydantic import Field
|
2023-08-01 07:55:13 +00:00
|
|
|
from typing import Literal, Optional, Union, Callable, List, Tuple, TYPE_CHECKING
|
2023-07-05 10:07:10 +00:00
|
|
|
from types import ModuleType
|
2023-05-11 04:09:19 +00:00
|
|
|
|
2023-07-06 17:15:15 +00:00
|
|
|
from invokeai.backend.model_management import (
|
2023-05-11 04:09:19 +00:00
|
|
|
ModelManager,
|
2023-06-11 03:12:21 +00:00
|
|
|
BaseModelType,
|
|
|
|
ModelType,
|
|
|
|
SubModelType,
|
2023-06-12 02:52:30 +00:00
|
|
|
ModelInfo,
|
2023-07-05 13:05:05 +00:00
|
|
|
AddModelResult,
|
|
|
|
SchedulerPredictionType,
|
2023-07-06 17:15:15 +00:00
|
|
|
ModelMerger,
|
|
|
|
MergeInterpolationMethod,
|
2023-07-16 18:17:05 +00:00
|
|
|
ModelNotFoundException,
|
2023-05-11 04:09:19 +00:00
|
|
|
)
|
2023-07-14 15:14:33 +00:00
|
|
|
from invokeai.backend.model_management.model_search import FindModels
|
2023-08-16 01:00:30 +00:00
|
|
|
from invokeai.backend.model_management.model_cache import CacheStats
|
2023-07-06 17:15:15 +00:00
|
|
|
|
2023-07-05 18:50:57 +00:00
|
|
|
import torch
|
2023-05-12 04:14:56 +00:00
|
|
|
from invokeai.app.models.exceptions import CanceledException
|
2023-05-12 01:24:29 +00:00
|
|
|
from ...backend.util import choose_precision, choose_torch_device
|
2023-07-05 10:07:10 +00:00
|
|
|
from .config import InvokeAIAppConfig
|
2023-05-11 04:09:19 +00:00
|
|
|
|
2023-05-14 00:06:26 +00:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from ..invocations.baseinvocation import BaseInvocation, InvocationContext
|
|
|
|
|
2023-05-12 03:33:24 +00:00
|
|
|
|
2023-05-11 04:09:19 +00:00
|
|
|
class ModelManagerServiceBase(ABC):
    """Responsible for managing models on disk and in memory"""

    @abstractmethod
    def __init__(
        self,
        config: InvokeAIAppConfig,
        logger: ModuleType,
    ):
        """
        Initialize with the path to the models.yaml config file.

        Optional parameters are the torch device type, precision, max_models,
        and sequential_offload boolean. Note that the default device
        type and precision are set up for a CUDA system running at half precision.
        """
        pass

    @abstractmethod
    def get_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        submodel: Optional[SubModelType] = None,
        node: Optional[BaseInvocation] = None,
        context: Optional[InvocationContext] = None,
    ) -> ModelInfo:
        """Retrieve the indicated model with name and type.

        submodel can be used to get a part (such as the vae)
        of a diffusers pipeline."""
        pass

    @property
    @abstractmethod
    def logger(self):
        """Return the logger used by the model manager."""
        pass

    @abstractmethod
    def model_exists(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
    ) -> bool:
        """Return True if the indicated name/base/type combination identifies a known model."""
        pass

    @abstractmethod
    def model_info(self, model_name: str, base_model: BaseModelType, model_type: ModelType) -> dict:
        """
        Given a model name returns a dict-like (OmegaConf) object describing it.
        Uses the exact format as the omegaconf stanza.
        """
        pass

    @abstractmethod
    def list_models(self, base_model: Optional[BaseModelType] = None, model_type: Optional[ModelType] = None) -> dict:
        """
        Return a dict of models in the format:
        { model_type1:
          { model_name1: {'status': 'active'|'cached'|'not loaded',
                          'model_name' : name,
                          'model_type' : SDModelType,
                          'description': description,
                          'format': 'folder'|'safetensors'|'ckpt'
                          },
            model_name2: { etc }
          },
          model_type2:
            { model_name_n: etc
            }
        """
        pass

    @abstractmethod
    def list_model(self, model_name: str, base_model: BaseModelType, model_type: ModelType) -> dict:
        """
        Return information about the model using the same format as list_models()
        """
        pass

    @abstractmethod
    def model_names(self) -> List[Tuple[str, BaseModelType, ModelType]]:
        """
        Returns a list of all the model names known.
        """
        pass

    @abstractmethod
    def add_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        model_attributes: dict,
        clobber: bool = False,
    ) -> AddModelResult:
        """
        Update the named model with a dictionary of attributes. Will fail with an
        assertion error if the name already exists. Pass clobber=True to overwrite.
        On a successful update, the config will be changed in memory. Will fail
        with an assertion error if provided attributes are incorrect or
        the model name is missing. Call commit() to write changes to disk.
        """
        pass

    @abstractmethod
    def update_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        model_attributes: dict,
    ) -> AddModelResult:
        """
        Update the named model with a dictionary of attributes. Will fail with a
        ModelNotFoundException if the name does not already exist.

        On a successful update, the config will be changed in memory. Will fail
        with an assertion error if provided attributes are incorrect or
        the model name is missing. Call commit() to write changes to disk.
        """
        pass

    @abstractmethod
    def del_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
    ):
        """
        Delete the named model from configuration.

        NOTE(review): the original docstring referenced a `delete_files` flag that is
        not part of this signature — whether the underlying weight file or diffusers
        directory is removed is implementation-defined; confirm against the concrete
        implementation. Call commit() to write to disk.
        """
        pass

    @abstractmethod
    def rename_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        new_name: str,
    ):
        """
        Rename the indicated model.
        """
        pass

    @abstractmethod
    def list_checkpoint_configs(self) -> List[Path]:
        """
        List the checkpoint config paths from ROOT/configs/stable-diffusion.
        """
        pass

    @abstractmethod
    def convert_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: Literal[ModelType.Main, ModelType.Vae],
    ) -> AddModelResult:
        """
        Convert a checkpoint file into a diffusers folder, deleting the cached
        version and deleting the original checkpoint file if it is in the models
        directory.

        :param model_name: Name of the model to convert
        :param base_model: Base model type
        :param model_type: Type of model ['vae' or 'main']

        This will raise a ValueError if the model is not a checkpoint. It will
        also raise a ValueError in the event that there is a similarly-named diffusers
        directory already in place.
        """
        pass

    @abstractmethod
    def heuristic_import(
        self,
        items_to_import: set[str],
        prediction_type_helper: Optional[Callable[[Path], SchedulerPredictionType]] = None,
    ) -> dict[str, AddModelResult]:
        """Import a list of paths, repo_ids or URLs. Returns the set of
        successfully imported items.
        :param items_to_import: Set of strings corresponding to models to be imported.
        :param prediction_type_helper: A callback that receives the Path of a Stable Diffusion 2 checkpoint model and returns a SchedulerPredictionType.

        The prediction type helper is necessary to distinguish between
        models based on Stable Diffusion 2 Base (requiring
        SchedulerPredictionType.Epsilon) and Stable Diffusion 768
        (requiring SchedulerPredictionType.VPrediction). It is
        generally impossible to do this programmatically, so the
        prediction_type_helper usually asks the user to choose.

        The result is a set of successfully installed models. Each element
        of the set is a dict corresponding to the newly-created OmegaConf stanza for
        that model.
        """
        pass

    @abstractmethod
    def merge_models(
        self,
        model_names: List[str] = Field(
            default=None, min_items=2, max_items=3, description="List of model names to merge"
        ),
        base_model: Union[BaseModelType, str] = Field(
            default=None, description="Base model shared by all models to be merged"
        ),
        merged_model_name: str = Field(default=None, description="Name of destination model after merging"),
        alpha: Optional[float] = 0.5,
        interp: Optional[MergeInterpolationMethod] = None,
        force: Optional[bool] = False,
        merge_dest_directory: Optional[Path] = None,
    ) -> AddModelResult:
        """
        Merge two to three diffusers pipeline models and save as a new model.

        :param model_names: List of 2-3 models to merge
        :param base_model: Base model to use for all models
        :param merged_model_name: Name of destination merged model
        :param alpha: Alpha strength to apply to 2d and 3d model
        :param interp: Interpolation method. None (default)
        :param merge_dest_directory: Save the merged model to the designated directory (with 'merged_model_name' appended)
        """
        pass

    @abstractmethod
    def search_for_models(self, directory: Path) -> List[Path]:
        """
        Return list of all models found in the designated directory.
        """
        pass

    @abstractmethod
    def sync_to_config(self):
        """
        Re-read models.yaml, rescan the models directory, and reimport models
        in the autoimport directories. Call after making changes outside the
        model manager API.
        """
        pass

    @abstractmethod
    def collect_cache_stats(self, cache_stats: CacheStats):
        """
        Install the given CacheStats object so that model cache statistics
        are accumulated into it.
        """
        pass

    @abstractmethod
    def commit(self, conf_file: Optional[Path] = None) -> None:
        """
        Write current configuration out to the indicated file.
        If no conf_file is provided, then replaces the
        original file/database used to initialize the object.
        """
        pass
|
|
|
|
|
2023-07-27 14:54:01 +00:00
|
|
|
|
2023-05-11 04:09:19 +00:00
|
|
|
# simple implementation
|
|
|
|
class ModelManagerService(ModelManagerServiceBase):
    """Responsible for managing models on disk and in memory"""

    def __init__(
        self,
        config: InvokeAIAppConfig,
        logger: Logger,
    ):
        """
        Initialize with the path to the models.yaml config file.

        Optional parameters are the torch device type, precision, max_models,
        and sequential_offload boolean. Note that the default device
        type and precision are set up for a CUDA system running at half precision.
        """
        # Prefer an explicitly configured models.yaml; fall back to the default
        # location under the root directory.
        if config.model_conf_path and config.model_conf_path.exists():
            config_file = config.model_conf_path
        else:
            config_file = config.root_dir / "configs/models.yaml"

        logger.debug(f"Config file={config_file}")

        device = torch.device(choose_torch_device())
        device_name = torch.cuda.get_device_name() if device == torch.device("cuda") else ""
        logger.info(f"GPU device = {device} {device_name}")

        # Resolve "auto" precision based on the chosen device.
        precision = config.precision
        if precision == "auto":
            precision = choose_precision(device)
        dtype = torch.float32 if precision == "float32" else torch.float16

        # RAM cache size (GiB) comes from the `ram_cache_size` config setting.
        # (The deprecated `max_loaded_models` fallback described in an earlier
        # revision's comment is no longer implemented here.)
        max_cache_size = config.ram_cache_size

        logger.debug(f"Maximum RAM cache size: {max_cache_size} GiB")

        sequential_offload = config.sequential_guidance

        self.mgr = ModelManager(
            config=config_file,
            device_type=device,
            precision=dtype,
            max_cache_size=max_cache_size,
            sequential_offload=sequential_offload,
            logger=logger,
        )
        logger.info("Model manager service initialized")

    def get_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        submodel: Optional[SubModelType] = None,
        context: Optional[InvocationContext] = None,
    ) -> ModelInfo:
        """
        Retrieve the indicated model. submodel can be used to get a
        part (such as the vae) of a diffusers model.
        """
        # we can emit model loading events if we are executing with access to the invocation context
        if context:
            self._emit_load_event(
                context=context,
                model_name=model_name,
                base_model=base_model,
                model_type=model_type,
                submodel=submodel,
            )

        model_info = self.mgr.get_model(
            model_name,
            base_model,
            model_type,
            submodel,
        )

        if context:
            self._emit_load_event(
                context=context,
                model_name=model_name,
                base_model=base_model,
                model_type=model_type,
                submodel=submodel,
                model_info=model_info,
            )

        return model_info

    def model_exists(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
    ) -> bool:
        """
        Given a model name, returns True if it is a valid
        identifier.
        """
        return self.mgr.model_exists(
            model_name,
            base_model,
            model_type,
        )

    def model_info(self, model_name: str, base_model: BaseModelType, model_type: ModelType) -> Union[dict, None]:
        """
        Given a model name returns a dict-like (OmegaConf) object describing it.
        """
        return self.mgr.model_info(model_name, base_model, model_type)

    def model_names(self) -> List[Tuple[str, BaseModelType, ModelType]]:
        """
        Returns a list of all the model names known.
        """
        return self.mgr.model_names()

    def list_models(
        self, base_model: Optional[BaseModelType] = None, model_type: Optional[ModelType] = None
    ) -> list[dict]:
        """
        Return a list of models.
        """
        return self.mgr.list_models(base_model, model_type)

    def list_model(self, model_name: str, base_model: BaseModelType, model_type: ModelType) -> Union[dict, None]:
        """
        Return information about the model using the same format as list_models()
        """
        return self.mgr.list_model(model_name=model_name, base_model=base_model, model_type=model_type)

    def add_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        model_attributes: dict,
        clobber: bool = False,
    ) -> AddModelResult:
        """
        Update the named model with a dictionary of attributes. Will fail with an
        assertion error if the name already exists. Pass clobber=True to overwrite.
        On a successful update, the config will be changed in memory. Will fail
        with an assertion error if provided attributes are incorrect or
        the model name is missing. Call commit() to write changes to disk.
        """
        self.logger.debug(f"add/update model {model_name}")
        return self.mgr.add_model(model_name, base_model, model_type, model_attributes, clobber)

    def update_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        model_attributes: dict,
    ) -> AddModelResult:
        """
        Update the named model with a dictionary of attributes. Will fail with a
        ModelNotFoundException exception if the name does not already exist.
        On a successful update, the config will be changed in memory. Will fail
        with an assertion error if provided attributes are incorrect or
        the model name is missing. Call commit() to write changes to disk.
        """
        self.logger.debug(f"update model {model_name}")
        if not self.model_exists(model_name, base_model, model_type):
            raise ModelNotFoundException(f"Unknown model {model_name}")
        # Delegates to add_model with clobber=True so the existing stanza is replaced.
        return self.add_model(model_name, base_model, model_type, model_attributes, clobber=True)

    def del_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
    ):
        """
        Delete the named model from configuration, then commit the change
        to disk immediately. Whether the underlying weight file or diffusers
        directory is removed is determined by the underlying ModelManager
        (NOTE(review): confirm against ModelManager.del_model).
        """
        self.logger.debug(f"delete model {model_name}")
        self.mgr.del_model(model_name, base_model, model_type)
        self.mgr.commit()

    def convert_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: Literal[ModelType.Main, ModelType.Vae],
        convert_dest_directory: Optional[Path] = Field(
            default=None, description="Optional directory location for merged model"
        ),
    ) -> AddModelResult:
        """
        Convert a checkpoint file into a diffusers folder, deleting the cached
        version and deleting the original checkpoint file if it is in the models
        directory.

        :param model_name: Name of the model to convert
        :param base_model: Base model type
        :param model_type: Type of model ['vae' or 'main']
        :param convert_dest_directory: Save the converted model to the designated directory (`models/etc/etc` by default)

        This will raise a ValueError if the model is not a checkpoint. It will
        also raise a ValueError in the event that there is a similarly-named diffusers
        directory already in place.
        """
        self.logger.debug(f"convert model {model_name}")
        return self.mgr.convert_model(model_name, base_model, model_type, convert_dest_directory)

    def collect_cache_stats(self, cache_stats: CacheStats):
        """
        Install the given CacheStats object so that model cache statistics
        are accumulated into it.
        """
        self.mgr.cache.stats = cache_stats

    def commit(self, conf_file: Optional[Path] = None):
        """
        Write current configuration out to the indicated file.
        If no conf_file is provided, then replaces the
        original file/database used to initialize the object.
        """
        return self.mgr.commit(conf_file)

    def _emit_load_event(
        self,
        context,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        submodel: Optional[SubModelType] = None,
        model_info: Optional[ModelInfo] = None,
    ):
        """Emit a model-load started/completed event; raises CanceledException if the graph was canceled."""
        if context.services.queue.is_canceled(context.graph_execution_state_id):
            raise CanceledException()

        # A populated model_info means the load has finished; its absence means we are starting.
        if model_info:
            context.services.events.emit_model_load_completed(
                graph_execution_state_id=context.graph_execution_state_id,
                model_name=model_name,
                base_model=base_model,
                model_type=model_type,
                submodel=submodel,
                model_info=model_info,
            )
        else:
            context.services.events.emit_model_load_started(
                graph_execution_state_id=context.graph_execution_state_id,
                model_name=model_name,
                base_model=base_model,
                model_type=model_type,
                submodel=submodel,
            )

    @property
    def logger(self):
        """Logger supplied by the wrapped ModelManager."""
        return self.mgr.logger

    def heuristic_import(
        self,
        items_to_import: set[str],
        prediction_type_helper: Optional[Callable[[Path], SchedulerPredictionType]] = None,
    ) -> dict[str, AddModelResult]:
        """Import a list of paths, repo_ids or URLs. Returns the set of
        successfully imported items.
        :param items_to_import: Set of strings corresponding to models to be imported.
        :param prediction_type_helper: A callback that receives the Path of a Stable Diffusion 2 checkpoint model and returns a SchedulerPredictionType.

        The prediction type helper is necessary to distinguish between
        models based on Stable Diffusion 2 Base (requiring
        SchedulerPredictionType.Epsilon) and Stable Diffusion 768
        (requiring SchedulerPredictionType.VPrediction). It is
        generally impossible to do this programmatically, so the
        prediction_type_helper usually asks the user to choose.

        The result is a set of successfully installed models. Each element
        of the set is a dict corresponding to the newly-created OmegaConf stanza for
        that model.
        """
        return self.mgr.heuristic_import(items_to_import, prediction_type_helper)

    def merge_models(
        self,
        model_names: List[str] = Field(
            default=None, min_items=2, max_items=3, description="List of model names to merge"
        ),
        base_model: Union[BaseModelType, str] = Field(
            default=None, description="Base model shared by all models to be merged"
        ),
        merged_model_name: str = Field(default=None, description="Name of destination model after merging"),
        alpha: float = 0.5,
        interp: Optional[MergeInterpolationMethod] = None,
        force: bool = False,
        merge_dest_directory: Optional[Path] = Field(
            default=None, description="Optional directory location for merged model"
        ),
    ) -> AddModelResult:
        """
        Merge two to three diffusers pipeline models and save as a new model.

        :param model_names: List of 2-3 models to merge
        :param base_model: Base model to use for all models
        :param merged_model_name: Name of destination merged model
        :param alpha: Alpha strength to apply to 2d and 3d model
        :param interp: Interpolation method. None (default)
        :param merge_dest_directory: Save the merged model to the designated directory (with 'merged_model_name' appended)
        """
        merger = ModelMerger(self.mgr)
        try:
            result = merger.merge_diffusion_models_and_save(
                model_names=model_names,
                base_model=base_model,
                merged_model_name=merged_model_name,
                alpha=alpha,
                interp=interp,
                force=force,
                merge_dest_directory=merge_dest_directory,
            )
        except AssertionError as e:
            # Surface merger precondition failures as ValueError for callers.
            raise ValueError(e)
        return result

    def search_for_models(self, directory: Path) -> List[Path]:
        """
        Return list of all models found in the designated directory.
        """
        search = FindModels([directory], self.logger)
        return search.list_models()

    def sync_to_config(self):
        """
        Re-read models.yaml, rescan the models directory, and reimport models
        in the autoimport directories. Call after making changes outside the
        model manager API.
        """
        return self.mgr.sync_to_config()

    def list_checkpoint_configs(self) -> List[Path]:
        """
        List the checkpoint config paths from ROOT/configs/stable-diffusion.
        """
        config = self.mgr.app_config
        conf_path = config.legacy_conf_path
        root_path = config.root_path
        return [(conf_path / x).relative_to(root_path) for x in conf_path.glob("**/*.yaml")]

    def rename_model(
        self,
        model_name: str,
        base_model: BaseModelType,
        model_type: ModelType,
        new_name: Optional[str] = None,
        new_base: Optional[BaseModelType] = None,
    ):
        """
        Rename the indicated model. Can provide a new name and/or a new base.
        :param model_name: Current name of the model
        :param base_model: Current base of the model
        :param model_type: Model type (can't be changed)
        :param new_name: New name for the model
        :param new_base: New base for the model
        """
        self.mgr.rename_model(
            base_model=base_model,
            model_type=model_type,
            model_name=model_name,
            new_name=new_name,
            new_base=new_base,
        )
|