2024-01-14 19:54:53 +00:00
|
|
|
# Fixtures to support testing of the model_manager v2 installer, metadata and record store
|
|
|
|
|
|
|
|
import os
|
|
|
|
import shutil
|
2024-02-29 20:08:10 +00:00
|
|
|
import time
|
2024-01-14 19:54:53 +00:00
|
|
|
from pathlib import Path
|
|
|
|
from typing import Any, Dict, List
|
|
|
|
|
|
|
|
import pytest
|
|
|
|
from pydantic import BaseModel
|
2024-02-18 06:27:42 +00:00
|
|
|
from pytest import FixtureRequest
|
2024-01-14 19:54:53 +00:00
|
|
|
from requests.sessions import Session
|
|
|
|
from requests_testadapter import TestAdapter, TestSession
|
|
|
|
|
|
|
|
from invokeai.app.services.config import InvokeAIAppConfig
|
2024-02-18 06:27:42 +00:00
|
|
|
from invokeai.app.services.download import DownloadQueueService, DownloadQueueServiceBase
|
2024-01-14 19:54:53 +00:00
|
|
|
from invokeai.app.services.events.events_base import EventServiceBase
|
|
|
|
from invokeai.app.services.model_install import ModelInstallService, ModelInstallServiceBase
|
2024-02-18 06:27:42 +00:00
|
|
|
from invokeai.app.services.model_load import ModelLoadService, ModelLoadServiceBase
|
|
|
|
from invokeai.app.services.model_manager import ModelManagerService, ModelManagerServiceBase
|
2024-02-17 16:45:32 +00:00
|
|
|
from invokeai.app.services.model_records import ModelRecordServiceBase, ModelRecordServiceSQL
|
2024-01-14 19:54:53 +00:00
|
|
|
from invokeai.backend.model_manager.config import (
|
|
|
|
BaseModelType,
|
2024-03-04 10:38:21 +00:00
|
|
|
LoRADiffusersConfig,
|
|
|
|
MainCheckpointConfig,
|
|
|
|
MainDiffusersConfig,
|
2024-01-14 19:54:53 +00:00
|
|
|
ModelFormat,
|
2024-03-04 10:38:21 +00:00
|
|
|
ModelSourceType,
|
2024-01-14 19:54:53 +00:00
|
|
|
ModelType,
|
2024-03-04 10:38:21 +00:00
|
|
|
ModelVariantType,
|
2024-03-05 06:37:17 +00:00
|
|
|
VAEDiffusersConfig,
|
2024-01-14 19:54:53 +00:00
|
|
|
)
|
2024-02-17 16:45:32 +00:00
|
|
|
from invokeai.backend.model_manager.load import ModelCache, ModelConvertCache
|
2024-01-14 19:54:53 +00:00
|
|
|
from invokeai.backend.util.logging import InvokeAILogger
|
2024-02-17 16:45:32 +00:00
|
|
|
from tests.backend.model_manager.model_metadata.metadata_examples import (
|
2024-03-15 18:26:05 +00:00
|
|
|
HFTestLoraMetadata,
|
2024-01-14 19:54:53 +00:00
|
|
|
RepoCivitaiModelMetadata1,
|
|
|
|
RepoCivitaiVersionMetadata1,
|
|
|
|
RepoHFMetadata1,
|
|
|
|
RepoHFMetadata1_nofp16,
|
|
|
|
RepoHFModelJson1,
|
|
|
|
)
|
|
|
|
from tests.fixtures.sqlite_database import create_mock_sqlite_database
|
|
|
|
|
|
|
|
|
|
|
|
class DummyEvent(BaseModel):
    """Dummy Event to use with Dummy Event service.

    Records one dispatched event so tests can inspect what was emitted.
    """

    # Event name, taken from payload["event"] by DummyEventService.dispatch().
    event_name: str
    # Event data, taken from payload["data"] by DummyEventService.dispatch().
    payload: Dict[str, Any]
|
|
|
|
|
|
|
|
|
|
|
|
class DummyEventService(EventServiceBase):
    """In-memory event service that records every dispatched event for test assertions."""

    # All events dispatched since construction, in order.
    events: List[DummyEvent]

    def __init__(self) -> None:
        super().__init__()
        self.events = []

    def dispatch(self, event_name: str, payload: Any) -> None:
        """Record the event on self.events instead of broadcasting it."""
        recorded = DummyEvent(event_name=payload["event"], payload=payload["data"])
        self.events.append(recorded)
|
|
|
|
|
|
|
|
|
|
|
|
# Create a temporary directory using the contents of `./data/invokeai_root` as the template
|
|
|
|
@pytest.fixture
def mm2_root_dir(tmp_path_factory) -> Path:
    """Copy the `data/invokeai_root` template into a fresh temp dir and return its path."""
    template = Path(__file__).resolve().parent / "data" / "invokeai_root"
    destination: Path = tmp_path_factory.mktemp("data") / "invokeai_root"
    shutil.copytree(template, destination)
    return destination
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def mm2_model_files(tmp_path_factory) -> Path:
    """Copy the `data/test_files` template into a fresh temp dir and return its path."""
    template = Path(__file__).resolve().parent / "data" / "test_files"
    destination: Path = tmp_path_factory.mktemp("data") / "test_files"
    shutil.copytree(template, destination)
    return destination
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def embedding_file(mm2_model_files: Path) -> Path:
    """Path of the test embedding inside the copied model-files tree."""
    embedding_path = mm2_model_files / "test_embedding.safetensors"
    return embedding_path
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def diffusers_dir(mm2_model_files: Path) -> Path:
    """Path of the test diffusers model directory inside the copied model-files tree."""
    diffusers_path = mm2_model_files / "test-diffusers-main"
    return diffusers_path
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def mm2_app_config(mm2_root_dir: Path) -> InvokeAIAppConfig:
    """Build an InvokeAIAppConfig rooted at the temporary invokeai_root."""
    config = InvokeAIAppConfig(models_dir=mm2_root_dir / "models", log_level="info")
    config.set_root(mm2_root_dir)
    return config
|
|
|
|
|
|
|
|
|
2024-02-10 23:09:45 +00:00
|
|
|
@pytest.fixture
def mm2_download_queue(mm2_session: Session, request: FixtureRequest) -> DownloadQueueServiceBase:
    """Start a download queue wired to the mock HTTP session; stop it at teardown."""
    queue = DownloadQueueService(requests_session=mm2_session)
    queue.start()
    # Bound method is called with no args at teardown, same as the original wrapper.
    request.addfinalizer(queue.stop)
    return queue
|
|
|
|
|
2024-02-18 06:27:42 +00:00
|
|
|
|
2024-02-17 16:45:32 +00:00
|
|
|
@pytest.fixture
def mm2_loader(mm2_app_config: InvokeAIAppConfig, mm2_record_store: ModelRecordServiceBase) -> ModelLoadServiceBase:
    """Construct a ModelLoadService with RAM and conversion caches sized from the app config.

    NOTE(review): mm2_record_store is not used in the body — presumably requested only so
    that fixture runs (and populates the record database) first; confirm before removing.
    """
    cache = ModelCache(
        logger=InvokeAILogger.get_logger(),
        max_cache_size=mm2_app_config.ram,
        max_vram_cache_size=mm2_app_config.vram,
    )
    conversions = ModelConvertCache(mm2_app_config.convert_cache_path)
    return ModelLoadService(app_config=mm2_app_config, ram_cache=cache, convert_cache=conversions)
|
|
|
|
|
2024-02-17 16:45:32 +00:00
|
|
|
|
|
|
|
@pytest.fixture
def mm2_installer(
    mm2_app_config: InvokeAIAppConfig,
    mm2_download_queue: DownloadQueueServiceBase,
    mm2_session: Session,
    request: FixtureRequest,
) -> ModelInstallServiceBase:
    """Start a ModelInstallService backed by a mock database, dummy event bus and mock HTTP session.

    The service is stopped automatically at fixture teardown.
    """
    log = InvokeAILogger.get_logger()
    database = create_mock_sqlite_database(mm2_app_config, log)
    event_bus = DummyEventService()
    record_store = ModelRecordServiceSQL(database)

    service = ModelInstallService(
        app_config=mm2_app_config,
        record_store=record_store,
        download_queue=mm2_download_queue,
        event_bus=event_bus,
        session=mm2_session,
    )
    service.start()

    def _teardown() -> None:
        service.stop()
        # Give worker threads a moment to print their final message before the logger closes.
        time.sleep(0.1)

    request.addfinalizer(_teardown)
    return service
|
2024-02-10 23:09:45 +00:00
|
|
|
|
|
|
|
|
2024-01-14 19:54:53 +00:00
|
|
|
@pytest.fixture
def mm2_record_store(mm2_app_config: InvokeAIAppConfig) -> ModelRecordServiceBase:
    """Create a SQL-backed model record store pre-populated with five simple config records."""
    logger = InvokeAILogger.get_logger(config=mm2_app_config)
    db = create_mock_sqlite_database(mm2_app_config, logger)
    store = ModelRecordServiceSQL(db)
    # Five simple config records covering VAE, checkpoint main, diffusers main and two LoRAs.
    seed_configs = [
        VAEDiffusersConfig(
            key="test_config_1",
            path="/tmp/foo1",
            format=ModelFormat.Diffusers,
            name="test2",
            base=BaseModelType.StableDiffusion2,
            type=ModelType.VAE,
            hash="111222333444",
            source="stabilityai/sdxl-vae",
            source_type=ModelSourceType.HFRepoID,
        ),
        MainCheckpointConfig(
            key="test_config_2",
            path="/tmp/foo2.ckpt",
            name="model1",
            format=ModelFormat.Checkpoint,
            base=BaseModelType.StableDiffusion1,
            type=ModelType.Main,
            config_path="/tmp/foo.yaml",
            variant=ModelVariantType.Normal,
            hash="111222333444",
            source="https://civitai.com/models/206883/split",
            source_type=ModelSourceType.Url,
        ),
        MainDiffusersConfig(
            key="test_config_3",
            path="/tmp/foo3",
            format=ModelFormat.Diffusers,
            name="test3",
            base=BaseModelType.StableDiffusionXL,
            type=ModelType.Main,
            hash="111222333444",
            source="author3/model3",
            description="This is test 3",
            source_type=ModelSourceType.HFRepoID,
        ),
        LoRADiffusersConfig(
            key="test_config_4",
            path="/tmp/foo4",
            format=ModelFormat.Diffusers,
            name="test4",
            base=BaseModelType.StableDiffusionXL,
            type=ModelType.LoRA,
            hash="111222333444",
            source="author4/model4",
            source_type=ModelSourceType.HFRepoID,
        ),
        LoRADiffusersConfig(
            key="test_config_5",
            path="/tmp/foo5",
            format=ModelFormat.Diffusers,
            name="test5",
            base=BaseModelType.StableDiffusion1,
            type=ModelType.LoRA,
            hash="111222333444",
            source="author4/model5",
            source_type=ModelSourceType.HFRepoID,
        ),
    ]
    for config in seed_configs:
        store.add_model(config)
    return store
|
|
|
|
|
2024-02-18 06:27:42 +00:00
|
|
|
|
2024-01-14 19:54:53 +00:00
|
|
|
@pytest.fixture
def mm2_model_manager(
    mm2_record_store: ModelRecordServiceBase, mm2_installer: ModelInstallServiceBase, mm2_loader: ModelLoadServiceBase
) -> ModelManagerServiceBase:
    """Assemble the full model manager from the record-store, installer and loader fixtures."""
    manager = ModelManagerService(store=mm2_record_store, install=mm2_installer, load=mm2_loader)
    return manager
|
|
|
|
|
2024-01-14 19:54:53 +00:00
|
|
|
|
|
|
|
@pytest.fixture
def mm2_session(embedding_file: Path, diffusers_dir: Path) -> Session:
    """This fixtures defines a series of mock URLs for testing download and installation.

    Mounts adapters for: a 404 URL, HuggingFace model/repo metadata endpoints, Civitai
    metadata endpoints, a direct embedding-file download, a HF textual-inversion repo, and
    every file under `diffusers_dir` (served under the sdxl-turbo resolve URL).

    FIX: the original mounted "https://huggingface.co/api/models/stabilityai/sdxl-turbo"
    twice with an identical adapter and read the embedding file from disk twice; the
    redundant mount and read are removed (no behavioral change).
    """
    sess: Session = TestSession()
    sess.mount(
        "https://test.com/missing_model.safetensors",
        TestAdapter(
            b"missing",
            status=404,
        ),
    )
    sess.mount(
        "https://huggingface.co/api/models/stabilityai/sdxl-turbo",
        TestAdapter(
            RepoHFMetadata1,
            headers={"Content-Type": "application/json; charset=utf-8", "Content-Length": len(RepoHFMetadata1)},
        ),
    )
    sess.mount(
        "https://huggingface.co/api/models/stabilityai/sdxl-turbo-nofp16",
        TestAdapter(
            RepoHFMetadata1_nofp16,
            headers={"Content-Type": "application/json; charset=utf-8", "Content-Length": len(RepoHFMetadata1_nofp16)},
        ),
    )
    sess.mount(
        "https://civitai.com/api/v1/model-versions/242807",
        TestAdapter(
            RepoCivitaiVersionMetadata1,
            headers={
                "Content-Length": len(RepoCivitaiVersionMetadata1),
            },
        ),
    )
    sess.mount(
        "https://civitai.com/api/v1/models/215485",
        TestAdapter(
            RepoCivitaiModelMetadata1,
            headers={
                "Content-Length": len(RepoCivitaiModelMetadata1),
            },
        ),
    )
    sess.mount(
        "https://huggingface.co/stabilityai/sdxl-turbo/resolve/main/model_index.json",
        TestAdapter(
            RepoHFModelJson1,
            headers={
                "Content-Length": len(RepoHFModelJson1),
            },
        ),
    )
    # Read the embedding bytes once and reuse them for both download URLs below.
    with open(embedding_file, "rb") as f:
        data = f.read()  # file is small - just 15K
    sess.mount(
        "https://www.test.foo/download/test_embedding.safetensors",
        TestAdapter(data, headers={"Content-Type": "application/octet-stream", "Content-Length": len(data)}),
    )
    sess.mount(
        "https://huggingface.co/api/models/InvokeAI-test/textual_inversion_tests?blobs=True",
        TestAdapter(
            HFTestLoraMetadata,
            headers={"Content-Type": "application/json; charset=utf-8", "Content-Length": len(HFTestLoraMetadata)},
        ),
    )
    sess.mount(
        "https://huggingface.co/InvokeAI-test/textual_inversion_tests/resolve/main/learned_embeds-steps-1000.safetensors",
        TestAdapter(
            data,
            headers={"Content-Type": "application/json; charset=utf-8", "Content-Length": len(data)},
        ),
    )
    # Serve every file in the diffusers test model under the sdxl-turbo resolve URL.
    for root, _, files in os.walk(diffusers_dir):
        for name in files:
            path = Path(root, name)
            url_base = path.relative_to(diffusers_dir).as_posix()
            url = f"https://huggingface.co/stabilityai/sdxl-turbo/resolve/main/{url_base}"
            with open(path, "rb") as f:
                data = f.read()
            sess.mount(
                url,
                TestAdapter(
                    data,
                    headers={
                        "Content-Type": "application/json; charset=utf-8",
                        "Content-Length": len(data),
                    },
                ),
            )
    return sess
|