change names of convert and download caches and add migration script

Lincoln Stein 2024-04-14 15:54:46 -04:00
parent 41b909cbe3
commit 3ddd7ced49
6 changed files with 111 additions and 57 deletions

View File

@@ -86,6 +86,7 @@ class InvokeAIAppConfig(BaseSettings):
        patchmatch: Enable patchmatch inpaint code.
        models_dir: Path to the models directory.
        convert_cache_dir: Path to the converted models cache directory. When loading a non-diffusers model, it will be converted and stored on disk at this location.
+       download_cache_dir: Path to the directory that contains dynamically downloaded models.
        legacy_conf_dir: Path to directory of legacy checkpoint config files.
        db_dir: Path to InvokeAI databases directory.
        outputs_dir: Path to directory for outputs.
@@ -146,7 +147,8 @@ class InvokeAIAppConfig(BaseSettings):
    # PATHS
    models_dir: Path = Field(default=Path("models"), description="Path to the models directory.")
-   convert_cache_dir: Path = Field(default=Path("models/.cache"), description="Path to the converted models cache directory. When loading a non-diffusers model, it will be converted and stored on disk at this location.")
+   convert_cache_dir: Path = Field(default=Path("models/.convert_cache"), description="Path to the converted models cache directory. When loading a non-diffusers model, it will be converted and stored on disk at this location.")
+   download_cache_dir: Path = Field(default=Path("models/.download_cache"), description="Path to the directory that contains dynamically downloaded models.")
    legacy_conf_dir: Path = Field(default=Path("configs"), description="Path to directory of legacy checkpoint config files.")
    db_dir: Path = Field(default=Path("databases"), description="Path to InvokeAI databases directory.")
    outputs_dir: Path = Field(default=Path("outputs"), description="Path to directory for outputs.")
@@ -303,6 +305,11 @@ class InvokeAIAppConfig(BaseSettings):
        """Path to the converted cache models directory, resolved to an absolute path."""
        return self._resolve(self.convert_cache_dir)

+   @property
+   def download_cache_path(self) -> Path:
+       """Path to the downloaded models directory, resolved to an absolute path."""
+       return self._resolve(self.download_cache_dir)
+
    @property
    def custom_nodes_path(self) -> Path:
        """Path to the custom nodes directory, resolved to an absolute path."""

View File

@@ -393,6 +393,11 @@ class ModelInstallService(ModelInstallServiceBase):
            rmtree(model_path)
        self.unregister(key)

+   @classmethod
+   def _download_cache_path(cls, source: Union[str, AnyHttpUrl], app_config: InvokeAIAppConfig) -> Path:
+       model_hash = sha256(str(source).encode("utf-8")).hexdigest()[0:32]
+       return app_config.download_cache_path / model_hash
+
    def download_and_cache(
        self,
        source: Union[str, AnyHttpUrl],
@@ -400,8 +405,7 @@ class ModelInstallService(ModelInstallServiceBase):
        timeout: int = 0,
    ) -> Path:
        """Download the model file located at source to the models cache and return its Path."""
-       model_hash = sha256(str(source).encode("utf-8")).hexdigest()[0:32]
-       model_path = self._app_config.convert_cache_path / model_hash
+       model_path = self._download_cache_path(source, self._app_config)

        # We expect the cache directory to contain one and only one downloaded file.
        # We don't know the file's name in advance, as it is set by the download
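
Not part of the diff: a minimal sketch of how a download source maps to its per-model cache subdirectory, mirroring _download_cache_path above. The URL and the relative cache root are made-up examples.

from hashlib import sha256
from pathlib import Path

source = "https://example.com/some_model.safetensors"  # hypothetical source URL
model_hash = sha256(str(source).encode("utf-8")).hexdigest()[0:32]
cache_dir = Path("models/.download_cache") / model_hash
print(cache_dir)  # models/.download_cache/<first 32 hex chars of the SHA-256 digest>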
@@ -532,8 +536,13 @@ class ModelInstallService(ModelInstallServiceBase):
        if resolved_path in installed_model_paths:
            return True
        # Skip core models entirely - these aren't registered with the model manager.
-       if str(resolved_path).startswith(str(self.app_config.models_path / "core")):
-           return False
+       for special_directory in [
+           self.app_config.models_path / "core",
+           self.app_config.convert_cache_dir,
+           self.app_config.download_cache_dir,
+       ]:
+           if resolved_path.is_relative_to(special_directory):
+               return False
        try:
            model_id = self.register_path(model_path)
            self._logger.info(f"Registered {model_path.name} with id {model_id}")

View File

@@ -12,6 +12,7 @@ from invokeai.app.services.shared.sqlite_migrator.migrations.migration_6 import
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_7 import build_migration_7
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_8 import build_migration_8
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_9 import build_migration_9
+from invokeai.app.services.shared.sqlite_migrator.migrations.migration_10 import build_migration_10
from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_impl import SqliteMigrator
@@ -41,6 +42,7 @@ def init_db(config: InvokeAIAppConfig, logger: Logger, image_files: ImageFileSto
    migrator.register_migration(build_migration_7())
    migrator.register_migration(build_migration_8(app_config=config))
    migrator.register_migration(build_migration_9())
+   migrator.register_migration(build_migration_10(app_config=config, logger=logger))
    migrator.run_migrations()

    return db

View File

@@ -0,0 +1,87 @@
import pathlib
import shutil
import sqlite3
from logging import Logger

from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.app.services.model_install.model_install_default import ModelInstallService
from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import Migration

LEGACY_CORE_MODELS = {
    # OpenPose
    "https://huggingface.co/yzd-v/DWPose/resolve/main/yolox_l.onnx?download=true": "any/annotators/dwpose/yolox_l.onnx",
    "https://huggingface.co/yzd-v/DWPose/resolve/main/dw-ll_ucoco_384.onnx?download=true": "any/annotators/dwpose/dw-ll_ucoco_384.onnx",
    # DepthAnything
    "https://huggingface.co/spaces/LiheYoung/Depth-Anything/resolve/main/checkpoints/depth_anything_vitl14.pth?download=true": "any/annotators/depth_anything/depth_anything_vitl14.pth",
    "https://huggingface.co/spaces/LiheYoung/Depth-Anything/resolve/main/checkpoints/depth_anything_vitb14.pth?download=true": "any/annotators/depth_anything/depth_anything_vitb14.pth",
    "https://huggingface.co/spaces/LiheYoung/Depth-Anything/resolve/main/checkpoints/depth_anything_vits14.pth?download=true": "any/annotators/depth_anything/depth_anything_vits14.pth",
    # Lama inpaint
    "https://github.com/Sanster/models/releases/download/add_big_lama/big-lama.pt": "core/misc/lama/lama.pt",
    # RealESRGAN upscale
    "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth": "core/upscaling/realesrgan/RealESRGAN_x4plus.pth",
    "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.2.4/RealESRGAN_x4plus_anime_6B.pth": "core/upscaling/realesrgan/RealESRGAN_x4plus_anime_6B.pth",
    "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.1/ESRGAN_SRx4_DF2KOST_official-ff704c30.pth": "core/upscaling/realesrgan/ESRGAN_SRx4_DF2KOST_official-ff704c30.pth",
    "https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.1/RealESRGAN_x2plus.pth": "core/upscaling/realesrgan/RealESRGAN_x2plus.pth",
}


class Migration10Callback:
    def __init__(self, app_config: InvokeAIAppConfig, logger: Logger) -> None:
        self._app_config = app_config
        self._logger = logger

    def __call__(self, cursor: sqlite3.Cursor) -> None:
        self._rename_convert_cache()
        self._migrate_downloaded_models_cache()
        self._remove_unused_core_models()

    def _rename_convert_cache(self) -> None:
        """Rename models/.cache to models/.convert_cache."""
        legacy_convert_path = self._app_config.root_path / "models" / ".cache"
        configured_convert_dir = self._app_config.convert_cache_dir
        configured_convert_path = self._app_config.convert_cache_path
        # The old convert dir was in use, and the current convert dir has not been changed.
        if legacy_convert_path.exists() and configured_convert_dir == pathlib.Path("models/.convert_cache"):
            self._logger.info(
                f"Migrating legacy convert cache directory from {str(legacy_convert_path)} to {str(configured_convert_path)}"
            )
            shutil.rmtree(configured_convert_path, ignore_errors=True)  # shouldn't be needed, but just in case...
            shutil.move(legacy_convert_path, configured_convert_path)

    def _migrate_downloaded_models_cache(self) -> None:
        """Move used core models to models/.download_cache."""
        self._logger.info(f"Migrating legacy core models to {str(self._app_config.download_cache_path)}")
        for url, legacy_dest in LEGACY_CORE_MODELS.items():
            legacy_dest_path = self._app_config.models_path / legacy_dest
            if not legacy_dest_path.exists():
                continue
            # This returns a unique directory path.
            new_path = ModelInstallService._download_cache_path(url, self._app_config)
            new_path.mkdir(parents=True, exist_ok=True)
            shutil.move(legacy_dest_path, new_path / legacy_dest_path.name)

    def _remove_unused_core_models(self) -> None:
        """Remove unused core models and their directories."""
        self._logger.info("Removing defunct core models.")
        for dir in ["face_restoration", "misc", "upscaling"]:
            path_to_remove = self._app_config.models_path / "core" / dir
            shutil.rmtree(path_to_remove, ignore_errors=True)
        shutil.rmtree(self._app_config.models_path / "any" / "annotators", ignore_errors=True)


def build_migration_10(app_config: InvokeAIAppConfig, logger: Logger) -> Migration:
    """
    Build the migration from database version 9 to 10.

    This migration does the following:
    - Moves "core" models previously downloaded with download_with_progress_bar() into the new
      "models/.download_cache" directory.
    - Renames "models/.cache" to "models/.convert_cache".
    """
    migration_10 = Migration(
        from_version=9,
        to_version=10,
        callback=Migration10Callback(app_config=app_config, logger=logger),
    )

    return migration_10
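
Not part of the commit: an illustrative mapping of where one legacy core model ends up after _migrate_downloaded_models_cache() runs. The URL comes from LEGACY_CORE_MODELS above; the /invokeai root is an assumed default.

from hashlib import sha256
from pathlib import Path

url = "https://github.com/Sanster/models/releases/download/add_big_lama/big-lama.pt"
old_location = Path("/invokeai/models") / "core/misc/lama/lama.pt"   # legacy destination
new_location = (
    Path("/invokeai/models/.download_cache")
    / sha256(url.encode("utf-8")).hexdigest()[0:32]                  # per-source hash directory
    / "lama.pt"                                                      # original file name is kept
)
print(old_location, "->", new_location)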

View File

@@ -1,51 +0,0 @@
from pathlib import Path
from urllib import request

from tqdm import tqdm

from invokeai.backend.util.logging import InvokeAILogger


class ProgressBar:
    """Simple progress bar for urllib.request.urlretrieve using tqdm."""

    def __init__(self, model_name: str = "file"):
        self.pbar = None
        self.name = model_name

    def __call__(self, block_num: int, block_size: int, total_size: int):
        if not self.pbar:
            self.pbar = tqdm(
                desc=self.name,
                initial=0,
                unit="iB",
                unit_scale=True,
                unit_divisor=1000,
                total=total_size,
            )
        self.pbar.update(block_size)


def download_with_progress_bar(name: str, url: str, dest_path: Path) -> bool:
    """Download a file from a URL to a destination path, with a progress bar.

    If the file already exists, it will not be downloaded again.
    Exceptions are not caught.

    Args:
        name (str): Name of the file being downloaded.
        url (str): URL to download the file from.
        dest_path (Path): Destination path to save the file to.

    Returns:
        bool: True if the file was downloaded, False if it already existed.
    """
    if dest_path.exists():
        return False  # already downloaded

    InvokeAILogger.get_logger().info(f"Downloading {name}...")
    dest_path.parent.mkdir(parents=True, exist_ok=True)
    request.urlretrieve(url, dest_path, ProgressBar(name))
    return True

View File

@@ -29,7 +29,7 @@ def test_download_and_cache(mock_context: InvocationContext, mm2_root_dir: Path)
    assert downloaded_path.is_file()
    assert downloaded_path.exists()
    assert downloaded_path.name == "test_embedding.safetensors"
-   assert downloaded_path.parent.parent == mm2_root_dir / "models/.cache"
+   assert downloaded_path.parent.parent == mm2_root_dir / "models/.download_cache"

    downloaded_path_2 = mock_context.models.download_and_cache_ckpt(
        "https://www.test.foo/download/test_embedding.safetensors"