Merge branch 'main' into refactor/model-manager-3

Lincoln Stein, 2023-12-04 17:06:36 -05:00 (committed by GitHub)
12 changed files with 198 additions and 82 deletions

View File

@@ -1,7 +1,11 @@
import typing
from enum import Enum
+from importlib.metadata import PackageNotFoundError, version
from pathlib import Path
+from platform import python_version
+from typing import Optional

+import torch
from fastapi import Body
from fastapi.routing import APIRouter
from pydantic import BaseModel, Field
@@ -40,6 +44,24 @@ class AppVersion(BaseModel):
    version: str = Field(description="App version")


+class AppDependencyVersions(BaseModel):
+    """App dependency versions response"""
+
+    accelerate: str = Field(description="accelerate version")
+    compel: str = Field(description="compel version")
+    cuda: Optional[str] = Field(description="CUDA version")
+    diffusers: str = Field(description="diffusers version")
+    numpy: str = Field(description="Numpy version")
+    opencv: str = Field(description="OpenCV version")
+    onnx: str = Field(description="ONNX version")
+    pillow: str = Field(description="Pillow (PIL) version")
+    python: str = Field(description="Python version")
+    torch: str = Field(description="PyTorch version")
+    torchvision: str = Field(description="PyTorch Vision version")
+    transformers: str = Field(description="transformers version")
+    xformers: Optional[str] = Field(description="xformers version")


class AppConfig(BaseModel):
    """App Config Response"""
@@ -54,6 +76,29 @@ async def get_version() -> AppVersion:
    return AppVersion(version=__version__)


+@app_router.get("/app_deps", operation_id="get_app_deps", status_code=200, response_model=AppDependencyVersions)
+async def get_app_deps() -> AppDependencyVersions:
+    try:
+        xformers = version("xformers")
+    except PackageNotFoundError:
+        xformers = None
+    return AppDependencyVersions(
+        accelerate=version("accelerate"),
+        compel=version("compel"),
+        cuda=torch.version.cuda,
+        diffusers=version("diffusers"),
+        numpy=version("numpy"),
+        opencv=version("opencv-python"),
+        onnx=version("onnx"),
+        pillow=version("pillow"),
+        python=python_version(),
+        torch=torch.version.__version__,
+        torchvision=version("torchvision"),
+        transformers=version("transformers"),
+        xformers=xformers,
+    )


@app_router.get("/config", operation_id="get_config", status_code=200, response_model=AppConfig)
async def get_config() -> AppConfig:
    infill_methods = ["tile", "lama", "cv2"]
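
A minimal sketch of calling the new endpoint, assuming the requests package, a local server on InvokeAI's default port, and the usual /api/v1/app prefix for app_router (none of which is shown in this diff):

import requests

# Base URL is an assumption; adjust host, port, and route prefix to your deployment.
resp = requests.get("http://127.0.0.1:9090/api/v1/app/app_deps", timeout=10)
resp.raise_for_status()
deps = resp.json()
# cuda and xformers are Optional and may come back as null.
print(f"torch {deps['torch']}, CUDA {deps.get('cuda') or 'n/a'}")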

View File

@@ -5,6 +5,8 @@ from typing import Union
import torch

+from invokeai.app.services.invoker import Invoker
+
from .latents_storage_base import LatentsStorageBase
@@ -17,6 +19,10 @@ class DiskLatentsStorage(LatentsStorageBase):
        self.__output_folder = output_folder if isinstance(output_folder, Path) else Path(output_folder)
        self.__output_folder.mkdir(parents=True, exist_ok=True)

+    def start(self, invoker: Invoker) -> None:
+        self._invoker = invoker
+        self._delete_all_latents()
+
    def get(self, name: str) -> torch.Tensor:
        latent_path = self.get_path(name)
        return torch.load(latent_path)
@@ -32,3 +38,21 @@ class DiskLatentsStorage(LatentsStorageBase):
    def get_path(self, name: str) -> Path:
        return self.__output_folder / name

+    def _delete_all_latents(self) -> None:
+        """
+        Deletes all latents from disk.
+        Must be called after we have access to `self._invoker` (e.g. in `start()`).
+        """
+        deleted_latents_count = 0
+        freed_space = 0
+        for latents_file in Path(self.__output_folder).glob("*"):
+            if latents_file.is_file():
+                freed_space += latents_file.stat().st_size
+                deleted_latents_count += 1
+                latents_file.unlink()
+        if deleted_latents_count > 0:
+            freed_space_in_mb = round(freed_space / 1024 / 1024, 2)
+            self._invoker.services.logger.info(
+                f"Deleted {deleted_latents_count} latents files (freed {freed_space_in_mb}MB)"
+            )
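
For context, get() above loads tensors with torch.load keyed by name; a matching save() is assumed from the storage interface (it is not shown in this diff, and the import path may differ by version). A round-trip sketch under those assumptions:

import torch

from invokeai.app.services.latents_storage.latents_storage_disk import DiskLatentsStorage  # assumed path

storage = DiskLatentsStorage("outputs/latents")  # folder is created by __init__, per the hunk above
latents = torch.zeros(1, 4, 64, 64)
storage.save("session-abc-latents", latents)  # assumed save(name, data) signature
assert torch.equal(storage.get("session-abc-latents"), latents)

Note that _delete_all_latents() is deliberately deferred to start(): the logger lives on the invoker, which is only available once the service starts.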

View File

@@ -5,6 +5,8 @@ from typing import Dict, Optional
import torch

+from invokeai.app.services.invoker import Invoker
+
from .latents_storage_base import LatentsStorageBase
@@ -23,6 +25,18 @@ class ForwardCacheLatentsStorage(LatentsStorageBase):
        self.__cache_ids = Queue()
        self.__max_cache_size = max_cache_size

+    def start(self, invoker: Invoker) -> None:
+        self._invoker = invoker
+        start_op = getattr(self.__underlying_storage, "start", None)
+        if callable(start_op):
+            start_op(invoker)
+
+    def stop(self, invoker: Invoker) -> None:
+        self._invoker = invoker
+        stop_op = getattr(self.__underlying_storage, "stop", None)
+        if callable(stop_op):
+            stop_op(invoker)
+
    def get(self, name: str) -> torch.Tensor:
        cache_item = self.__get_cache(name)
        if cache_item is not None:
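
The new start() and stop() forward the lifecycle hooks to the wrapped storage only when it defines them. The same duck-typed pattern in isolation, with illustrative names:

class Plain:
    pass

class WithHook:
    def start(self, invoker) -> None:
        print(f"started with {invoker!r}")

def call_optional_hook(obj, name: str, *args) -> None:
    hook = getattr(obj, name, None)  # None when the attribute is absent
    if callable(hook):  # also guards against non-callable attributes
        hook(*args)

call_optional_hook(Plain(), "start", "invoker")     # silently does nothing
call_optional_hook(WithHook(), "start", "invoker")  # prints: started with 'invoker'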

View File

@@ -42,7 +42,8 @@ class SqliteSessionQueue(SessionQueueBase):
        self._set_in_progress_to_canceled()
        prune_result = self.prune(DEFAULT_QUEUE_ID)
        local_handler.register(event_name=EventServiceBase.queue_event, _func=self._on_session_event)
-        self.__invoker.services.logger.info(f"Pruned {prune_result.deleted} finished queue items")
+        if prune_result.deleted > 0:
+            self.__invoker.services.logger.info(f"Pruned {prune_result.deleted} finished queue items")

    def __init__(self, db: SqliteDatabase) -> None:
        super().__init__()

View File

@@ -207,10 +207,12 @@ class IterateInvocationOutput(BaseInvocationOutput):
    item: Any = OutputField(
        description="The item being iterated over", title="Collection Item", ui_type=UIType._CollectionItem
    )
+    index: int = OutputField(description="The index of the item", title="Index")
+    total: int = OutputField(description="The total number of items", title="Total")


# TODO: Fill this out and move to invocations
-@invocation("iterate", version="1.0.0")
+@invocation("iterate", version="1.1.0")
class IterateInvocation(BaseInvocation):
    """Iterates over a list of items"""
@@ -221,7 +223,7 @@
    def invoke(self, context: InvocationContext) -> IterateInvocationOutput:
        """Produces the outputs as values"""
-        return IterateInvocationOutput(item=self.collection[self.index])
+        return IterateInvocationOutput(item=self.collection[self.index], index=self.index, total=len(self.collection))


@invocation_output("collect_output")
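
Because index and total are purely additive outputs, existing graphs keep working and the node takes a minor version bump (1.0.0 to 1.1.0) rather than a major one. A trivial stand-in for how a downstream consumer might use the new fields:

# Plain values standing in for the pydantic output above.
item, index, total = "image_3.png", 2, 5
print(f"[{index + 1}/{total}] processing {item}")  # [3/5] processing image_3.png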

View File

@@ -1,6 +1,7 @@
import sqlite3
import threading
from logging import Logger
+from pathlib import Path

from invokeai.app.services.config import InvokeAIAppConfig
@@ -8,25 +9,20 @@ sqlite_memory = ":memory:"


class SqliteDatabase:
-    conn: sqlite3.Connection
-    lock: threading.RLock
-    _logger: Logger
-    _config: InvokeAIAppConfig
-
    def __init__(self, config: InvokeAIAppConfig, logger: Logger):
        self._logger = logger
        self._config = config

        if self._config.use_memory_db:
-            location = sqlite_memory
+            self.db_path = sqlite_memory
            logger.info("Using in-memory database")
        else:
            db_path = self._config.db_path
            db_path.parent.mkdir(parents=True, exist_ok=True)
-            location = str(db_path)
-            self._logger.info(f"Using database at {location}")
+            self.db_path = str(db_path)
+            self._logger.info(f"Using database at {self.db_path}")

-        self.conn = sqlite3.connect(location, check_same_thread=False)
+        self.conn = sqlite3.connect(self.db_path, check_same_thread=False)
        self.lock = threading.RLock()
        self.conn.row_factory = sqlite3.Row
@@ -37,10 +33,16 @@ class SqliteDatabase:
    def clean(self) -> None:
        try:
+            if self.db_path == sqlite_memory:
+                return
+            initial_db_size = Path(self.db_path).stat().st_size
            self.lock.acquire()
            self.conn.execute("VACUUM;")
            self.conn.commit()
-            self._logger.info("Cleaned database")
+            final_db_size = Path(self.db_path).stat().st_size
+            freed_space_in_mb = round((initial_db_size - final_db_size) / 1024 / 1024, 2)
+            if freed_space_in_mb > 0:
+                self._logger.info(f"Cleaned database (freed {freed_space_in_mb}MB)")
        except Exception as e:
            self._logger.error(f"Error cleaning database: {e}")
            raise e
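
The freed-space figure is meaningful because VACUUM rewrites the database file without its free pages, shrinking it on disk. A self-contained sketch of the same measurement; the file name and table are invented for the demo:

import sqlite3
from pathlib import Path

db = Path("vacuum_demo.db")
conn = sqlite3.connect(db)
conn.execute("CREATE TABLE IF NOT EXISTS t (payload BLOB)")
conn.execute("INSERT INTO t VALUES (zeroblob(10000000))")  # ~10MB of zeros
conn.commit()
conn.execute("DELETE FROM t")  # pages are freed, but the file keeps its size
conn.commit()

before = db.stat().st_size
conn.execute("VACUUM;")  # must run outside a transaction; rebuilds the file
after = db.stat().st_size
conn.close()
print(f"freed {round((before - after) / 1024 / 1024, 2)}MB")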