diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 02bc10df90..0f0b953be7 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -42,6 +42,21 @@ Please provide steps on how to test changes, any hardware or software specifications as well as any other pertinent information. --> +## Merge Plan + + + ## Added/updated tests? - [ ] Yes diff --git a/.github/workflows/lint-frontend.yml b/.github/workflows/lint-frontend.yml index 3b354d1003..a4e1bba428 100644 --- a/.github/workflows/lint-frontend.yml +++ b/.github/workflows/lint-frontend.yml @@ -21,16 +21,16 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-22.04 steps: - - name: Setup Node 20 + - name: Setup Node 18 uses: actions/setup-node@v4 with: - node-version: '20' + node-version: '18' - name: Checkout uses: actions/checkout@v4 - name: Setup pnpm uses: pnpm/action-setup@v2 with: - version: 8 + version: '8.12.1' - name: Install dependencies run: 'pnpm install --prefer-frozen-lockfile' - name: Typescript diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 5b7d2cd2fa..162cbe3427 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -1,13 +1,15 @@ name: PyPI Release on: - push: - paths: - - 'invokeai/version/invokeai_version.py' workflow_dispatch: + inputs: + publish_package: + description: 'Publish build on PyPi? 
[true/false]' + required: true + default: 'false' jobs: - release: + build-and-release: if: github.repository == 'invoke-ai/InvokeAI' runs-on: ubuntu-22.04 env: @@ -15,19 +17,43 @@ jobs: TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} TWINE_NON_INTERACTIVE: 1 steps: - - name: checkout sources - uses: actions/checkout@v3 + - name: Checkout + uses: actions/checkout@v4 - - name: install deps + - name: Setup Node 18 + uses: actions/setup-node@v4 + with: + node-version: '18' + + - name: Setup pnpm + uses: pnpm/action-setup@v2 + with: + version: '8.12.1' + + - name: Install frontend dependencies + run: pnpm install --prefer-frozen-lockfile + working-directory: invokeai/frontend/web + + - name: Build frontend + run: pnpm run build + working-directory: invokeai/frontend/web + + - name: Install python dependencies run: pip install --upgrade build twine - - name: build package + - name: Build python package run: python3 -m build - - name: check distribution + - name: Upload build as workflow artifact + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist + + - name: Check distribution run: twine check dist/* - - name: check PyPI versions + - name: Check PyPI versions if: github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/') run: | pip install --upgrade requests @@ -36,6 +62,6 @@ jobs: EXISTS=scripts.pypi_helper.local_on_pypi(); \ print(f'PACKAGE_EXISTS={EXISTS}')" >> $GITHUB_ENV - - name: upload package - if: env.PACKAGE_EXISTS == 'False' && env.TWINE_PASSWORD != '' + - name: Publish build on PyPi + if: env.PACKAGE_EXISTS == 'False' && env.TWINE_PASSWORD != '' && github.event.inputs.publish_package == 'true' run: twine upload dist/* diff --git a/docs/installation/020_INSTALL_MANUAL.md b/docs/installation/020_INSTALL_MANUAL.md index 0ddf1bca68..b395405ba3 100644 --- a/docs/installation/020_INSTALL_MANUAL.md +++ b/docs/installation/020_INSTALL_MANUAL.md @@ -293,6 +293,19 @@ manager, please follow these steps: ## Developer Install +!!! 
warning + + InvokeAI uses a SQLite database. By running on `main`, you accept responsibility for your database. This + means making regular backups (especially before pulling) and/or fixing it yourself in the event that a + PR introduces a schema change. + + If you don't need persistent backend storage, you can use an ephemeral in-memory database by setting + `use_memory_db: true` under `Path:` in your `invokeai.yaml` file. + + If this is untenable, you should run the application via the official installer or a manual install of the + python package from pypi. These releases will not break your database. + + If you have an interest in how InvokeAI works, or you would like to add features or bugfixes, you are encouraged to install the source code for InvokeAI. For this to work, you will need to install the @@ -388,3 +401,5 @@ environment variable INVOKEAI_ROOT to point to the installation directory. Note that if you run into problems with the Conda installation, the InvokeAI staff will **not** be able to help you out. Caveat Emptor! 
+ +[dev-chat]: https://discord.com/channels/1020123559063990373/1049495067846524939 \ No newline at end of file diff --git a/docs/javascripts/init_kapa_widget.js b/docs/javascripts/init_kapa_widget.js new file mode 100644 index 0000000000..f22e276b5a --- /dev/null +++ b/docs/javascripts/init_kapa_widget.js @@ -0,0 +1,10 @@ +document.addEventListener("DOMContentLoaded", function () { + var script = document.createElement("script"); + script.src = "https://widget.kapa.ai/kapa-widget.bundle.js"; + script.setAttribute("data-website-id", "b5973bb1-476b-451e-8cf4-98de86745a10"); + script.setAttribute("data-project-name", "Invoke.AI"); + script.setAttribute("data-project-color", "#11213C"); + script.setAttribute("data-project-logo", "https://avatars.githubusercontent.com/u/113954515?s=280&v=4"); + script.async = true; + document.head.appendChild(script); +}); diff --git a/installer/create_installer.sh b/installer/create_installer.sh index ed8cbb0d0a..b32f65d9bf 100755 --- a/installer/create_installer.sh +++ b/installer/create_installer.sh @@ -91,9 +91,11 @@ rm -rf InvokeAI-Installer # copy content mkdir InvokeAI-Installer -for f in templates lib *.txt *.reg; do +for f in templates *.txt *.reg; do cp -r ${f} InvokeAI-Installer/ done +mkdir InvokeAI-Installer/lib +cp lib/*.py InvokeAI-Installer/lib # Move the wheel mv dist/*.whl InvokeAI-Installer/lib/ @@ -111,6 +113,6 @@ cp WinLongPathsEnabled.reg InvokeAI-Installer/ zip -r InvokeAI-installer-$VERSION.zip InvokeAI-Installer # clean up -rm -rf InvokeAI-Installer tmp dist +rm -rf InvokeAI-Installer tmp dist ../invokeai/frontend/web/dist/ exit 0 diff --git a/installer/lib/installer.py b/installer/lib/installer.py index 7b928f3c9c..a609b2c85e 100644 --- a/installer/lib/installer.py +++ b/installer/lib/installer.py @@ -244,9 +244,9 @@ class InvokeAiInstance: "numpy~=1.24.0", # choose versions that won't be uninstalled during phase 2 "urllib3~=1.26.0", "requests~=2.28.0", - "torch==2.1.0", + "torch==2.1.1", 
"torchmetrics==0.11.4", - "torchvision>=0.14.1", + "torchvision>=0.16.1", "--force-reinstall", "--find-links" if find_links is not None else None, find_links, diff --git a/invokeai/app/api/dependencies.py b/invokeai/app/api/dependencies.py index 803e988e52..2b88efbb95 100644 --- a/invokeai/app/api/dependencies.py +++ b/invokeai/app/api/dependencies.py @@ -2,6 +2,7 @@ from logging import Logger +from invokeai.app.services.shared.sqlite.sqlite_util import init_db from invokeai.backend.util.logging import InvokeAILogger from invokeai.version.invokeai_version import __version__ @@ -30,7 +31,6 @@ from ..services.session_processor.session_processor_default import DefaultSessio from ..services.session_queue.session_queue_sqlite import SqliteSessionQueue from ..services.shared.default_graphs import create_system_graphs from ..services.shared.graph import GraphExecutionState, LibraryGraph -from ..services.shared.sqlite.sqlite_database import SqliteDatabase from ..services.urls.urls_default import LocalUrlService from ..services.workflow_records.workflow_records_sqlite import SqliteWorkflowRecordsStorage from .events import FastAPIEventService @@ -67,8 +67,9 @@ class ApiDependencies: logger.debug(f"Internet connectivity is {config.internet_available}") output_folder = config.output_path + image_files = DiskImageFileStorage(f"{output_folder}/images") - db = SqliteDatabase(config, logger) + db = init_db(config=config, logger=logger, image_files=image_files) configuration = config logger = logger @@ -80,7 +81,6 @@ class ApiDependencies: events = FastAPIEventService(event_handler_id) graph_execution_manager = SqliteItemStorage[GraphExecutionState](db=db, table_name="graph_executions") graph_library = SqliteItemStorage[LibraryGraph](db=db, table_name="graphs") - image_files = DiskImageFileStorage(f"{output_folder}/images") image_records = SqliteImageRecordStorage(db=db) images = ImageService() invocation_cache = MemoryInvocationCache(max_cache_size=config.node_cache_size) diff 
--git a/invokeai/app/api/routers/model_records.py b/invokeai/app/api/routers/model_records.py index 997f76a185..934a7d15b3 100644 --- a/invokeai/app/api/routers/model_records.py +++ b/invokeai/app/api/routers/model_records.py @@ -45,6 +45,9 @@ async def list_model_records( base_models: Optional[List[BaseModelType]] = Query(default=None, description="Base models to include"), model_type: Optional[ModelType] = Query(default=None, description="The type of model to get"), model_name: Optional[str] = Query(default=None, description="Exact match on the name of the model"), + model_format: Optional[str] = Query( + default=None, description="Exact match on the format of the model (e.g. 'diffusers')" + ), ) -> ModelsList: """Get a list of models.""" record_store = ApiDependencies.invoker.services.model_records @@ -52,10 +55,14 @@ async def list_model_records( if base_models: for base_model in base_models: found_models.extend( - record_store.search_by_attr(base_model=base_model, model_type=model_type, model_name=model_name) + record_store.search_by_attr( + base_model=base_model, model_type=model_type, model_name=model_name, model_format=model_format + ) ) else: - found_models.extend(record_store.search_by_attr(model_type=model_type, model_name=model_name)) + found_models.extend( + record_store.search_by_attr(model_type=model_type, model_name=model_name, model_format=model_format) + ) return ModelsList(models=found_models) diff --git a/invokeai/app/invocations/image.py b/invokeai/app/invocations/image.py index 89209fed41..f729d60cdd 100644 --- a/invokeai/app/invocations/image.py +++ b/invokeai/app/invocations/image.py @@ -13,7 +13,15 @@ from invokeai.app.shared.fields import FieldDescriptions from invokeai.backend.image_util.invisible_watermark import InvisibleWatermark from invokeai.backend.image_util.safety_checker import SafetyChecker -from .baseinvocation import BaseInvocation, Input, InputField, InvocationContext, WithMetadata, invocation +from .baseinvocation import ( + 
BaseInvocation, + Classification, + Input, + InputField, + InvocationContext, + WithMetadata, + invocation, +) @invocation("show_image", title="Show Image", tags=["image"], category="image", version="1.0.0") @@ -421,6 +429,64 @@ class ImageBlurInvocation(BaseInvocation, WithMetadata): ) +@invocation( + "unsharp_mask", + title="Unsharp Mask", + tags=["image", "unsharp_mask"], + category="image", + version="1.2.0", + classification=Classification.Beta, +) +class UnsharpMaskInvocation(BaseInvocation, WithMetadata): + """Applies an unsharp mask filter to an image""" + + image: ImageField = InputField(description="The image to use") + radius: float = InputField(gt=0, description="Unsharp mask radius", default=2) + strength: float = InputField(ge=0, description="Unsharp mask strength", default=50) + + def pil_from_array(self, arr): + return Image.fromarray((arr * 255).astype("uint8")) + + def array_from_pil(self, img): + return numpy.array(img) / 255 + + def invoke(self, context: InvocationContext) -> ImageOutput: + image = context.services.images.get_pil_image(self.image.image_name) + mode = image.mode + + alpha_channel = image.getchannel("A") if mode == "RGBA" else None + image = image.convert("RGB") + image_blurred = self.array_from_pil(image.filter(ImageFilter.GaussianBlur(radius=self.radius))) + + image = self.array_from_pil(image) + image += (image - image_blurred) * (self.strength / 100.0) + image = numpy.clip(image, 0, 1) + image = self.pil_from_array(image) + + image = image.convert(mode) + + # Make the image RGBA if we had a source alpha channel + if alpha_channel is not None: + image.putalpha(alpha_channel) + + image_dto = context.services.images.create( + image=image, + image_origin=ResourceOrigin.INTERNAL, + image_category=ImageCategory.GENERAL, + node_id=self.id, + session_id=context.graph_execution_state_id, + is_intermediate=self.is_intermediate, + metadata=self.metadata, + workflow=context.workflow, + ) + + return ImageOutput( + 
image=ImageField(image_name=image_dto.image_name), + width=image.width, + height=image.height, + ) + + PIL_RESAMPLING_MODES = Literal[ "nearest", "box", diff --git a/invokeai/app/invocations/tiles.py b/invokeai/app/invocations/tiles.py index c0582986a8..cbf63ab169 100644 --- a/invokeai/app/invocations/tiles.py +++ b/invokeai/app/invocations/tiles.py @@ -38,7 +38,14 @@ class CalculateImageTilesOutput(BaseInvocationOutput): tiles: list[Tile] = OutputField(description="The tiles coordinates that cover a particular image shape.") -@invocation("calculate_image_tiles", title="Calculate Image Tiles", tags=["tiles"], category="tiles", version="1.0.0") +@invocation( + "calculate_image_tiles", + title="Calculate Image Tiles", + tags=["tiles"], + category="tiles", + version="1.0.0", + classification=Classification.Beta, +) class CalculateImageTilesInvocation(BaseInvocation): """Calculate the coordinates and overlaps of tiles that cover a target image shape.""" diff --git a/invokeai/app/services/board_image_records/board_image_records_sqlite.py b/invokeai/app/services/board_image_records/board_image_records_sqlite.py index 54d9a0af04..cde810a739 100644 --- a/invokeai/app/services/board_image_records/board_image_records_sqlite.py +++ b/invokeai/app/services/board_image_records/board_image_records_sqlite.py @@ -20,63 +20,6 @@ class SqliteBoardImageRecordStorage(BoardImageRecordStorageBase): self._conn = db.conn self._cursor = self._conn.cursor() - try: - self._lock.acquire() - self._create_tables() - self._conn.commit() - finally: - self._lock.release() - - def _create_tables(self) -> None: - """Creates the `board_images` junction table.""" - - # Create the `board_images` junction table. 
- self._cursor.execute( - """--sql - CREATE TABLE IF NOT EXISTS board_images ( - board_id TEXT NOT NULL, - image_name TEXT NOT NULL, - created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- updated via trigger - updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- Soft delete, currently unused - deleted_at DATETIME, - -- enforce one-to-many relationship between boards and images using PK - -- (we can extend this to many-to-many later) - PRIMARY KEY (image_name), - FOREIGN KEY (board_id) REFERENCES boards (board_id) ON DELETE CASCADE, - FOREIGN KEY (image_name) REFERENCES images (image_name) ON DELETE CASCADE - ); - """ - ) - - # Add index for board id - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_board_images_board_id ON board_images (board_id); - """ - ) - - # Add index for board id, sorted by created_at - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_board_images_board_id_created_at ON board_images (board_id, created_at); - """ - ) - - # Add trigger for `updated_at`. 
- self._cursor.execute( - """--sql - CREATE TRIGGER IF NOT EXISTS tg_board_images_updated_at - AFTER UPDATE - ON board_images FOR EACH ROW - BEGIN - UPDATE board_images SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') - WHERE board_id = old.board_id AND image_name = old.image_name; - END; - """ - ) - def add_image_to_board( self, board_id: str, diff --git a/invokeai/app/services/board_records/board_records_sqlite.py b/invokeai/app/services/board_records/board_records_sqlite.py index 165ce8df0c..a3836cb6c7 100644 --- a/invokeai/app/services/board_records/board_records_sqlite.py +++ b/invokeai/app/services/board_records/board_records_sqlite.py @@ -28,52 +28,6 @@ class SqliteBoardRecordStorage(BoardRecordStorageBase): self._conn = db.conn self._cursor = self._conn.cursor() - try: - self._lock.acquire() - self._create_tables() - self._conn.commit() - finally: - self._lock.release() - - def _create_tables(self) -> None: - """Creates the `boards` table and `board_images` junction table.""" - - # Create the `boards` table. - self._cursor.execute( - """--sql - CREATE TABLE IF NOT EXISTS boards ( - board_id TEXT NOT NULL PRIMARY KEY, - board_name TEXT NOT NULL, - cover_image_name TEXT, - created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- Updated via trigger - updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- Soft delete, currently unused - deleted_at DATETIME, - FOREIGN KEY (cover_image_name) REFERENCES images (image_name) ON DELETE SET NULL - ); - """ - ) - - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_boards_created_at ON boards (created_at); - """ - ) - - # Add trigger for `updated_at`. 
- self._cursor.execute( - """--sql - CREATE TRIGGER IF NOT EXISTS tg_boards_updated_at - AFTER UPDATE - ON boards FOR EACH ROW - BEGIN - UPDATE boards SET updated_at = current_timestamp - WHERE board_id = old.board_id; - END; - """ - ) - def delete(self, board_id: str) -> None: try: self._lock.acquire() diff --git a/invokeai/app/services/image_records/image_records_sqlite.py b/invokeai/app/services/image_records/image_records_sqlite.py index b14c322f50..74f82e7d84 100644 --- a/invokeai/app/services/image_records/image_records_sqlite.py +++ b/invokeai/app/services/image_records/image_records_sqlite.py @@ -32,101 +32,6 @@ class SqliteImageRecordStorage(ImageRecordStorageBase): self._conn = db.conn self._cursor = self._conn.cursor() - try: - self._lock.acquire() - self._create_tables() - self._conn.commit() - finally: - self._lock.release() - - def _create_tables(self) -> None: - """Creates the `images` table.""" - - # Create the `images` table. - self._cursor.execute( - """--sql - CREATE TABLE IF NOT EXISTS images ( - image_name TEXT NOT NULL PRIMARY KEY, - -- This is an enum in python, unrestricted string here for flexibility - image_origin TEXT NOT NULL, - -- This is an enum in python, unrestricted string here for flexibility - image_category TEXT NOT NULL, - width INTEGER NOT NULL, - height INTEGER NOT NULL, - session_id TEXT, - node_id TEXT, - metadata TEXT, - is_intermediate BOOLEAN DEFAULT FALSE, - created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- Updated via trigger - updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- Soft delete, currently unused - deleted_at DATETIME - ); - """ - ) - - self._cursor.execute("PRAGMA table_info(images)") - columns = [column[1] for column in self._cursor.fetchall()] - - if "starred" not in columns: - self._cursor.execute( - """--sql - ALTER TABLE images ADD COLUMN starred BOOLEAN DEFAULT FALSE; - """ - ) - - # Create the `images` table indices. 
- self._cursor.execute( - """--sql - CREATE UNIQUE INDEX IF NOT EXISTS idx_images_image_name ON images(image_name); - """ - ) - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_images_image_origin ON images(image_origin); - """ - ) - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_images_image_category ON images(image_category); - """ - ) - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_images_created_at ON images(created_at); - """ - ) - - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_images_starred ON images(starred); - """ - ) - - # Add trigger for `updated_at`. - self._cursor.execute( - """--sql - CREATE TRIGGER IF NOT EXISTS tg_images_updated_at - AFTER UPDATE - ON images FOR EACH ROW - BEGIN - UPDATE images SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') - WHERE image_name = old.image_name; - END; - """ - ) - - self._cursor.execute("PRAGMA table_info(images)") - columns = [column[1] for column in self._cursor.fetchall()] - if "has_workflow" not in columns: - self._cursor.execute( - """--sql - ALTER TABLE images - ADD COLUMN has_workflow BOOLEAN DEFAULT FALSE; - """ - ) - def get(self, image_name: str) -> ImageRecord: try: self._lock.acquire() diff --git a/invokeai/app/services/model_install/model_install_base.py b/invokeai/app/services/model_install/model_install_base.py index 81feaf85e7..80b493d02e 100644 --- a/invokeai/app/services/model_install/model_install_base.py +++ b/invokeai/app/services/model_install/model_install_base.py @@ -5,7 +5,6 @@ from enum import Enum from pathlib import Path from typing import Any, Dict, List, Literal, Optional, Union -from fastapi import Body from pydantic import BaseModel, Field, field_validator from pydantic.networks import AnyHttpUrl from typing_extensions import Annotated @@ -112,17 +111,7 @@ class URLModelSource(StringLikeSource): return str(self.url) -# Body() is being applied here rather than Field() because otherwise FastAPI will -# 
refuse to generate a schema. Relevant links: -# -# "Model Manager Refactor Phase 1 - SQL-based config storage -# https://github.com/invoke-ai/InvokeAI/pull/5039#discussion_r1389752119 (comment) -# Param: xyz can only be a request body, using Body() when using discriminated unions -# https://github.com/tiangolo/fastapi/discussions/9761 -# Body parameter cannot be a pydantic union anymore sinve v0.95 -# https://github.com/tiangolo/fastapi/discussions/9287 - -ModelSource = Annotated[Union[LocalModelSource, HFModelSource, URLModelSource], Body(discriminator="type")] +ModelSource = Annotated[Union[LocalModelSource, HFModelSource, URLModelSource], Field(discriminator="type")] class ModelInstallJob(BaseModel): diff --git a/invokeai/app/services/model_records/model_records_base.py b/invokeai/app/services/model_records/model_records_base.py index 9b0612a846..ae0e633990 100644 --- a/invokeai/app/services/model_records/model_records_base.py +++ b/invokeai/app/services/model_records/model_records_base.py @@ -7,10 +7,7 @@ from abc import ABC, abstractmethod from pathlib import Path from typing import List, Optional, Union -from invokeai.backend.model_manager.config import AnyModelConfig, BaseModelType, ModelType - -# should match the InvokeAI version when this is first released. 
-CONFIG_FILE_VERSION = "3.2.0" +from invokeai.backend.model_manager.config import AnyModelConfig, BaseModelType, ModelFormat, ModelType class DuplicateModelException(Exception): @@ -32,12 +29,6 @@ class ConfigFileVersionMismatchException(Exception): class ModelRecordServiceBase(ABC): """Abstract base class for storage and retrieval of model configs.""" - @property - @abstractmethod - def version(self) -> str: - """Return the config file/database schema version.""" - pass - @abstractmethod def add_model(self, key: str, config: Union[dict, AnyModelConfig]) -> AnyModelConfig: """ @@ -115,6 +106,7 @@ class ModelRecordServiceBase(ABC): model_name: Optional[str] = None, base_model: Optional[BaseModelType] = None, model_type: Optional[ModelType] = None, + model_format: Optional[ModelFormat] = None, ) -> List[AnyModelConfig]: """ Return models matching name, base and/or type. @@ -122,6 +114,7 @@ class ModelRecordServiceBase(ABC): :param model_name: Filter by name of model (optional) :param base_model: Filter by base model (optional) :param model_type: Filter by type of model (optional) + :param model_format: Filter by model format (e.g. "diffusers") (optional) If none of the optional filters are passed, will return all models in the database. 
diff --git a/invokeai/app/services/model_records/model_records_sql.py b/invokeai/app/services/model_records/model_records_sql.py index 6acf1eb12e..a8e777cf64 100644 --- a/invokeai/app/services/model_records/model_records_sql.py +++ b/invokeai/app/services/model_records/model_records_sql.py @@ -49,12 +49,12 @@ from invokeai.backend.model_manager.config import ( AnyModelConfig, BaseModelType, ModelConfigFactory, + ModelFormat, ModelType, ) from ..shared.sqlite.sqlite_database import SqliteDatabase from .model_records_base import ( - CONFIG_FILE_VERSION, DuplicateModelException, ModelRecordServiceBase, UnknownModelException, @@ -78,86 +78,6 @@ class ModelRecordServiceSQL(ModelRecordServiceBase): self._db = db self._cursor = self._db.conn.cursor() - with self._db.lock: - # Enable foreign keys - self._db.conn.execute("PRAGMA foreign_keys = ON;") - self._create_tables() - self._db.conn.commit() - assert ( - str(self.version) == CONFIG_FILE_VERSION - ), f"Model config version {self.version} does not match expected version {CONFIG_FILE_VERSION}" - - def _create_tables(self) -> None: - """Create sqlite3 tables.""" - # model_config table breaks out the fields that are common to all config objects - # and puts class-specific ones in a serialized json object - self._cursor.execute( - """--sql - CREATE TABLE IF NOT EXISTS model_config ( - id TEXT NOT NULL PRIMARY KEY, - -- The next 3 fields are enums in python, unrestricted string here - base TEXT GENERATED ALWAYS as (json_extract(config, '$.base')) VIRTUAL NOT NULL, - type TEXT GENERATED ALWAYS as (json_extract(config, '$.type')) VIRTUAL NOT NULL, - name TEXT GENERATED ALWAYS as (json_extract(config, '$.name')) VIRTUAL NOT NULL, - path TEXT GENERATED ALWAYS as (json_extract(config, '$.path')) VIRTUAL NOT NULL, - format TEXT GENERATED ALWAYS as (json_extract(config, '$.format')) VIRTUAL NOT NULL, - original_hash TEXT, -- could be null - -- Serialized JSON representation of the whole config object, - -- which will contain 
additional fields from subclasses - config TEXT NOT NULL, - created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- Updated via trigger - updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- unique constraint on combo of name, base and type - UNIQUE(name, base, type) - ); - """ - ) - - # metadata table - self._cursor.execute( - """--sql - CREATE TABLE IF NOT EXISTS model_manager_metadata ( - metadata_key TEXT NOT NULL PRIMARY KEY, - metadata_value TEXT NOT NULL - ); - """ - ) - - # Add trigger for `updated_at`. - self._cursor.execute( - """--sql - CREATE TRIGGER IF NOT EXISTS model_config_updated_at - AFTER UPDATE - ON model_config FOR EACH ROW - BEGIN - UPDATE model_config SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') - WHERE id = old.id; - END; - """ - ) - - # Add indexes for searchable fields - for stmt in [ - "CREATE INDEX IF NOT EXISTS base_index ON model_config(base);", - "CREATE INDEX IF NOT EXISTS type_index ON model_config(type);", - "CREATE INDEX IF NOT EXISTS name_index ON model_config(name);", - "CREATE UNIQUE INDEX IF NOT EXISTS path_index ON model_config(path);", - ]: - self._cursor.execute(stmt) - - # Add our version to the metadata table - self._cursor.execute( - """--sql - INSERT OR IGNORE into model_manager_metadata ( - metadata_key, - metadata_value - ) - VALUES (?,?); - """, - ("version", CONFIG_FILE_VERSION), - ) - def add_model(self, key: str, config: Union[dict, AnyModelConfig]) -> AnyModelConfig: """ Add a model to the database. 
@@ -207,22 +127,6 @@ class ModelRecordServiceSQL(ModelRecordServiceBase): return self.get_model(key) - @property - def version(self) -> str: - """Return the version of the database schema.""" - with self._db.lock: - self._cursor.execute( - """--sql - SELECT metadata_value FROM model_manager_metadata - WHERE metadata_key=?; - """, - ("version",), - ) - rows = self._cursor.fetchone() - if not rows: - raise KeyError("Models database does not have metadata key 'version'") - return rows[0] - def del_model(self, key: str) -> None: """ Delete a model. @@ -322,6 +226,7 @@ class ModelRecordServiceSQL(ModelRecordServiceBase): model_name: Optional[str] = None, base_model: Optional[BaseModelType] = None, model_type: Optional[ModelType] = None, + model_format: Optional[ModelFormat] = None, ) -> List[AnyModelConfig]: """ Return models matching name, base and/or type. @@ -329,6 +234,7 @@ class ModelRecordServiceSQL(ModelRecordServiceBase): :param model_name: Filter by name of model (optional) :param base_model: Filter by base model (optional) :param model_type: Filter by type of model (optional) + :param model_format: Filter by model format (e.g. "diffusers") (optional) If none of the optional filters are passed, will return all models in the database. 
@@ -345,6 +251,9 @@ class ModelRecordServiceSQL(ModelRecordServiceBase): if model_type: where_clause.append("type=?") bindings.append(model_type) + if model_format: + where_clause.append("format=?") + bindings.append(model_format) where = f"WHERE {' AND '.join(where_clause)}" if where_clause else "" with self._db.lock: self._cursor.execute( diff --git a/invokeai/app/services/session_queue/session_queue_sqlite.py b/invokeai/app/services/session_queue/session_queue_sqlite.py index 71f28c102b..64642690e9 100644 --- a/invokeai/app/services/session_queue/session_queue_sqlite.py +++ b/invokeai/app/services/session_queue/session_queue_sqlite.py @@ -50,7 +50,6 @@ class SqliteSessionQueue(SessionQueueBase): self.__lock = db.lock self.__conn = db.conn self.__cursor = self.__conn.cursor() - self._create_tables() def _match_event_name(self, event: FastAPIEvent, match_in: list[str]) -> bool: return event[1]["event"] in match_in @@ -98,123 +97,6 @@ class SqliteSessionQueue(SessionQueueBase): except SessionQueueItemNotFoundError: return - def _create_tables(self) -> None: - """Creates the session queue tables, indicies, and triggers""" - try: - self.__lock.acquire() - self.__cursor.execute( - """--sql - CREATE TABLE IF NOT EXISTS session_queue ( - item_id INTEGER PRIMARY KEY AUTOINCREMENT, -- used for ordering, cursor pagination - batch_id TEXT NOT NULL, -- identifier of the batch this queue item belongs to - queue_id TEXT NOT NULL, -- identifier of the queue this queue item belongs to - session_id TEXT NOT NULL UNIQUE, -- duplicated data from the session column, for ease of access - field_values TEXT, -- NULL if no values are associated with this queue item - session TEXT NOT NULL, -- the session to be executed - status TEXT NOT NULL DEFAULT 'pending', -- the status of the queue item, one of 'pending', 'in_progress', 'completed', 'failed', 'canceled' - priority INTEGER NOT NULL DEFAULT 0, -- the priority, higher is more important - error TEXT, -- any errors associated with this 
queue item - created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), -- updated via trigger - started_at DATETIME, -- updated via trigger - completed_at DATETIME -- updated via trigger, completed items are cleaned up on application startup - -- Ideally this is a FK, but graph_executions uses INSERT OR REPLACE, and REPLACE triggers the ON DELETE CASCADE... - -- FOREIGN KEY (session_id) REFERENCES graph_executions (id) ON DELETE CASCADE - ); - """ - ) - - self.__cursor.execute( - """--sql - CREATE UNIQUE INDEX IF NOT EXISTS idx_session_queue_item_id ON session_queue(item_id); - """ - ) - - self.__cursor.execute( - """--sql - CREATE UNIQUE INDEX IF NOT EXISTS idx_session_queue_session_id ON session_queue(session_id); - """ - ) - - self.__cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_session_queue_batch_id ON session_queue(batch_id); - """ - ) - - self.__cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_session_queue_created_priority ON session_queue(priority); - """ - ) - - self.__cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_session_queue_created_status ON session_queue(status); - """ - ) - - self.__cursor.execute( - """--sql - CREATE TRIGGER IF NOT EXISTS tg_session_queue_completed_at - AFTER UPDATE OF status ON session_queue - FOR EACH ROW - WHEN - NEW.status = 'completed' - OR NEW.status = 'failed' - OR NEW.status = 'canceled' - BEGIN - UPDATE session_queue - SET completed_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') - WHERE item_id = NEW.item_id; - END; - """ - ) - - self.__cursor.execute( - """--sql - CREATE TRIGGER IF NOT EXISTS tg_session_queue_started_at - AFTER UPDATE OF status ON session_queue - FOR EACH ROW - WHEN - NEW.status = 'in_progress' - BEGIN - UPDATE session_queue - SET started_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') - WHERE item_id = NEW.item_id; - END; - """ - ) - - self.__cursor.execute( - """--sql - CREATE 
TRIGGER IF NOT EXISTS tg_session_queue_updated_at - AFTER UPDATE - ON session_queue FOR EACH ROW - BEGIN - UPDATE session_queue - SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') - WHERE item_id = old.item_id; - END; - """ - ) - - self.__cursor.execute("PRAGMA table_info(session_queue)") - columns = [column[1] for column in self.__cursor.fetchall()] - if "workflow" not in columns: - self.__cursor.execute( - """--sql - ALTER TABLE session_queue ADD COLUMN workflow TEXT; - """ - ) - - self.__conn.commit() - except Exception: - self.__conn.rollback() - raise - finally: - self.__lock.release() - def _set_in_progress_to_canceled(self) -> None: """ Sets all in_progress queue items to canceled. Run on app startup, not associated with any queue. diff --git a/invokeai/app/services/shared/sqlite/sqlite_database.py b/invokeai/app/services/shared/sqlite/sqlite_database.py index 006eb61cbd..e860160044 100644 --- a/invokeai/app/services/shared/sqlite/sqlite_database.py +++ b/invokeai/app/services/shared/sqlite/sqlite_database.py @@ -3,45 +3,65 @@ import threading from logging import Logger from pathlib import Path -from invokeai.app.services.config import InvokeAIAppConfig from invokeai.app.services.shared.sqlite.sqlite_common import sqlite_memory class SqliteDatabase: - def __init__(self, config: InvokeAIAppConfig, logger: Logger): - self._logger = logger - self._config = config + """ + Manages a connection to an SQLite database. - if self._config.use_memory_db: - self.db_path = sqlite_memory - logger.info("Using in-memory database") + :param db_path: Path to the database file. If None, an in-memory database is used. + :param logger: Logger to use for logging. + :param verbose: Whether to log SQL statements. Provides `logger.debug` as the SQLite trace callback. + + This is a light wrapper around the `sqlite3` module, providing a few conveniences: + - The database file is written to disk if it does not exist. + - Foreign key constraints are enabled by default. 
+ - The connection is configured to use the `sqlite3.Row` row factory. + + In addition to the constructor args, the instance provides the following attributes and methods: + - `conn`: A `sqlite3.Connection` object. Note that the connection must never be closed if the database is in-memory. + - `lock`: A shared re-entrant lock, used to approximate thread safety. + - `clean()`: Runs the SQL `VACUUM;` command and reports on the freed space. + """ + + def __init__(self, db_path: Path | None, logger: Logger, verbose: bool = False) -> None: + """Initializes the database. This is used internally by the class constructor.""" + self.logger = logger + self.db_path = db_path + self.verbose = verbose + + if not self.db_path: + logger.info("Initializing in-memory database") else: - db_path = self._config.db_path - db_path.parent.mkdir(parents=True, exist_ok=True) - self.db_path = str(db_path) - self._logger.info(f"Using database at {self.db_path}") + self.db_path.parent.mkdir(parents=True, exist_ok=True) + self.logger.info(f"Initializing database at {self.db_path}") - self.conn = sqlite3.connect(self.db_path, check_same_thread=False) + self.conn = sqlite3.connect(database=self.db_path or sqlite_memory, check_same_thread=False) self.lock = threading.RLock() self.conn.row_factory = sqlite3.Row - if self._config.log_sql: - self.conn.set_trace_callback(self._logger.debug) + if self.verbose: + self.conn.set_trace_callback(self.logger.debug) self.conn.execute("PRAGMA foreign_keys = ON;") def clean(self) -> None: + """ + Cleans the database by running the VACUUM command, reporting on the freed space. 
+ """ + # No need to clean in-memory database + if not self.db_path: + return with self.lock: try: - if self.db_path == sqlite_memory: - return initial_db_size = Path(self.db_path).stat().st_size self.conn.execute("VACUUM;") self.conn.commit() final_db_size = Path(self.db_path).stat().st_size freed_space_in_mb = round((initial_db_size - final_db_size) / 1024 / 1024, 2) if freed_space_in_mb > 0: - self._logger.info(f"Cleaned database (freed {freed_space_in_mb}MB)") + self.logger.info(f"Cleaned database (freed {freed_space_in_mb}MB)") except Exception as e: - self._logger.error(f"Error cleaning database: {e}") + self.logger.error(f"Error cleaning database: {e}") raise diff --git a/invokeai/app/services/shared/sqlite/sqlite_util.py b/invokeai/app/services/shared/sqlite/sqlite_util.py new file mode 100644 index 0000000000..83a42917a7 --- /dev/null +++ b/invokeai/app/services/shared/sqlite/sqlite_util.py @@ -0,0 +1,32 @@ +from logging import Logger + +from invokeai.app.services.config.config_default import InvokeAIAppConfig +from invokeai.app.services.image_files.image_files_base import ImageFileStorageBase +from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase +from invokeai.app.services.shared.sqlite_migrator.migrations.migration_1 import build_migration_1 +from invokeai.app.services.shared.sqlite_migrator.migrations.migration_2 import build_migration_2 +from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_impl import SqliteMigrator + + +def init_db(config: InvokeAIAppConfig, logger: Logger, image_files: ImageFileStorageBase) -> SqliteDatabase: + """ + Initializes the SQLite database. 
+ + :param config: The app config + :param logger: The logger + :param image_files: The image files service (used by migration 2) + + This function: + - Instantiates a :class:`SqliteDatabase` + - Instantiates a :class:`SqliteMigrator` and registers all migrations + - Runs all migrations + """ + db_path = None if config.use_memory_db else config.db_path + db = SqliteDatabase(db_path=db_path, logger=logger, verbose=config.log_sql) + + migrator = SqliteMigrator(db=db) + migrator.register_migration(build_migration_1()) + migrator.register_migration(build_migration_2(image_files=image_files, logger=logger)) + migrator.run_migrations() + + return db diff --git a/invokeai/app/services/shared/sqlite_migrator/__init__.py b/invokeai/app/services/shared/sqlite_migrator/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/invokeai/app/services/shared/sqlite_migrator/migrations/__init__.py b/invokeai/app/services/shared/sqlite_migrator/migrations/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/invokeai/app/services/shared/sqlite_migrator/migrations/migration_1.py b/invokeai/app/services/shared/sqlite_migrator/migrations/migration_1.py new file mode 100644 index 0000000000..574afb472f --- /dev/null +++ b/invokeai/app/services/shared/sqlite_migrator/migrations/migration_1.py @@ -0,0 +1,372 @@ +import sqlite3 + +from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import Migration + + +class Migration1Callback: + def __call__(self, cursor: sqlite3.Cursor) -> None: + """Migration callback for database version 1.""" + + self._create_board_images(cursor) + self._create_boards(cursor) + self._create_images(cursor) + self._create_model_config(cursor) + self._create_session_queue(cursor) + self._create_workflow_images(cursor) + self._create_workflows(cursor) + + def _create_board_images(self, cursor: sqlite3.Cursor) -> None: + """Creates the `board_images` table, indices and triggers.""" + tables = [ + """--sql + 
CREATE TABLE IF NOT EXISTS board_images ( + board_id TEXT NOT NULL, + image_name TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- updated via trigger + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Soft delete, currently unused + deleted_at DATETIME, + -- enforce one-to-many relationship between boards and images using PK + -- (we can extend this to many-to-many later) + PRIMARY KEY (image_name), + FOREIGN KEY (board_id) REFERENCES boards (board_id) ON DELETE CASCADE, + FOREIGN KEY (image_name) REFERENCES images (image_name) ON DELETE CASCADE + ); + """ + ] + + indices = [ + "CREATE INDEX IF NOT EXISTS idx_board_images_board_id ON board_images (board_id);", + "CREATE INDEX IF NOT EXISTS idx_board_images_board_id_created_at ON board_images (board_id, created_at);", + ] + + triggers = [ + """--sql + CREATE TRIGGER IF NOT EXISTS tg_board_images_updated_at + AFTER UPDATE + ON board_images FOR EACH ROW + BEGIN + UPDATE board_images SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE board_id = old.board_id AND image_name = old.image_name; + END; + """ + ] + + for stmt in tables + indices + triggers: + cursor.execute(stmt) + + def _create_boards(self, cursor: sqlite3.Cursor) -> None: + """Creates the `boards` table, indices and triggers.""" + tables = [ + """--sql + CREATE TABLE IF NOT EXISTS boards ( + board_id TEXT NOT NULL PRIMARY KEY, + board_name TEXT NOT NULL, + cover_image_name TEXT, + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Updated via trigger + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Soft delete, currently unused + deleted_at DATETIME, + FOREIGN KEY (cover_image_name) REFERENCES images (image_name) ON DELETE SET NULL + ); + """ + ] + + indices = ["CREATE INDEX IF NOT EXISTS idx_boards_created_at ON boards (created_at);"] + + triggers = [ + """--sql + CREATE TRIGGER IF NOT EXISTS 
tg_boards_updated_at + AFTER UPDATE + ON boards FOR EACH ROW + BEGIN + UPDATE boards SET updated_at = current_timestamp + WHERE board_id = old.board_id; + END; + """ + ] + + for stmt in tables + indices + triggers: + cursor.execute(stmt) + + def _create_images(self, cursor: sqlite3.Cursor) -> None: + """Creates the `images` table, indices and triggers. Adds the `starred` column.""" + + tables = [ + """--sql + CREATE TABLE IF NOT EXISTS images ( + image_name TEXT NOT NULL PRIMARY KEY, + -- This is an enum in python, unrestricted string here for flexibility + image_origin TEXT NOT NULL, + -- This is an enum in python, unrestricted string here for flexibility + image_category TEXT NOT NULL, + width INTEGER NOT NULL, + height INTEGER NOT NULL, + session_id TEXT, + node_id TEXT, + metadata TEXT, + is_intermediate BOOLEAN DEFAULT FALSE, + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Updated via trigger + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Soft delete, currently unused + deleted_at DATETIME + ); + """ + ] + + indices = [ + "CREATE UNIQUE INDEX IF NOT EXISTS idx_images_image_name ON images(image_name);", + "CREATE INDEX IF NOT EXISTS idx_images_image_origin ON images(image_origin);", + "CREATE INDEX IF NOT EXISTS idx_images_image_category ON images(image_category);", + "CREATE INDEX IF NOT EXISTS idx_images_created_at ON images(created_at);", + ] + + triggers = [ + """--sql + CREATE TRIGGER IF NOT EXISTS tg_images_updated_at + AFTER UPDATE + ON images FOR EACH ROW + BEGIN + UPDATE images SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE image_name = old.image_name; + END; + """ + ] + + # Add the 'starred' column to `images` if it doesn't exist + cursor.execute("PRAGMA table_info(images)") + columns = [column[1] for column in cursor.fetchall()] + + if "starred" not in columns: + tables.append("ALTER TABLE images ADD COLUMN starred BOOLEAN DEFAULT FALSE;") + indices.append("CREATE 
INDEX IF NOT EXISTS idx_images_starred ON images(starred);") + + for stmt in tables + indices + triggers: + cursor.execute(stmt) + + def _create_model_config(self, cursor: sqlite3.Cursor) -> None: + """Creates the `model_config` table, `model_manager_metadata` table, indices and triggers.""" + + tables = [ + """--sql + CREATE TABLE IF NOT EXISTS model_config ( + id TEXT NOT NULL PRIMARY KEY, + -- The next 3 fields are enums in python, unrestricted string here + base TEXT NOT NULL, + type TEXT NOT NULL, + name TEXT NOT NULL, + path TEXT NOT NULL, + original_hash TEXT, -- could be null + -- Serialized JSON representation of the whole config object, + -- which will contain additional fields from subclasses + config TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Updated via trigger + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- unique constraint on combo of name, base and type + UNIQUE(name, base, type) + ); + """, + """--sql + CREATE TABLE IF NOT EXISTS model_manager_metadata ( + metadata_key TEXT NOT NULL PRIMARY KEY, + metadata_value TEXT NOT NULL + ); + """, + ] + + # Add trigger for `updated_at`. 
+ triggers = [ + """--sql + CREATE TRIGGER IF NOT EXISTS model_config_updated_at + AFTER UPDATE + ON model_config FOR EACH ROW + BEGIN + UPDATE model_config SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE id = old.id; + END; + """ + ] + + # Add indexes for searchable fields + indices = [ + "CREATE INDEX IF NOT EXISTS base_index ON model_config(base);", + "CREATE INDEX IF NOT EXISTS type_index ON model_config(type);", + "CREATE INDEX IF NOT EXISTS name_index ON model_config(name);", + "CREATE UNIQUE INDEX IF NOT EXISTS path_index ON model_config(path);", + ] + + for stmt in tables + indices + triggers: + cursor.execute(stmt) + + def _create_session_queue(self, cursor: sqlite3.Cursor) -> None: + tables = [ + """--sql + CREATE TABLE IF NOT EXISTS session_queue ( + item_id INTEGER PRIMARY KEY AUTOINCREMENT, -- used for ordering, cursor pagination + batch_id TEXT NOT NULL, -- identifier of the batch this queue item belongs to + queue_id TEXT NOT NULL, -- identifier of the queue this queue item belongs to + session_id TEXT NOT NULL UNIQUE, -- duplicated data from the session column, for ease of access + field_values TEXT, -- NULL if no values are associated with this queue item + session TEXT NOT NULL, -- the session to be executed + status TEXT NOT NULL DEFAULT 'pending', -- the status of the queue item, one of 'pending', 'in_progress', 'completed', 'failed', 'canceled' + priority INTEGER NOT NULL DEFAULT 0, -- the priority, higher is more important + error TEXT, -- any errors associated with this queue item + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), -- updated via trigger + started_at DATETIME, -- updated via trigger + completed_at DATETIME -- updated via trigger, completed items are cleaned up on application startup + -- Ideally this is a FK, but graph_executions uses INSERT OR REPLACE, and REPLACE triggers the ON DELETE CASCADE... 
+ -- FOREIGN KEY (session_id) REFERENCES graph_executions (id) ON DELETE CASCADE + ); + """ + ] + + indices = [ + "CREATE UNIQUE INDEX IF NOT EXISTS idx_session_queue_item_id ON session_queue(item_id);", + "CREATE UNIQUE INDEX IF NOT EXISTS idx_session_queue_session_id ON session_queue(session_id);", + "CREATE INDEX IF NOT EXISTS idx_session_queue_batch_id ON session_queue(batch_id);", + "CREATE INDEX IF NOT EXISTS idx_session_queue_created_priority ON session_queue(priority);", + "CREATE INDEX IF NOT EXISTS idx_session_queue_created_status ON session_queue(status);", + ] + + triggers = [ + """--sql + CREATE TRIGGER IF NOT EXISTS tg_session_queue_completed_at + AFTER UPDATE OF status ON session_queue + FOR EACH ROW + WHEN + NEW.status = 'completed' + OR NEW.status = 'failed' + OR NEW.status = 'canceled' + BEGIN + UPDATE session_queue + SET completed_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE item_id = NEW.item_id; + END; + """, + """--sql + CREATE TRIGGER IF NOT EXISTS tg_session_queue_started_at + AFTER UPDATE OF status ON session_queue + FOR EACH ROW + WHEN + NEW.status = 'in_progress' + BEGIN + UPDATE session_queue + SET started_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE item_id = NEW.item_id; + END; + """, + """--sql + CREATE TRIGGER IF NOT EXISTS tg_session_queue_updated_at + AFTER UPDATE + ON session_queue FOR EACH ROW + BEGIN + UPDATE session_queue + SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE item_id = old.item_id; + END; + """, + ] + + for stmt in tables + indices + triggers: + cursor.execute(stmt) + + def _create_workflow_images(self, cursor: sqlite3.Cursor) -> None: + tables = [ + """--sql + CREATE TABLE IF NOT EXISTS workflow_images ( + workflow_id TEXT NOT NULL, + image_name TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- updated via trigger + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Soft delete, currently unused + deleted_at 
DATETIME, + -- enforce one-to-many relationship between workflows and images using PK + -- (we can extend this to many-to-many later) + PRIMARY KEY (image_name), + FOREIGN KEY (workflow_id) REFERENCES workflows (workflow_id) ON DELETE CASCADE, + FOREIGN KEY (image_name) REFERENCES images (image_name) ON DELETE CASCADE + ); + """ + ] + + indices = [ + "CREATE INDEX IF NOT EXISTS idx_workflow_images_workflow_id ON workflow_images (workflow_id);", + "CREATE INDEX IF NOT EXISTS idx_workflow_images_workflow_id_created_at ON workflow_images (workflow_id, created_at);", + ] + + triggers = [ + """--sql + CREATE TRIGGER IF NOT EXISTS tg_workflow_images_updated_at + AFTER UPDATE + ON workflow_images FOR EACH ROW + BEGIN + UPDATE workflow_images SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE workflow_id = old.workflow_id AND image_name = old.image_name; + END; + """ + ] + + for stmt in tables + indices + triggers: + cursor.execute(stmt) + + def _create_workflows(self, cursor: sqlite3.Cursor) -> None: + tables = [ + """--sql + CREATE TABLE IF NOT EXISTS workflows ( + workflow TEXT NOT NULL, + workflow_id TEXT GENERATED ALWAYS AS (json_extract(workflow, '$.id')) VIRTUAL NOT NULL UNIQUE, -- gets implicit index + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) -- updated via trigger + ); + """ + ] + + triggers = [ + """--sql + CREATE TRIGGER IF NOT EXISTS tg_workflows_updated_at + AFTER UPDATE + ON workflows FOR EACH ROW + BEGIN + UPDATE workflows + SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE workflow_id = old.workflow_id; + END; + """ + ] + + for stmt in tables + triggers: + cursor.execute(stmt) + + +def build_migration_1() -> Migration: + """ + Builds the migration from database version 0 (init) to 1. 
+ + This migration represents the state of the database circa InvokeAI v3.4.0, which was the last + version to not use migrations to manage the database. + + As such, this migration does include some ALTER statements, and the SQL statements are written + to be idempotent. + + - Create `board_images` junction table + - Create `boards` table + - Create `images` table, add `starred` column + - Create `model_config` table + - Create `session_queue` table + - Create `workflow_images` junction table + - Create `workflows` table + """ + + migration_1 = Migration( + from_version=0, + to_version=1, + callback=Migration1Callback(), + ) + + return migration_1 diff --git a/invokeai/app/services/shared/sqlite_migrator/migrations/migration_2.py b/invokeai/app/services/shared/sqlite_migrator/migrations/migration_2.py new file mode 100644 index 0000000000..9b9dedcc58 --- /dev/null +++ b/invokeai/app/services/shared/sqlite_migrator/migrations/migration_2.py @@ -0,0 +1,198 @@ +import sqlite3 +from logging import Logger + +from pydantic import ValidationError +from tqdm import tqdm + +from invokeai.app.services.image_files.image_files_base import ImageFileStorageBase +from invokeai.app.services.image_files.image_files_common import ImageFileNotFoundException +from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import Migration +from invokeai.app.services.workflow_records.workflow_records_common import ( + UnsafeWorkflowWithVersionValidator, +) + + +class Migration2Callback: + def __init__(self, image_files: ImageFileStorageBase, logger: Logger): + self._image_files = image_files + self._logger = logger + + def __call__(self, cursor: sqlite3.Cursor): + self._add_images_has_workflow(cursor) + self._add_session_queue_workflow(cursor) + self._drop_old_workflow_tables(cursor) + self._add_workflow_library(cursor) + self._drop_model_manager_metadata(cursor) + self._recreate_model_config(cursor) + self._migrate_embedded_workflows(cursor) + + def 
_add_images_has_workflow(self, cursor: sqlite3.Cursor) -> None: + """Add the `has_workflow` column to `images` table.""" + cursor.execute("PRAGMA table_info(images)") + columns = [column[1] for column in cursor.fetchall()] + + if "has_workflow" not in columns: + cursor.execute("ALTER TABLE images ADD COLUMN has_workflow BOOLEAN DEFAULT FALSE;") + + def _add_session_queue_workflow(self, cursor: sqlite3.Cursor) -> None: + """Add the `workflow` column to `session_queue` table.""" + + cursor.execute("PRAGMA table_info(session_queue)") + columns = [column[1] for column in cursor.fetchall()] + + if "workflow" not in columns: + cursor.execute("ALTER TABLE session_queue ADD COLUMN workflow TEXT;") + + def _drop_old_workflow_tables(self, cursor: sqlite3.Cursor) -> None: + """Drops the `workflows` and `workflow_images` tables.""" + cursor.execute("DROP TABLE IF EXISTS workflow_images;") + cursor.execute("DROP TABLE IF EXISTS workflows;") + + def _add_workflow_library(self, cursor: sqlite3.Cursor) -> None: + """Adds the `workflow_library` table and drops the `workflows` and `workflow_images` tables.""" + tables = [ + """--sql + CREATE TABLE IF NOT EXISTS workflow_library ( + workflow_id TEXT NOT NULL PRIMARY KEY, + workflow TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- updated via trigger + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- updated manually when retrieving workflow + opened_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Generated columns, needed for indexing and searching + category TEXT GENERATED ALWAYS as (json_extract(workflow, '$.meta.category')) VIRTUAL NOT NULL, + name TEXT GENERATED ALWAYS as (json_extract(workflow, '$.name')) VIRTUAL NOT NULL, + description TEXT GENERATED ALWAYS as (json_extract(workflow, '$.description')) VIRTUAL NOT NULL + ); + """, + ] + + indices = [ + "CREATE INDEX IF NOT EXISTS idx_workflow_library_created_at ON 
workflow_library(created_at);", + "CREATE INDEX IF NOT EXISTS idx_workflow_library_updated_at ON workflow_library(updated_at);", + "CREATE INDEX IF NOT EXISTS idx_workflow_library_opened_at ON workflow_library(opened_at);", + "CREATE INDEX IF NOT EXISTS idx_workflow_library_category ON workflow_library(category);", + "CREATE INDEX IF NOT EXISTS idx_workflow_library_name ON workflow_library(name);", + "CREATE INDEX IF NOT EXISTS idx_workflow_library_description ON workflow_library(description);", + ] + + triggers = [ + """--sql + CREATE TRIGGER IF NOT EXISTS tg_workflow_library_updated_at + AFTER UPDATE + ON workflow_library FOR EACH ROW + BEGIN + UPDATE workflow_library + SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') + WHERE workflow_id = old.workflow_id; + END; + """ + ] + + for stmt in tables + indices + triggers: + cursor.execute(stmt) + + def _drop_model_manager_metadata(self, cursor: sqlite3.Cursor) -> None: + """Drops the `model_manager_metadata` table.""" + cursor.execute("DROP TABLE IF EXISTS model_manager_metadata;") + + def _recreate_model_config(self, cursor: sqlite3.Cursor) -> None: + """ + Drops the `model_config` table, recreating it. + + In 3.4.0, this table used explicit columns but was changed to use json_extract 3.5.0. + + Because this table is not used in production, we are able to simply drop it and recreate it. 
+ """ + + cursor.execute("DROP TABLE IF EXISTS model_config;") + + cursor.execute( + """--sql + CREATE TABLE IF NOT EXISTS model_config ( + id TEXT NOT NULL PRIMARY KEY, + -- The next 3 fields are enums in python, unrestricted string here + base TEXT GENERATED ALWAYS as (json_extract(config, '$.base')) VIRTUAL NOT NULL, + type TEXT GENERATED ALWAYS as (json_extract(config, '$.type')) VIRTUAL NOT NULL, + name TEXT GENERATED ALWAYS as (json_extract(config, '$.name')) VIRTUAL NOT NULL, + path TEXT GENERATED ALWAYS as (json_extract(config, '$.path')) VIRTUAL NOT NULL, + format TEXT GENERATED ALWAYS as (json_extract(config, '$.format')) VIRTUAL NOT NULL, + original_hash TEXT, -- could be null + -- Serialized JSON representation of the whole config object, + -- which will contain additional fields from subclasses + config TEXT NOT NULL, + created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- Updated via trigger + updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), + -- unique constraint on combo of name, base and type + UNIQUE(name, base, type) + ); + """ + ) + + def _migrate_embedded_workflows(self, cursor: sqlite3.Cursor) -> None: + """ + In the v3.5.0 release, InvokeAI changed how it handles embedded workflows. The `images` table in + the database now has a `has_workflow` column, indicating if an image has a workflow embedded. + + This migrate callback checks each image for the presence of an embedded workflow, then updates its entry + in the database accordingly. 
+ """ + # Get all image names + cursor.execute("SELECT image_name FROM images") + image_names: list[str] = [image[0] for image in cursor.fetchall()] + total_image_names = len(image_names) + + if not total_image_names: + return + + self._logger.info(f"Migrating workflows for {total_image_names} images") + + # Migrate the images + to_migrate: list[tuple[bool, str]] = [] + pbar = tqdm(image_names) + for idx, image_name in enumerate(pbar): + pbar.set_description(f"Checking image {idx + 1}/{total_image_names} for workflow") + try: + pil_image = self._image_files.get(image_name) + except ImageFileNotFoundException: + self._logger.warning(f"Image {image_name} not found, skipping") + continue + if "invokeai_workflow" in pil_image.info: + try: + UnsafeWorkflowWithVersionValidator.validate_json(pil_image.info.get("invokeai_workflow", "")) + except ValidationError: + self._logger.warning(f"Image {image_name} has invalid embedded workflow, skipping") + continue + to_migrate.append((True, image_name)) + + self._logger.info(f"Adding {len(to_migrate)} embedded workflows to database") + cursor.executemany("UPDATE images SET has_workflow = ? WHERE image_name = ?", to_migrate) + + +def build_migration_2(image_files: ImageFileStorageBase, logger: Logger) -> Migration: + """ + Builds the migration from database version 1 to 2. + + Introduced in v3.5.0 for the new workflow library. 
+ + :param image_files: The image files service, used to check for embedded workflows + :param logger: The logger, used to log progress during embedded workflows handling + + This migration does the following: + - Add `has_workflow` column to `images` table + - Add `workflow` column to `session_queue` table + - Drop `workflows` and `workflow_images` tables + - Add `workflow_library` table + - Drops the `model_manager_metadata` table + - Drops the `model_config` table, recreating it (at this point, there is no user data in this table) + - Populates the `has_workflow` column in the `images` table (requires `image_files` & `logger` dependencies) + """ + migration_2 = Migration( + from_version=1, + to_version=2, + callback=Migration2Callback(image_files=image_files, logger=logger), + ) + + return migration_2 diff --git a/invokeai/app/services/shared/sqlite_migrator/sqlite_migrator_common.py b/invokeai/app/services/shared/sqlite_migrator/sqlite_migrator_common.py new file mode 100644 index 0000000000..47ed5da505 --- /dev/null +++ b/invokeai/app/services/shared/sqlite_migrator/sqlite_migrator_common.py @@ -0,0 +1,164 @@ +import sqlite3 +from typing import Optional, Protocol, runtime_checkable + +from pydantic import BaseModel, ConfigDict, Field, model_validator + + +@runtime_checkable +class MigrateCallback(Protocol): + """ + A callback that performs a migration. + + Migrate callbacks are provided an open cursor to the database. They should not commit their + transaction; this is handled by the migrator. + + If the callback needs to access additional dependencies, will be provided to the callback at runtime. + + See :class:`Migration` for an example. + """ + + def __call__(self, cursor: sqlite3.Cursor) -> None: + ... 
+ + +class MigrationError(RuntimeError): + """Raised when a migration fails.""" + + +class MigrationVersionError(ValueError): + """Raised when a migration version is invalid.""" + + +class Migration(BaseModel): + """ + Represents a migration for a SQLite database. + + :param from_version: The database version on which this migration may be run + :param to_version: The database version that results from this migration + :param callback: The callback to run to perform the migration + + Migration callbacks will be provided an open cursor to the database. They should not commit their + transaction; this is handled by the migrator. + + It is suggested to use a class to define the migration callback and a builder function to create + the :class:`Migration`. This allows the callback to be provided with additional dependencies and + keeps things tidy, as all migration logic is self-contained. + + Example: + ```py + # Define the migration callback class + class Migration1Callback: + # This migration needs the image files service, so we define a class that accepts it in its constructor. + def __init__(self, image_files: ImageFileStorageBase) -> None: + self._image_files = image_files + + # This dunder method allows the instance of the class to be called like a function. + def __call__(self, cursor: sqlite3.Cursor) -> None: + self._add_with_banana_column(cursor) + self._do_something_with_images(cursor) + + def _add_with_banana_column(self, cursor: sqlite3.Cursor) -> None: + \"""Adds the with_banana column to the sushi table.\""" + # Execute SQL using the cursor, taking care to *not commit* a transaction + cursor.execute('ALTER TABLE sushi ADD COLUMN with_banana BOOLEAN DEFAULT TRUE;') + + def _do_something_with_images(self, cursor: sqlite3.Cursor) -> None: + \"""Does something with the image files service.\""" + self._image_files.get(...) + + # Define the migration builder function. 
This function creates an instance of the migration callback + # class and returns a Migration. + def build_migration_1(image_files: ImageFileStorageBase) -> Migration: + \"""Builds the migration from database version 0 to 1. + Requires the image files service to... + \""" + + migration_1 = Migration( + from_version=0, + to_version=1, + callback=Migration1Callback(image_files=image_files), + ) + + return migration_1 + + # Register the migration after all dependencies have been initialized + db = SqliteDatabase(db_path, logger) + migrator = SqliteMigrator(db) + migrator.register_migration(build_migration_1(image_files)) + migrator.run_migrations() + ``` + """ + + from_version: int = Field(ge=0, strict=True, description="The database version on which this migration may be run") + to_version: int = Field(ge=1, strict=True, description="The database version that results from this migration") + callback: MigrateCallback = Field(description="The callback to run to perform the migration") + + @model_validator(mode="after") + def validate_to_version(self) -> "Migration": + """Validates that to_version is one greater than from_version.""" + if self.to_version != self.from_version + 1: + raise MigrationVersionError("to_version must be one greater than from_version") + return self + + def __hash__(self) -> int: + # Callables are not hashable, so we need to implement our own __hash__ function to use this class in a set. + return hash((self.from_version, self.to_version)) + + model_config = ConfigDict(arbitrary_types_allowed=True) + + +class MigrationSet: + """ + A set of Migrations. Performs validation during migration registration and provides utility methods. + + Migrations should be registered with `register()`. Once all are registered, `validate_migration_chain()` + should be called to ensure that the migrations form a single chain of migrations from version 0 to the latest version. 
+ """ + + def __init__(self) -> None: + self._migrations: set[Migration] = set() + + def register(self, migration: Migration) -> None: + """Registers a migration.""" + migration_from_already_registered = any(m.from_version == migration.from_version for m in self._migrations) + migration_to_already_registered = any(m.to_version == migration.to_version for m in self._migrations) + if migration_from_already_registered or migration_to_already_registered: + raise MigrationVersionError("Migration with from_version or to_version already registered") + self._migrations.add(migration) + + def get(self, from_version: int) -> Optional[Migration]: + """Gets the migration that may be run on the given database version.""" + # register() ensures that there is only one migration with a given from_version, so this is safe. + return next((m for m in self._migrations if m.from_version == from_version), None) + + def validate_migration_chain(self) -> None: + """ + Validates that the migrations form a single chain of migrations from version 0 to the latest version, + Raises a MigrationError if there is a problem. + """ + if self.count == 0: + return + if self.latest_version == 0: + return + next_migration = self.get(from_version=0) + if next_migration is None: + raise MigrationError("Migration chain is fragmented") + touched_count = 1 + while next_migration is not None: + next_migration = self.get(next_migration.to_version) + if next_migration is not None: + touched_count += 1 + if touched_count != self.count: + raise MigrationError("Migration chain is fragmented") + + @property + def count(self) -> int: + """The count of registered migrations.""" + return len(self._migrations) + + @property + def latest_version(self) -> int: + """Gets latest to_version among registered migrations. 
Returns 0 if there are no migrations registered.""" + if self.count == 0: + return 0 + return sorted(self._migrations, key=lambda m: m.to_version)[-1].to_version diff --git a/invokeai/app/services/shared/sqlite_migrator/sqlite_migrator_impl.py b/invokeai/app/services/shared/sqlite_migrator/sqlite_migrator_impl.py new file mode 100644 index 0000000000..eef9c07ed4 --- /dev/null +++ b/invokeai/app/services/shared/sqlite_migrator/sqlite_migrator_impl.py @@ -0,0 +1,130 @@ +import sqlite3 +from pathlib import Path +from typing import Optional + +from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase +from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import Migration, MigrationError, MigrationSet + + +class SqliteMigrator: + """ + Manages migrations for a SQLite database. + + :param db: The instance of :class:`SqliteDatabase` to migrate. + + Migrations should be registered with :meth:`register_migration`. + + Each migration is run in a transaction. If a migration fails, the transaction is rolled back. + + Example Usage: + ```py + db = SqliteDatabase(db_path="my_db.db", logger=logger) + migrator = SqliteMigrator(db=db) + migrator.register_migration(build_migration_1()) + migrator.register_migration(build_migration_2()) + migrator.run_migrations() + ``` + """ + + backup_path: Optional[Path] = None + + def __init__(self, db: SqliteDatabase) -> None: + self._db = db + self._logger = db.logger + self._migration_set = MigrationSet() + + def register_migration(self, migration: Migration) -> None: + """Registers a migration.""" + self._migration_set.register(migration) + self._logger.debug(f"Registered migration {migration.from_version} -> {migration.to_version}") + + def run_migrations(self) -> bool: + """Migrates the database to the latest version.""" + with self._db.lock: + # This throws if there is a problem. 
+ self._migration_set.validate_migration_chain() + cursor = self._db.conn.cursor() + self._create_migrations_table(cursor=cursor) + + if self._migration_set.count == 0: + self._logger.debug("No migrations registered") + return False + + if self._get_current_version(cursor=cursor) == self._migration_set.latest_version: + self._logger.debug("Database is up to date, no migrations to run") + return False + + self._logger.info("Database update needed") + next_migration = self._migration_set.get(from_version=self._get_current_version(cursor)) + while next_migration is not None: + self._run_migration(next_migration) + next_migration = self._migration_set.get(self._get_current_version(cursor)) + self._logger.info("Database updated successfully") + return True + + def _run_migration(self, migration: Migration) -> None: + """Runs a single migration.""" + try: + # Using sqlite3.Connection as a context manager commits the transaction on exit, or rolls it back if an + # exception is raised. + with self._db.lock, self._db.conn as conn: + cursor = conn.cursor() + if self._get_current_version(cursor) != migration.from_version: + raise MigrationError( + f"Database is at version {self._get_current_version(cursor)}, expected {migration.from_version}" + ) + self._logger.debug(f"Running migration from {migration.from_version} to {migration.to_version}") + + # Run the actual migration + migration.callback(cursor) + + # Update the version + cursor.execute("INSERT INTO migrations (version) VALUES (?);", (migration.to_version,)) + + self._logger.debug( + f"Successfully migrated database from {migration.from_version} to {migration.to_version}" + ) + # We want to catch *any* error, mirroring the behaviour of the sqlite3 module. + except Exception as e: + # The connection context manager has already rolled back the migration, so we don't need to do anything. 
+ msg = f"Error migrating database from {migration.from_version} to {migration.to_version}: {e}" + self._logger.error(msg) + raise MigrationError(msg) from e + + def _create_migrations_table(self, cursor: sqlite3.Cursor) -> None: + """Creates the migrations table for the database, if one does not already exist.""" + with self._db.lock: + try: + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='migrations';") + if cursor.fetchone() is not None: + return + cursor.execute( + """--sql + CREATE TABLE migrations ( + version INTEGER PRIMARY KEY, + migrated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) + ); + """ + ) + cursor.execute("INSERT INTO migrations (version) VALUES (0);") + cursor.connection.commit() + self._logger.debug("Created migrations table") + except sqlite3.Error as e: + msg = f"Problem creating migrations table: {e}" + self._logger.error(msg) + cursor.connection.rollback() + raise MigrationError(msg) from e + + @classmethod + def _get_current_version(cls, cursor: sqlite3.Cursor) -> int: + """Gets the current version of the database, or 0 if the migrations table does not exist.""" + try: + cursor.execute("SELECT MAX(version) FROM migrations;") + version: int = cursor.fetchone()[0] + if version is None: + return 0 + return version + except sqlite3.OperationalError as e: + if "no such table" in str(e): + return 0 + raise diff --git a/invokeai/app/services/workflow_records/workflow_records_common.py b/invokeai/app/services/workflow_records/workflow_records_common.py index ffc98bb786..e7f3d4295f 100644 --- a/invokeai/app/services/workflow_records/workflow_records_common.py +++ b/invokeai/app/services/workflow_records/workflow_records_common.py @@ -65,12 +65,24 @@ class WorkflowWithoutID(BaseModel): nodes: list[dict[str, JsonValue]] = Field(description="The nodes of the workflow.") edges: list[dict[str, JsonValue]] = Field(description="The edges of the workflow.") - model_config = ConfigDict(extra="forbid") + 
model_config = ConfigDict(extra="ignore") WorkflowWithoutIDValidator = TypeAdapter(WorkflowWithoutID) +class UnsafeWorkflowWithVersion(BaseModel): + """ + This utility model only requires a workflow to have a valid version string. + It is used to validate a workflow version without having to validate the entire workflow. + """ + + meta: WorkflowMeta = Field(description="The meta of the workflow.") + + +UnsafeWorkflowWithVersionValidator = TypeAdapter(UnsafeWorkflowWithVersion) + + class Workflow(WorkflowWithoutID): id: str = Field(description="The id of the workflow.") diff --git a/invokeai/app/services/workflow_records/workflow_records_sqlite.py b/invokeai/app/services/workflow_records/workflow_records_sqlite.py index ecbe7c0c9b..ef9e60fb9a 100644 --- a/invokeai/app/services/workflow_records/workflow_records_sqlite.py +++ b/invokeai/app/services/workflow_records/workflow_records_sqlite.py @@ -26,7 +26,6 @@ class SqliteWorkflowRecordsStorage(WorkflowRecordsStorageBase): self._lock = db.lock self._conn = db.conn self._cursor = self._conn.cursor() - self._create_tables() def start(self, invoker: Invoker) -> None: self._invoker = invoker @@ -233,87 +232,3 @@ class SqliteWorkflowRecordsStorage(WorkflowRecordsStorageBase): raise finally: self._lock.release() - - def _create_tables(self) -> None: - try: - self._lock.acquire() - self._cursor.execute( - """--sql - CREATE TABLE IF NOT EXISTS workflow_library ( - workflow_id TEXT NOT NULL PRIMARY KEY, - workflow TEXT NOT NULL, - created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- updated via trigger - updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- updated manually when retrieving workflow - opened_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- Generated columns, needed for indexing and searching - category TEXT GENERATED ALWAYS as (json_extract(workflow, '$.meta.category')) VIRTUAL NOT NULL, - name TEXT GENERATED ALWAYS as 
(json_extract(workflow, '$.name')) VIRTUAL NOT NULL, - description TEXT GENERATED ALWAYS as (json_extract(workflow, '$.description')) VIRTUAL NOT NULL - ); - """ - ) - - self._cursor.execute( - """--sql - CREATE TRIGGER IF NOT EXISTS tg_workflow_library_updated_at - AFTER UPDATE - ON workflow_library FOR EACH ROW - BEGIN - UPDATE workflow_library - SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW') - WHERE workflow_id = old.workflow_id; - END; - """ - ) - - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_workflow_library_created_at ON workflow_library(created_at); - """ - ) - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_workflow_library_updated_at ON workflow_library(updated_at); - """ - ) - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_workflow_library_opened_at ON workflow_library(opened_at); - """ - ) - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_workflow_library_category ON workflow_library(category); - """ - ) - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_workflow_library_name ON workflow_library(name); - """ - ) - self._cursor.execute( - """--sql - CREATE INDEX IF NOT EXISTS idx_workflow_library_description ON workflow_library(description); - """ - ) - - # We do not need the original `workflows` table or `workflow_images` junction table. 
- self._cursor.execute( - """--sql - DROP TABLE IF EXISTS workflow_images; - """ - ) - self._cursor.execute( - """--sql - DROP TABLE IF EXISTS workflows; - """ - ) - - self._conn.commit() - except Exception: - self._conn.rollback() - raise - finally: - self._lock.release() diff --git a/invokeai/backend/model_management/util.py b/invokeai/backend/model_management/util.py index 6d70107c93..f4737d9f0b 100644 --- a/invokeai/backend/model_management/util.py +++ b/invokeai/backend/model_management/util.py @@ -9,7 +9,7 @@ def lora_token_vector_length(checkpoint: dict) -> int: :param checkpoint: The checkpoint """ - def _get_shape_1(key, tensor, checkpoint): + def _get_shape_1(key: str, tensor, checkpoint) -> int: lora_token_vector_length = None if "." not in key: @@ -57,6 +57,10 @@ def lora_token_vector_length(checkpoint: dict) -> int: for key, tensor in checkpoint.items(): if key.startswith("lora_unet_") and ("_attn2_to_k." in key or "_attn2_to_v." in key): lora_token_vector_length = _get_shape_1(key, tensor, checkpoint) + elif key.startswith("lora_unet_") and ( + "time_emb_proj.lora_down" in key + ): # recognizes format at https://civitai.com/models/224641 + lora_token_vector_length = _get_shape_1(key, tensor, checkpoint) elif key.startswith("lora_te") and "_self_attn_" in key: tmp_length = _get_shape_1(key, tensor, checkpoint) if key.startswith("lora_te_"): diff --git a/invokeai/backend/model_manager/migrate_to_db.py b/invokeai/backend/model_manager/migrate_to_db.py index efc8e679af..e68a2eab36 100644 --- a/invokeai/backend/model_manager/migrate_to_db.py +++ b/invokeai/backend/model_manager/migrate_to_db.py @@ -49,7 +49,8 @@ class MigrateModelYamlToDb: def get_db(self) -> ModelRecordServiceSQL: """Fetch the sqlite3 database for this installation.""" - db = SqliteDatabase(self.config, self.logger) + db_path = None if self.config.use_memory_db else self.config.db_path + db = SqliteDatabase(db_path=db_path, logger=self.logger, verbose=self.config.log_sql) return 
ModelRecordServiceSQL(db) def get_yaml(self) -> DictConfig: diff --git a/invokeai/backend/model_manager/probe.py b/invokeai/backend/model_manager/probe.py index 3ba62fa6ff..64eefb774e 100644 --- a/invokeai/backend/model_manager/probe.py +++ b/invokeai/backend/model_manager/probe.py @@ -400,6 +400,8 @@ class LoRACheckpointProbe(CheckpointProbeBase): return BaseModelType.StableDiffusion1 elif token_vector_length == 1024: return BaseModelType.StableDiffusion2 + elif token_vector_length == 1280: + return BaseModelType.StableDiffusionXL # recognizes format at https://civitai.com/models/224641 elif token_vector_length == 2048: return BaseModelType.StableDiffusionXL else: diff --git a/invokeai/backend/stable_diffusion/diffusers_pipeline.py b/invokeai/backend/stable_diffusion/diffusers_pipeline.py index ae0cc17203..a17f0080b8 100644 --- a/invokeai/backend/stable_diffusion/diffusers_pipeline.py +++ b/invokeai/backend/stable_diffusion/diffusers_pipeline.py @@ -242,17 +242,6 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline): control_model: ControlNetModel = None, ): super().__init__( - vae, - text_encoder, - tokenizer, - unet, - scheduler, - safety_checker, - feature_extractor, - requires_safety_checker, - ) - - self.register_modules( vae=vae, text_encoder=text_encoder, tokenizer=tokenizer, @@ -260,9 +249,9 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline): scheduler=scheduler, safety_checker=safety_checker, feature_extractor=feature_extractor, - # FIXME: can't currently register control module - # control_model=control_model, + requires_safety_checker=requires_safety_checker, ) + self.invokeai_diffuser = InvokeAIDiffuserComponent(self.unet, self._unet_forward) self.control_model = control_model self.use_ip_adapter = False diff --git a/invokeai/frontend/web/public/locales/en.json b/invokeai/frontend/web/public/locales/en.json index ec257ee044..f5f3f434f5 100644 --- a/invokeai/frontend/web/public/locales/en.json +++ 
b/invokeai/frontend/web/public/locales/en.json @@ -950,9 +950,9 @@ "problemSettingTitle": "Problem Setting Title", "reloadNodeTemplates": "Reload Node Templates", "removeLinearView": "Remove from Linear View", - "resetWorkflow": "Reset Workflow Editor", - "resetWorkflowDesc": "Are you sure you want to reset the Workflow Editor?", - "resetWorkflowDesc2": "Resetting the Workflow Editor will clear all nodes, edges and workflow details. Saved workflows will not be affected.", + "newWorkflow": "New Workflow", + "newWorkflowDesc": "Create a new workflow?", + "newWorkflowDesc2": "Your current workflow has unsaved changes.", "scheduler": "Scheduler", "schedulerDescription": "TODO", "sDXLMainModelField": "SDXL Model", @@ -1634,10 +1634,10 @@ "userWorkflows": "My Workflows", "defaultWorkflows": "Default Workflows", "openWorkflow": "Open Workflow", - "uploadWorkflow": "Upload Workflow", + "uploadWorkflow": "Load from File", "deleteWorkflow": "Delete Workflow", "unnamedWorkflow": "Unnamed Workflow", - "downloadWorkflow": "Download Workflow", + "downloadWorkflow": "Save to File", "saveWorkflow": "Save Workflow", "saveWorkflowAs": "Save Workflow As", "savingWorkflow": "Saving Workflow...", @@ -1652,7 +1652,7 @@ "searchWorkflows": "Search Workflows", "clearWorkflowSearchFilter": "Clear Workflow Search Filter", "workflowName": "Workflow Name", - "workflowEditorReset": "Workflow Editor Reset", + "newWorkflowCreated": "New Workflow Created", "workflowEditorMenu": "Workflow Editor Menu", "workflowIsOpen": "Workflow is Open" }, diff --git a/invokeai/frontend/web/public/locales/es.json b/invokeai/frontend/web/public/locales/es.json index 20c15f2698..4ce0072517 100644 --- a/invokeai/frontend/web/public/locales/es.json +++ b/invokeai/frontend/web/public/locales/es.json @@ -727,9 +727,6 @@ "showMinimapnodes": "Mostrar el minimapa", "reloadNodeTemplates": "Recargar las plantillas de nodos", "loadWorkflow": "Cargar el flujo de trabajo", - "resetWorkflow": "Reiniciar e flujo de trabajo", - 
"resetWorkflowDesc": "¿Está seguro de que deseas restablecer este flujo de trabajo?", - "resetWorkflowDesc2": "Al reiniciar el flujo de trabajo se borrarán todos los nodos, aristas y detalles del flujo de trabajo.", "downloadWorkflow": "Descargar el flujo de trabajo en un archivo JSON" } } diff --git a/invokeai/frontend/web/public/locales/it.json b/invokeai/frontend/web/public/locales/it.json index 31eea70368..b816858bd8 100644 --- a/invokeai/frontend/web/public/locales/it.json +++ b/invokeai/frontend/web/public/locales/it.json @@ -104,7 +104,16 @@ "copyError": "$t(gallery.copy) Errore", "input": "Ingresso", "notInstalled": "Non $t(common.installed)", - "unknownError": "Errore sconosciuto" + "unknownError": "Errore sconosciuto", + "updated": "Aggiornato", + "save": "Salva", + "created": "Creato", + "prevPage": "Pagina precedente", + "delete": "Elimina", + "orderBy": "Ordinato per", + "nextPage": "Pagina successiva", + "saveAs": "Salva come", + "unsaved": "Non salvato" }, "gallery": { "generations": "Generazioni", @@ -763,7 +772,10 @@ "setIPAdapterImage": "Imposta come immagine per l'Adattatore IP", "problemSavingMaskDesc": "Impossibile salvare la maschera", "setAsCanvasInitialImage": "Imposta come immagine iniziale della tela", - "invalidUpload": "Caricamento non valido" + "invalidUpload": "Caricamento non valido", + "problemDeletingWorkflow": "Problema durante l'eliminazione del flusso di lavoro", + "workflowDeleted": "Flusso di lavoro eliminato", + "problemRetrievingWorkflow": "Problema nel recupero del flusso di lavoro" }, "tooltip": { "feature": { @@ -886,11 +898,8 @@ "zoomInNodes": "Ingrandire", "fitViewportNodes": "Adatta vista", "showGraphNodes": "Mostra sovrapposizione grafico", - "resetWorkflowDesc2": "Reimpostare il flusso di lavoro cancellerà tutti i nodi, i bordi e i dettagli del flusso di lavoro.", "reloadNodeTemplates": "Ricarica i modelli di nodo", "loadWorkflow": "Importa flusso di lavoro JSON", - "resetWorkflow": "Reimposta flusso di lavoro", - 
"resetWorkflowDesc": "Sei sicuro di voler reimpostare questo flusso di lavoro?", "downloadWorkflow": "Esporta flusso di lavoro JSON", "scheduler": "Campionatore", "addNode": "Aggiungi nodo", @@ -1080,25 +1089,27 @@ "collectionOrScalarFieldType": "{{name}} Raccolta|Scalare", "nodeVersion": "Versione Nodo", "inputFieldTypeParseError": "Impossibile analizzare il tipo di campo di input {{node}}.{{field}} ({{message}})", - "unsupportedArrayItemType": "tipo di elemento dell'array non supportato \"{{type}}\"", + "unsupportedArrayItemType": "Tipo di elemento dell'array non supportato \"{{type}}\"", "targetNodeFieldDoesNotExist": "Connessione non valida: il campo di destinazione/input {{node}}.{{field}} non esiste", "unsupportedMismatchedUnion": "tipo CollectionOrScalar non corrispondente con tipi di base {{firstType}} e {{secondType}}", "allNodesUpdated": "Tutti i nodi sono aggiornati", "sourceNodeDoesNotExist": "Connessione non valida: il nodo di origine/output {{node}} non esiste", - "unableToExtractEnumOptions": "impossibile estrarre le opzioni enum", - "unableToParseFieldType": "impossibile analizzare il tipo di campo", + "unableToExtractEnumOptions": "Impossibile estrarre le opzioni enum", + "unableToParseFieldType": "Impossibile analizzare il tipo di campo", "unrecognizedWorkflowVersion": "Versione dello schema del flusso di lavoro non riconosciuta {{version}}", "outputFieldTypeParseError": "Impossibile analizzare il tipo di campo di output {{node}}.{{field}} ({{message}})", "sourceNodeFieldDoesNotExist": "Connessione non valida: il campo di origine/output {{node}}.{{field}} non esiste", "unableToGetWorkflowVersion": "Impossibile ottenere la versione dello schema del flusso di lavoro", "nodePack": "Pacchetto di nodi", - "unableToExtractSchemaNameFromRef": "impossibile estrarre il nome dello schema dal riferimento", + "unableToExtractSchemaNameFromRef": "Impossibile estrarre il nome dello schema dal riferimento", "unknownOutput": "Output sconosciuto: {{name}}", 
"unknownNodeType": "Tipo di nodo sconosciuto", "targetNodeDoesNotExist": "Connessione non valida: il nodo di destinazione/input {{node}} non esiste", "unknownFieldType": "$t(nodes.unknownField) tipo: {{type}}", "deletedInvalidEdge": "Eliminata connessione non valida {{source}} -> {{target}}", - "unknownInput": "Input sconosciuto: {{name}}" + "unknownInput": "Input sconosciuto: {{name}}", + "prototypeDesc": "Questa invocazione è un prototipo. Potrebbe subire modifiche sostanziali durante gli aggiornamenti dell'app e potrebbe essere rimossa in qualsiasi momento.", + "betaDesc": "Questa invocazione è in versione beta. Fino a quando non sarà stabile, potrebbe subire modifiche importanti durante gli aggiornamenti dell'app. Abbiamo intenzione di supportare questa invocazione a lungo termine." }, "boards": { "autoAddBoard": "Aggiungi automaticamente bacheca", @@ -1594,5 +1605,33 @@ "hrf": "Correzione Alta Risoluzione", "hrfStrength": "Forza della Correzione Alta Risoluzione", "strengthTooltip": "Valori più bassi comportano meno dettagli, il che può ridurre potenziali artefatti." 
+ }, + "workflows": { + "saveWorkflowAs": "Salva flusso di lavoro come", + "workflowEditorMenu": "Menu dell'editor del flusso di lavoro", + "noSystemWorkflows": "Nessun flusso di lavoro del sistema", + "workflowName": "Nome del flusso di lavoro", + "noUserWorkflows": "Nessun flusso di lavoro utente", + "defaultWorkflows": "Flussi di lavoro predefiniti", + "saveWorkflow": "Salva flusso di lavoro", + "openWorkflow": "Apri flusso di lavoro", + "clearWorkflowSearchFilter": "Cancella il filtro di ricerca del flusso di lavoro", + "workflowLibrary": "Libreria", + "noRecentWorkflows": "Nessun flusso di lavoro recente", + "workflowSaved": "Flusso di lavoro salvato", + "workflowIsOpen": "Il flusso di lavoro è aperto", + "unnamedWorkflow": "Flusso di lavoro senza nome", + "savingWorkflow": "Salvataggio del flusso di lavoro...", + "problemLoading": "Problema durante il caricamento dei flussi di lavoro", + "loading": "Caricamento dei flussi di lavoro", + "searchWorkflows": "Cerca flussi di lavoro", + "problemSavingWorkflow": "Problema durante il salvataggio del flusso di lavoro", + "deleteWorkflow": "Elimina flusso di lavoro", + "workflows": "Flussi di lavoro", + "noDescription": "Nessuna descrizione", + "userWorkflows": "I miei flussi di lavoro" + }, + "app": { + "storeNotInitialized": "Il negozio non è inizializzato" } } diff --git a/invokeai/frontend/web/public/locales/nl.json b/invokeai/frontend/web/public/locales/nl.json index dfdfb54bd0..6be48de918 100644 --- a/invokeai/frontend/web/public/locales/nl.json +++ b/invokeai/frontend/web/public/locales/nl.json @@ -844,9 +844,6 @@ "hideLegendNodes": "Typelegende veld verbergen", "reloadNodeTemplates": "Herlaad knooppuntsjablonen", "loadWorkflow": "Laad werkstroom", - "resetWorkflow": "Herstel werkstroom", - "resetWorkflowDesc": "Weet je zeker dat je deze werkstroom wilt herstellen?", - "resetWorkflowDesc2": "Herstel van een werkstroom haalt alle knooppunten, randen en werkstroomdetails weg.", "downloadWorkflow": "Download JSON 
van werkstroom", "booleanPolymorphicDescription": "Een verzameling Booleanse waarden.", "scheduler": "Planner", diff --git a/invokeai/frontend/web/public/locales/ru.json b/invokeai/frontend/web/public/locales/ru.json index 22df944086..665a821eb1 100644 --- a/invokeai/frontend/web/public/locales/ru.json +++ b/invokeai/frontend/web/public/locales/ru.json @@ -51,23 +51,23 @@ "statusConvertingModel": "Конвертация модели", "cancel": "Отменить", "accept": "Принять", - "langUkranian": "Украинский", - "langEnglish": "Английский", + "langUkranian": "Украї́нська", + "langEnglish": "English", "postprocessing": "Постобработка", - "langArabic": "Арабский", - "langSpanish": "Испанский", - "langSimplifiedChinese": "Китайский (упрощенный)", - "langDutch": "Нидерландский", - "langFrench": "Французский", - "langGerman": "Немецкий", - "langHebrew": "Иврит", - "langItalian": "Итальянский", - "langJapanese": "Японский", - "langKorean": "Корейский", - "langPolish": "Польский", - "langPortuguese": "Португальский", + "langArabic": "العربية", + "langSpanish": "Español", + "langSimplifiedChinese": "简体中文", + "langDutch": "Nederlands", + "langFrench": "Français", + "langGerman": "German", + "langHebrew": "Hebrew", + "langItalian": "Italiano", + "langJapanese": "日本語", + "langKorean": "한국어", + "langPolish": "Polski", + "langPortuguese": "Português", "txt2img": "Текст в изображение (txt2img)", - "langBrPortuguese": "Португальский (Бразилия)", + "langBrPortuguese": "Português do Brasil", "linear": "Линейная обработка", "dontAskMeAgain": "Больше не спрашивать", "areYouSure": "Вы уверены?", @@ -82,7 +82,46 @@ "darkMode": "Темная тема", "nodeEditor": "Редактор Нодов (Узлов)", "controlNet": "Controlnet", - "advanced": "Расширенные" + "advanced": "Расширенные", + "t2iAdapter": "T2I Adapter", + "checkpoint": "Checkpoint", + "format": "Формат", + "unknown": "Неизвестно", + "folder": "Папка", + "inpaint": "Перерисовать", + "updated": "Обновлен", + "on": "На", + "save": "Сохранить", + "created": 
"Создано", + "error": "Ошибка", + "prevPage": "Предыдущая страница", + "simple": "Простой", + "ipAdapter": "IP Adapter", + "controlAdapter": "Адаптер контроля", + "installed": "Установлено", + "ai": "ИИ", + "auto": "Авто", + "file": "Файл", + "delete": "Удалить", + "template": "Шаблон", + "outputs": "результаты", + "unknownError": "Неизвестная ошибка", + "statusProcessing": "Обработка", + "imageFailedToLoad": "Невозможно загрузить изображение", + "direction": "Направление", + "data": "Данные", + "somethingWentWrong": "Что-то пошло не так", + "safetensors": "Safetensors", + "outpaint": "Расширить изображение", + "orderBy": "Сортировать по", + "copyError": "Ошибка $t(gallery.copy)", + "learnMore": "Узнать больше", + "nextPage": "Следущая страница", + "saveAs": "Сохранить как", + "unsaved": "несохраненный", + "input": "Вход", + "details": "Детали", + "notInstalled": "Нет $t(common.installed)" }, "gallery": { "generations": "Генерации", @@ -102,7 +141,27 @@ "deleteImageBin": "Удаленные изображения будут отправлены в корзину вашей операционной системы.", "deleteImage": "Удалить изображение", "assets": "Ресурсы", - "autoAssignBoardOnClick": "Авто-назначение доски по клику" + "autoAssignBoardOnClick": "Авто-назначение доски по клику", + "deleteSelection": "Удалить выделенное", + "featuresWillReset": "Если вы удалите это изображение, эти функции будут немедленно сброшены.", + "problemDeletingImagesDesc": "Не удалось удалить одно или несколько изображений", + "loading": "Загрузка", + "unableToLoad": "Невозможно загрузить галерею", + "preparingDownload": "Подготовка к скачиванию", + "preparingDownloadFailed": "Проблема с подготовкой к скачиванию", + "image": "изображение", + "drop": "перебросить", + "problemDeletingImages": "Проблема с удалением изображений", + "downloadSelection": "Скачать выделенное", + "currentlyInUse": "В настоящее время это изображение используется в следующих функциях:", + "unstarImage": "Удалить из избранного", + "dropOrUpload": "$t(gallery.drop) или 
загрузить", + "copy": "Копировать", + "download": "Скачать", + "noImageSelected": "Изображение не выбрано", + "setCurrentImage": "Установить как текущее изображение", + "starImage": "Добавить в избранное", + "dropToUpload": "$t(gallery.drop) чтоб загрузить" }, "hotkeys": { "keyboardShortcuts": "Горячие клавиши", @@ -334,7 +393,7 @@ "config": "Файл конфигурации", "configValidationMsg": "Путь до файла конфигурации.", "modelLocation": "Расположение модели", - "modelLocationValidationMsg": "Путь до файла с моделью.", + "modelLocationValidationMsg": "Укажите путь к локальной папке, в которой хранится ваша модель Diffusers", "vaeLocation": "Расположение VAE", "vaeLocationValidationMsg": "Путь до файла VAE.", "width": "Ширина", @@ -377,7 +436,7 @@ "convertToDiffusersHelpText3": "Ваш файл контрольной точки НА ДИСКЕ будет УДАЛЕН, если он находится в корневой папке InvokeAI. Если он находится в пользовательском расположении, то он НЕ будет удален.", "vaeRepoID": "ID репозитория VAE", "mergedModelName": "Название объединенной модели", - "checkpointModels": "Checkpoints", + "checkpointModels": "Модели Checkpoint", "allModels": "Все модели", "addDiffuserModel": "Добавить Diffusers", "repo_id": "ID репозитория", @@ -413,7 +472,7 @@ "none": "пусто", "addDifference": "Добавить разницу", "vaeRepoIDValidationMsg": "Онлайн репозиторий VAE", - "convertToDiffusersHelpText2": "Этот процесс заменит вашу запись в Model Manager на версию той же модели в Diffusers.", + "convertToDiffusersHelpText2": "Этот процесс заменит вашу запись в менеджере моделей на версию той же модели в Diffusers.", "custom": "Пользовательский", "modelTwo": "Модель 2", "mergedModelSaveLocation": "Путь сохранения", @@ -444,14 +503,27 @@ "modelUpdateFailed": "Не удалось обновить модель", "modelConversionFailed": "Не удалось сконвертировать модель", "modelsMergeFailed": "Не удалось выполнить слияние моделей", - "loraModels": "LoRAs", - "onnxModels": "Onnx", - "oliveModels": "Olives" + "loraModels": "Модели LoRA", + 
"onnxModels": "Модели Onnx", + "oliveModels": "Модели Olives", + "conversionNotSupported": "Преобразование не поддерживается", + "noModels": "Нет моделей", + "predictionType": "Тип прогноза (для моделей Stable Diffusion 2.x и периодических моделей Stable Diffusion 1.x)", + "quickAdd": "Быстрое добавление", + "simpleModelDesc": "Укажите путь к локальной модели Diffusers , локальной модели checkpoint / safetensors, идентификатор репозитория HuggingFace или URL-адрес модели контрольной checkpoint / diffusers.", + "advanced": "Продвинутый", + "useCustomConfig": "Использовать кастомный конфиг", + "checkpointOrSafetensors": "$t(common.checkpoint) / $t(common.safetensors)", + "closeAdvanced": "Скрыть расширенные", + "modelType": "Тип модели", + "customConfigFileLocation": "Расположение пользовательского файла конфигурации", + "vaePrecision": "Точность VAE", + "noModelSelected": "Модель не выбрана" }, "parameters": { "images": "Изображения", "steps": "Шаги", - "cfgScale": "Уровень CFG", + "cfgScale": "Точность следования запросу (CFG)", "width": "Ширина", "height": "Высота", "seed": "Сид", @@ -471,7 +543,7 @@ "upscaleImage": "Увеличить изображение", "scale": "Масштаб", "otherOptions": "Другие параметры", - "seamlessTiling": "Бесшовный узор", + "seamlessTiling": "Бесшовность", "hiresOptim": "Оптимизация High Res", "imageFit": "Уместить изображение", "codeformerFidelity": "Точность", @@ -504,7 +576,8 @@ "immediate": "Отменить немедленно", "schedule": "Отменить после текущей итерации", "isScheduled": "Отмена", - "setType": "Установить тип отмены" + "setType": "Установить тип отмены", + "cancel": "Отмена" }, "general": "Основное", "hiresStrength": "Сила High Res", @@ -516,8 +589,8 @@ "copyImage": "Скопировать изображение", "showPreview": "Показать предпросмотр", "noiseSettings": "Шум", - "seamlessXAxis": "Ось X", - "seamlessYAxis": "Ось Y", + "seamlessXAxis": "Горизонтальная", + "seamlessYAxis": "Вертикальная", "scheduler": "Планировщик", "boundingBoxWidth": "Ширина 
ограничивающей рамки", "boundingBoxHeight": "Высота ограничивающей рамки", @@ -534,7 +607,51 @@ "coherenceSteps": "Шагов", "coherencePassHeader": "Порог Coherence", "coherenceStrength": "Сила", - "compositingSettingsHeader": "Настройки компоновки" + "compositingSettingsHeader": "Настройки компоновки", + "invoke": { + "noNodesInGraph": "Нет узлов в графе", + "noModelSelected": "Модель не выбрана", + "noPrompts": "Подсказки не создаются", + "systemBusy": "Система занята", + "noInitialImageSelected": "Исходное изображение не выбрано", + "missingInputForField": "{{nodeLabel}} -> {{fieldLabel}} отсутствует ввод", + "noControlImageForControlAdapter": "Адаптер контроля #{{number}} не имеет изображения", + "noModelForControlAdapter": "Не выбрана модель адаптера контроля #{{number}}.", + "unableToInvoke": "Невозможно вызвать", + "incompatibleBaseModelForControlAdapter": "Модель контрольного адаптера №{{number}} недействительна для основной модели.", + "systemDisconnected": "Система отключена", + "missingNodeTemplate": "Отсутствует шаблон узла", + "readyToInvoke": "Готово к вызову", + "missingFieldTemplate": "Отсутствует шаблон поля", + "addingImagesTo": "Добавление изображений в" + }, + "seamlessX&Y": "Бесшовный X & Y", + "isAllowedToUpscale": { + "useX2Model": "Изображение слишком велико для увеличения с помощью модели x4. Используйте модель x2", + "tooLarge": "Изображение слишком велико для увеличения. 
Выберите изображение меньшего размера" + }, + "aspectRatioFree": "Свободное", + "maskEdge": "Край маски", + "cpuNoise": "CPU шум", + "cfgRescaleMultiplier": "Множитель масштабирования CFG", + "cfgRescale": "Изменение масштаба CFG", + "patchmatchDownScaleSize": "уменьшить", + "gpuNoise": "GPU шум", + "seamlessX": "Бесшовный X", + "useCpuNoise": "Использовать шум CPU", + "clipSkipWithLayerCount": "CLIP пропуск {{layerCount}}", + "seamlessY": "Бесшовный Y", + "manualSeed": "Указанный сид", + "imageActions": "Действия с изображениями", + "randomSeed": "Случайный", + "iterations": "Кол-во", + "iterationsWithCount_one": "{{count}} Итерация", + "iterationsWithCount_few": "{{count}} Итерации", + "iterationsWithCount_many": "{{count}} Итераций", + "useSize": "Использовать размер", + "unmasked": "Без маски", + "enableNoiseSettings": "Включить настройки шума", + "coherenceMode": "Режим" }, "settings": { "models": "Модели", @@ -543,7 +660,7 @@ "confirmOnDelete": "Подтверждать удаление", "displayHelpIcons": "Показывать значки подсказок", "enableImageDebugging": "Включить отладку", - "resetWebUI": "Сброс настроек Web UI", + "resetWebUI": "Сброс настроек веб-интерфейса", "resetWebUIDesc1": "Сброс настроек веб-интерфейса удаляет только локальный кэш браузера с вашими изображениями и настройками. Он не удаляет изображения с диска.", "resetWebUIDesc2": "Если изображения не отображаются в галерее или не работает что-то еще, пожалуйста, попробуйте сбросить настройки, прежде чем сообщать о проблеме на GitHub.", "resetComplete": "Настройки веб-интерфейса были сброшены.", @@ -563,7 +680,23 @@ "beta": "Бета", "alternateCanvasLayout": "Альтернативный слой холста", "showAdvancedOptions": "Показать доп. 
параметры", - "autoChangeDimensions": "Обновить Ш/В на стандартные для модели при изменении" + "autoChangeDimensions": "Обновить Ш/В на стандартные для модели при изменении", + "clearIntermediates": "Очистить промежуточные", + "clearIntermediatesDesc3": "Изображения вашей галереи не будут удалены.", + "clearIntermediatesWithCount_one": "Очистить {{count}} промежуточное", + "clearIntermediatesWithCount_few": "Очистить {{count}} промежуточных", + "clearIntermediatesWithCount_many": "Очистить {{count}} промежуточных", + "enableNSFWChecker": "Включить NSFW проверку", + "clearIntermediatesDisabled": "Очередь должна быть пуста, чтобы очистить промежуточные продукты", + "clearIntermediatesDesc2": "Промежуточные изображения — это побочные продукты генерации, отличные от результирующих изображений в галерее. Очистка промежуточных файлов освободит место на диске.", + "enableInvisibleWatermark": "Включить невидимый водяной знак", + "enableInformationalPopovers": "Включить информационные всплывающие окна", + "intermediatesCleared_one": "Очищено {{count}} промежуточное", + "intermediatesCleared_few": "Очищено {{count}} промежуточных", + "intermediatesCleared_many": "Очищено {{count}} промежуточных", + "clearIntermediatesDesc1": "Очистка промежуточных элементов приведет к сбросу состояния Canvas и ControlNet.", + "intermediatesClearedFailed": "Проблема очистки промежуточных", + "reloadingIn": "Перезагрузка через" }, "toast": { "tempFoldersEmptied": "Временная папка очищена", @@ -611,7 +744,48 @@ "nodesNotValidJSON": "Недопустимый JSON", "nodesCorruptedGraph": "Не удается загрузить. 
Граф, похоже, поврежден.", "nodesSaved": "Узлы сохранены", - "nodesNotValidGraph": "Недопустимый граф узлов InvokeAI" + "nodesNotValidGraph": "Недопустимый граф узлов InvokeAI", + "baseModelChangedCleared_one": "Базовая модель изменила, очистила или отключила {{count}} несовместимую подмодель", + "baseModelChangedCleared_few": "Базовая модель изменила, очистила или отключила {{count}} несовместимые подмодели", + "baseModelChangedCleared_many": "Базовая модель изменила, очистила или отключила {{count}} несовместимых подмоделей", + "imageSavingFailed": "Не удалось сохранить изображение", + "canvasSentControlnetAssets": "Холст отправлен в ControlNet и ресурсы", + "problemCopyingCanvasDesc": "Невозможно экспортировать базовый слой", + "loadedWithWarnings": "Рабочий процесс загружен с предупреждениями", + "setInitialImage": "Установить как исходное изображение", + "canvasCopiedClipboard": "Холст скопирован в буфер обмена", + "setControlImage": "Установить как контрольное изображение", + "setNodeField": "Установить как поле узла", + "problemSavingMask": "Проблема с сохранением маски", + "problemSavingCanvasDesc": "Невозможно экспортировать базовый слой", + "invalidUpload": "Неверная загрузка", + "maskSavedAssets": "Маска сохранена в ресурсах", + "modelAddFailed": "Не удалось добавить модель", + "problemDownloadingCanvas": "Проблема с скачиванием холста", + "setAsCanvasInitialImage": "Установить в качестве исходного изображения холста", + "problemMergingCanvas": "Проблема с объединением холста", + "setCanvasInitialImage": "Установить исходное изображение холста", + "imageUploaded": "Изображение загружено", + "addedToBoard": "Добавлено на доску", + "workflowLoaded": "Рабочий процесс загружен", + "problemDeletingWorkflow": "Проблема с удалением рабочего процесса", + "modelAddedSimple": "Модель добавлена", + "problemImportingMaskDesc": "Невозможно экспортировать маску", + "problemCopyingCanvas": "Проблема с копированием холста", + "workflowDeleted": "Рабочий процесс удален", 
+ "problemSavingCanvas": "Проблема с сохранением холста", + "canvasDownloaded": "Холст скачан", + "setIPAdapterImage": "Установить как образ IP-адаптера", + "problemMergingCanvasDesc": "Невозможно экспортировать базовый слой", + "problemDownloadingCanvasDesc": "Невозможно экспортировать базовый слой", + "problemSavingMaskDesc": "Невозможно экспортировать маску", + "problemRetrievingWorkflow": "Проблема с получением рабочего процесса", + "imageSaved": "Изображение сохранено", + "maskSentControlnetAssets": "Маска отправлена в ControlNet и ресурсы", + "canvasSavedGallery": "Холст сохранен в галерею", + "imageUploadFailed": "Не удалось загрузить изображение", + "modelAdded": "Добавлена модель: {{modelName}}", + "problemImportingMask": "Проблема с импортом маски" }, "tooltip": { "feature": { @@ -686,7 +860,10 @@ "betaDarkenOutside": "Затемнить снаружи", "betaLimitToBox": "Ограничить выделением", "betaPreserveMasked": "Сохранять маскируемую область", - "antialiasing": "Не удалось скопировать ссылку на изображение" + "antialiasing": "Не удалось скопировать ссылку на изображение", + "saveMask": "Сохранить $t(unifiedCanvas.mask)", + "showResultsOn": "Показывать результаты (вкл)", + "showResultsOff": "Показывать результаты (вЫкл)" }, "accessibility": { "modelSelect": "Выбор модели", @@ -708,7 +885,12 @@ "useThisParameter": "Использовать этот параметр", "copyMetadataJson": "Скопировать метаданные JSON", "exitViewer": "Закрыть просмотрщик", - "menu": "Меню" + "menu": "Меню", + "showGalleryPanel": "Показать панель галереи", + "mode": "Режим", + "loadMore": "Загрузить больше", + "resetUI": "$t(accessibility.reset) интерфейс", + "createIssue": "Сообщить о проблеме" }, "ui": { "showProgressImages": "Показывать промежуточный итог", @@ -727,11 +909,213 @@ "hideLegendNodes": "Скрыть тип поля", "showMinimapnodes": "Показать миникарту", "loadWorkflow": "Загрузить рабочий процесс", - "resetWorkflowDesc2": "Сброс рабочего процесса очистит все узлы, ребра и детали рабочего процесса.", - 
"resetWorkflow": "Сбросить рабочий процесс", - "resetWorkflowDesc": "Вы уверены, что хотите сбросить этот рабочий процесс?", "reloadNodeTemplates": "Перезагрузить шаблоны узлов", - "downloadWorkflow": "Скачать JSON рабочего процесса" + "downloadWorkflow": "Скачать JSON рабочего процесса", + "booleanPolymorphicDescription": "Коллекция логических значений.", + "addNode": "Добавить узел", + "addLinearView": "Добавить в линейный вид", + "animatedEdges": "Анимированные ребра", + "booleanCollectionDescription": "Коллекция логических значений.", + "boardField": "Доска", + "animatedEdgesHelp": "Анимация выбранных ребер и ребер, соединенных с выбранными узлами", + "booleanPolymorphic": "Логическое полиморфное", + "boolean": "Логические значения", + "booleanDescription": "Логические значения могут быть только true или false.", + "cannotConnectInputToInput": "Невозможно подключить вход к входу", + "boardFieldDescription": "Доска галереи", + "cannotConnectOutputToOutput": "Невозможно подключить выход к выходу", + "booleanCollection": "Логическая коллекция", + "addNodeToolTip": "Добавить узел (Shift+A, Пробел)", + "scheduler": "Планировщик", + "inputField": "Поле ввода", + "controlFieldDescription": "Информация об управлении, передаваемая между узлами.", + "skippingUnknownOutputType": "Пропуск неизвестного типа выходного поля", + "denoiseMaskFieldDescription": "Маска шумоподавления может передаваться между узлами", + "floatCollectionDescription": "Коллекция чисел Float.", + "missingTemplate": "Недопустимый узел: узел {{node}} типа {{type}} не имеет шаблона (не установлен?)", + "outputSchemaNotFound": "Схема вывода не найдена", + "ipAdapterPolymorphicDescription": "Коллекция IP-адаптеров.", + "workflowDescription": "Краткое описание", + "inputFieldTypeParseError": "Невозможно разобрать тип поля ввода {{node}}.{{field}} ({{message}})", + "colorFieldDescription": "Цвет RGBA.", + "mainModelField": "Модель", + "unhandledInputProperty": "Необработанное входное свойство", + 
"unsupportedAnyOfLength": "слишком много элементов объединения ({{count}})", + "versionUnknown": " Версия неизвестна", + "ipAdapterCollection": "Коллекция IP-адаптеров", + "conditioningCollection": "Коллекция условий", + "maybeIncompatible": "Может быть несовместимо с установленным", + "unsupportedArrayItemType": "неподдерживаемый тип элемента массива \"{{type}}\"", + "ipAdapterPolymorphic": "Полиморфный IP-адаптер", + "noNodeSelected": "Узел не выбран", + "unableToValidateWorkflow": "Невозможно проверить рабочий процесс", + "enum": "Перечисления", + "updateAllNodes": "Обновить узлы", + "integerPolymorphicDescription": "Коллекция целых чисел.", + "noOutputRecorded": "Выходы не зарегистрированы", + "updateApp": "Обновить приложение", + "conditioningCollectionDescription": "Условные обозначения могут передаваться между узлами.", + "colorPolymorphic": "Полиморфный цвет", + "colorCodeEdgesHelp": "Цветовая маркировка ребер в соответствии с их связанными полями", + "float": "Float", + "workflowContact": "Контакт", + "targetNodeFieldDoesNotExist": "Неверный край: целевое/вводное поле {{node}}.{{field}} не существует", + "skippingReservedFieldType": "Пропуск зарезервированного типа поля", + "unsupportedMismatchedUnion": "несовпадение типа CollectionOrScalar с базовыми типами {{firstType}} и {{secondType}}", + "sDXLMainModelFieldDescription": "Поле модели SDXL.", + "allNodesUpdated": "Все узлы обновлены", + "conditioningPolymorphic": "Полиморфные условия", + "integer": "Целое число", + "colorField": "Цвет", + "nodeTemplate": "Шаблон узла", + "problemReadingWorkflow": "Проблема с чтением рабочего процесса из изображения", + "sourceNode": "Исходный узел", + "nodeOpacity": "Непрозрачность узла", + "sourceNodeDoesNotExist": "Недопустимое ребро: исходный/выходной узел {{node}} не существует", + "pickOne": "Выбери один", + "integerDescription": "Целые числа — это числа без запятой или точки.", + "outputField": "Поле вывода", + "unableToLoadWorkflow": "Невозможно загрузить рабочий 
процесс", + "unableToExtractEnumOptions": "невозможно извлечь параметры перечисления", + "snapToGrid": "Привязка к сетке", + "stringPolymorphic": "Полиморфная строка", + "conditioningPolymorphicDescription": "Условие может быть передано между узлами.", + "noFieldsLinearview": "Нет полей, добавленных в линейный вид", + "skipped": "Пропущено", + "unableToParseFieldType": "невозможно проанализировать тип поля", + "imagePolymorphic": "Полиморфное изображение", + "nodeSearch": "Поиск узлов", + "updateNode": "Обновить узел", + "imagePolymorphicDescription": "Коллекция изображений.", + "floatPolymorphic": "Полиморфные Float", + "outputFieldInInput": "Поле вывода во входных данных", + "version": "Версия", + "doesNotExist": "не найдено", + "unrecognizedWorkflowVersion": "Неизвестная версия схемы рабочего процесса {{version}}", + "ipAdapterCollectionDescription": "Коллекция IP-адаптеров.", + "stringCollectionDescription": "Коллекция строк.", + "unableToParseNode": "Невозможно разобрать узел", + "controlCollection": "Контрольная коллекция", + "validateConnections": "Проверка соединений и графика", + "stringCollection": "Коллекция строк", + "inputMayOnlyHaveOneConnection": "Вход может иметь только одно соединение", + "notes": "Заметки", + "outputFieldTypeParseError": "Невозможно разобрать тип поля вывода {{node}}.{{field}} ({{message}})", + "uNetField": "UNet", + "nodeOutputs": "Выходы узла", + "currentImageDescription": "Отображает текущее изображение в редакторе узлов", + "validateConnectionsHelp": "Предотвратить создание недопустимых соединений и вызов недопустимых графиков", + "problemSettingTitle": "Проблема с настройкой названия", + "ipAdapter": "IP-адаптер", + "integerCollection": "Коллекция целых чисел", + "collectionItem": "Элемент коллекции", + "noConnectionInProgress": "Соединение не выполняется", + "vaeModelField": "VAE", + "controlCollectionDescription": "Информация об управлении, передаваемая между узлами.", + "skippedReservedInput": "Пропущено зарезервированное 
поле ввода", + "workflowVersion": "Версия", + "noConnectionData": "Нет данных о соединении", + "outputFields": "Поля вывода", + "fieldTypesMustMatch": "Типы полей должны совпадать", + "workflow": "Рабочий процесс", + "edge": "Край", + "inputNode": "Входной узел", + "enumDescription": "Перечисления - это значения, которые могут быть одним из нескольких вариантов.", + "unkownInvocation": "Неизвестный тип вызова", + "sourceNodeFieldDoesNotExist": "Неверный край: поле источника/вывода {{node}}.{{field}} не существует", + "imageField": "Изображение", + "skippedReservedOutput": "Пропущено зарезервированное поле вывода", + "cannotDuplicateConnection": "Невозможно создать дубликаты соединений", + "unknownTemplate": "Неизвестный шаблон", + "noWorkflow": "Нет рабочего процесса", + "removeLinearView": "Удалить из линейного вида", + "integerCollectionDescription": "Коллекция целых чисел.", + "colorPolymorphicDescription": "Коллекция цветов.", + "sDXLMainModelField": "Модель SDXL", + "workflowTags": "Теги", + "denoiseMaskField": "Маска шумоподавления", + "missingCanvaInitImage": "Отсутствует начальное изображение холста", + "conditioningFieldDescription": "Условие может быть передано между узлами.", + "clipFieldDescription": "Подмодели Tokenizer и text_encoder.", + "fullyContainNodesHelp": "Чтобы узлы были выбраны, они должны полностью находиться в поле выбора", + "unableToGetWorkflowVersion": "Не удалось получить версию схемы рабочего процесса", + "noImageFoundState": "Начальное изображение не найдено в состоянии", + "workflowValidation": "Ошибка проверки рабочего процесса", + "nodePack": "Пакет узлов", + "stringDescription": "Строки это просто текст.", + "nodeType": "Тип узла", + "noMatchingNodes": "Нет соответствующих узлов", + "fullyContainNodes": "Выбор узлов с полным содержанием", + "integerPolymorphic": "Целочисленные полиморфные", + "executionStateInProgress": "В процессе", + "unableToExtractSchemaNameFromRef": "невозможно извлечь имя схемы из ссылки", + "noFieldType": 
"Нет типа поля", + "colorCollection": "Коллекция цветов.", + "executionStateError": "Ошибка", + "noOutputSchemaName": "В объекте ref не найдено имя выходной схемы", + "ipAdapterModel": "Модель IP-адаптера", + "prototypeDesc": "Этот вызов является прототипом. Он может претерпевать изменения при обновлении приложения и может быть удален в любой момент.", + "unableToMigrateWorkflow": "Невозможно перенести рабочий процесс", + "skippingInputNoTemplate": "Пропуск поля ввода без шаблона", + "ipAdapterDescription": "Image Prompt Adapter (IP-адаптер).", + "missingCanvaInitMaskImages": "Отсутствуют начальные изображения холста и маски", + "problemReadingMetadata": "Проблема с чтением метаданных с изображения", + "unknownOutput": "Неизвестный вывод: {{name}}", + "stringPolymorphicDescription": "Коллекция строк.", + "oNNXModelField": "Модель ONNX", + "executionStateCompleted": "Выполнено", + "node": "Узел", + "skippingUnknownInputType": "Пропуск неизвестного типа поля", + "workflowAuthor": "Автор", + "currentImage": "Текущее изображение", + "controlField": "Контроль", + "workflowName": "Название", + "collection": "Коллекция", + "ipAdapterModelDescription": "Поле модели IP-адаптера", + "invalidOutputSchema": "Неверная схема вывода", + "unableToUpdateNode": "Невозможно обновить узел", + "floatDescription": "Float - это числа с запятой.", + "floatPolymorphicDescription": "Коллекция Float-ов.", + "vaeField": "VAE", + "conditioningField": "Обусловленность", + "unknownErrorValidatingWorkflow": "Неизвестная ошибка при проверке рабочего процесса", + "collectionFieldType": "Коллекция {{name}}", + "unhandledOutputProperty": "Необработанное выходное свойство", + "workflowNotes": "Примечания", + "string": "Строка", + "floatCollection": "Коллекция Float", + "unknownNodeType": "Неизвестный тип узла", + "unableToUpdateNodes_one": "Невозможно обновить {{count}} узел", + "unableToUpdateNodes_few": "Невозможно обновить {{count}} узла", + "unableToUpdateNodes_many": "Невозможно обновить 
{{count}} узлов", + "connectionWouldCreateCycle": "Соединение создаст цикл", + "cannotConnectToSelf": "Невозможно подключиться к самому себе", + "notesDescription": "Добавляйте заметки о своем рабочем процессе", + "unknownField": "Неизвестное поле", + "inputFields": "Поля ввода", + "colorCodeEdges": "Ребра с цветовой кодировкой", + "uNetFieldDescription": "Подмодель UNet.", + "unknownNode": "Неизвестный узел", + "targetNodeDoesNotExist": "Недопустимое ребро: целевой/входной узел {{node}} не существует", + "imageCollectionDescription": "Коллекция изображений.", + "mismatchedVersion": "Недопустимый узел: узел {{node}} типа {{type}} имеет несоответствующую версию (попробовать обновить?)", + "unknownFieldType": "$t(nodes.unknownField) тип: {{type}}", + "vaeFieldDescription": "Подмодель VAE.", + "imageFieldDescription": "Изображения могут передаваться между узлами.", + "outputNode": "Выходной узел", + "collectionOrScalarFieldType": "Коллекция | Скаляр {{name}}", + "betaDesc": "Этот вызов находится в бета-версии. Пока он не станет стабильным, в нем могут происходить изменения при обновлении приложений. 
Мы планируем поддерживать этот вызов в течение длительного времени.", + "nodeVersion": "Версия узла", + "loadingNodes": "Загрузка узлов...", + "snapToGridHelp": "Привязка узлов к сетке при перемещении", + "workflowSettings": "Настройки редактора рабочих процессов", + "sDXLRefinerModelField": "Модель перерисовщик", + "loRAModelField": "LoRA", + "deletedInvalidEdge": "Удалено недопустимое ребро {{source}} -> {{target}}", + "unableToParseEdge": "Невозможно разобрать край", + "unknownInput": "Неизвестный вход: {{name}}", + "oNNXModelFieldDescription": "Поле модели ONNX.", + "imageCollection": "Коллекция изображений" }, "controlnet": { "amult": "a_mult", @@ -755,7 +1139,65 @@ "autoConfigure": "Автонастройка процессора", "delete": "Удалить", "canny": "Canny", - "depthZoeDescription": "Генерация карты глубины с использованием Zoe" + "depthZoeDescription": "Генерация карты глубины с использованием Zoe", + "resize": "Изменить размер", + "showAdvanced": "Показать расширенные", + "addT2IAdapter": "Добавить $t(common.t2iAdapter)", + "importImageFromCanvas": "Импортировать изображение с холста", + "lineartDescription": "Конвертация изображения в контурный рисунок", + "normalBae": "Обычный BAE", + "importMaskFromCanvas": "Импортировать маску с холста", + "hideAdvanced": "Скрыть расширенные", + "controlNetEnabledT2IDisabled": "$t(common.controlNet) включен, $t(common.t2iAdapter)s отключен", + "ipAdapterModel": "Модель адаптера", + "resetControlImage": "Сбросить контрольное изображение", + "prompt": "Запрос", + "controlnet": "$t(controlnet.controlAdapter_one) №{{number}} $t(common.controlNet)", + "openPoseDescription": "Оценка позы человека с помощью Openpose", + "resizeMode": "Режим изменения размера", + "t2iEnabledControlNetDisabled": "$t(common.t2iAdapter) включен, $t(common.controlNet)s отключен", + "weight": "Вес", + "selectModel": "Выберите модель", + "w": "В", + "processor": "Процессор", + "addControlNet": "Добавить $t(common.controlNet)", + "none": "ничего", + 
"incompatibleBaseModel": "Несовместимая базовая модель:", + "controlNetT2IMutexDesc": "$t(common.controlNet) и $t(common.t2iAdapter) одновременно в настоящее время не поддерживаются.", + "ip_adapter": "$t(controlnet.controlAdapter_one) №{{number}} $t(common.ipAdapter)", + "pidiDescription": "PIDI-обработка изображений", + "mediapipeFace": "Лицо Mediapipe", + "fill": "Заполнить", + "addIPAdapter": "Добавить $t(common.ipAdapter)", + "lineart": "Контурный рисунок", + "colorMapDescription": "Создает карту цветов из изображения", + "lineartAnimeDescription": "Создание контурных рисунков в стиле аниме", + "t2i_adapter": "$t(controlnet.controlAdapter_one) №{{number}} $t(common.t2iAdapter)", + "minConfidence": "Минимальная уверенность", + "imageResolution": "Разрешение изображения", + "colorMap": "Цвет", + "lowThreshold": "Низкий порог", + "highThreshold": "Высокий порог", + "normalBaeDescription": "Обычная обработка BAE", + "noneDescription": "Обработка не применяется", + "saveControlImage": "Сохранить контрольное изображение", + "toggleControlNet": "Переключить эту ControlNet", + "controlAdapter_one": "Адаптер контроля", + "controlAdapter_few": "Адаптера контроля", + "controlAdapter_many": "Адаптеров контроля", + "safe": "Безопасный", + "colorMapTileSize": "Размер плитки", + "lineartAnime": "Контурный рисунок в стиле аниме", + "ipAdapterImageFallback": "Изображение IP Adapter не выбрано", + "mediapipeFaceDescription": "Обнаружение лиц с помощью Mediapipe", + "hedDescription": "Целостное обнаружение границ", + "setControlImageDimensions": "Установите размеры контрольного изображения на Ш/В", + "scribble": "каракули", + "resetIPAdapterImage": "Сбросить изображение IP Adapter", + "handAndFace": "Руки и Лицо", + "enableIPAdapter": "Включить IP Adapter", + "maxFaces": "Макс Лица", + "mlsdDescription": "Минималистичный детектор отрезков линии" }, "boards": { "autoAddBoard": "Авто добавление Доски", @@ -772,6 +1214,439 @@ "uncategorized": "Без категории", "changeBoard": 
"Изменить Доску", "loading": "Загрузка...", - "clearSearch": "Очистить поиск" + "clearSearch": "Очистить поиск", + "deleteBoardOnly": "Удалить только доску", + "movingImagesToBoard_one": "Перемещаем {{count}} изображение на доску:", + "movingImagesToBoard_few": "Перемещаем {{count}} изображения на доску:", + "movingImagesToBoard_many": "Перемещаем {{count}} изображений на доску:", + "downloadBoard": "Скачать доску", + "deleteBoard": "Удалить доску", + "deleteBoardAndImages": "Удалить доску и изображения", + "deletedBoardsCannotbeRestored": "Удаленные доски не подлежат восстановлению" + }, + "dynamicPrompts": { + "seedBehaviour": { + "perPromptDesc": "Используйте разные сиды для каждого изображения", + "perIterationLabel": "Сид на итерацию", + "perIterationDesc": "Используйте разные сиды для каждой итерации", + "perPromptLabel": "Сид для каждого изображения", + "label": "Поведение сида" + }, + "enableDynamicPrompts": "Динамические запросы", + "combinatorial": "Комбинаторная генерация", + "maxPrompts": "Максимум запросов", + "promptsPreview": "Предпросмотр запросов", + "promptsWithCount_one": "{{count}} Запрос", + "promptsWithCount_few": "{{count}} Запроса", + "promptsWithCount_many": "{{count}} Запросов", + "dynamicPrompts": "Динамические запросы" + }, + "popovers": { + "noiseUseCPU": { + "paragraphs": [ + "Определяет, генерируется ли шум на CPU или на GPU.", + "Если включен шум CPU, определенное начальное число будет создавать одно и то же изображение на любом компьютере.", + "Включение шума CPU не влияет на производительность." + ], + "heading": "Использовать шум CPU" + }, + "paramScheduler": { + "paragraphs": [ + "Планировщик определяет, как итеративно добавлять шум к изображению или как обновлять образец на основе выходных данных модели." + ], + "heading": "Планировщик" + }, + "scaleBeforeProcessing": { + "paragraphs": [ + "Масштабирует выбранную область до размера, наиболее подходящего для модели перед процессом создания изображения." 
+ ], + "heading": "Масштабирование перед обработкой" + }, + "compositingMaskAdjustments": { + "heading": "Регулировка маски", + "paragraphs": [ + "Отрегулируйте маску." + ] + }, + "paramRatio": { + "heading": "Соотношение сторон", + "paragraphs": [ + "Соотношение сторон создаваемого изображения.", + "Размер изображения (в пикселях), эквивалентный 512x512, рекомендуется для моделей SD1.5, а размер, эквивалентный 1024x1024, рекомендуется для моделей SDXL." + ] + }, + "compositingCoherenceSteps": { + "heading": "Шаги", + "paragraphs": [ + null, + "То же, что и основной параметр «Шаги»." + ] + }, + "dynamicPrompts": { + "paragraphs": [ + "Динамические запросы превращают один запрос во множество.", + "Базовый синтаксис: \"a {red|green|blue} ball\". В итоге будет 3 запроса: \"a red ball\", \"a green ball\" и \"a blue ball\".", + "Вы можете использовать синтаксис столько раз, сколько захотите в одном запросе, но обязательно контролируйте количество генерируемых запросов с помощью параметра «Максимальное количество запросов»." + ], + "heading": "Динамические запросы" + }, + "paramVAE": { + "paragraphs": [ + "Модель, используемая для преобразования вывода AI в конечное изображение." + ], + "heading": "VAE" + }, + "compositingBlur": { + "heading": "Размытие", + "paragraphs": [ + "Радиус размытия маски." + ] + }, + "paramIterations": { + "paragraphs": [ + "Количество изображений, которые нужно сгенерировать.", + "Если динамические подсказки включены, каждое из подсказок будет генерироваться столько раз." + ], + "heading": "Итерации" + }, + "paramVAEPrecision": { + "heading": "Точность VAE", + "paragraphs": [ + "Точность, используемая во время кодирования и декодирования VAE. Точность FP16/половина более эффективна за счет незначительных изменений изображения." 
+ ] + }, + "compositingCoherenceMode": { + "heading": "Режим" + }, + "paramSeed": { + "paragraphs": [ + "Управляет стартовым шумом, используемым для генерации.", + "Отключите «Случайное начальное число», чтобы получить идентичные результаты с теми же настройками генерации." + ], + "heading": "Сид" + }, + "controlNetResizeMode": { + "heading": "Режим изменения размера", + "paragraphs": [ + "Как изображение ControlNet будет соответствовать размеру выходного изображения." + ] + }, + "controlNetBeginEnd": { + "paragraphs": [ + "На каких этапах процесса шумоподавления будет применена ControlNet.", + "ControlNet, применяемые в начале процесса, направляют композицию, а ControlNet, применяемые в конце, направляют детали." + ], + "heading": "Процент начала/конца шага" + }, + "dynamicPromptsSeedBehaviour": { + "paragraphs": [ + "Управляет использованием сида при создании запросов.", + "Для каждой итерации будет использоваться уникальный сид. Используйте это, чтобы изучить варианты запросов для одного сида.", + "Например, если у вас 5 запросов, каждое изображение будет использовать один и то же сид.", + "для каждого изображения будет использоваться уникальный сид. Это обеспечивает большую вариативность." + ], + "heading": "Поведение сида" + }, + "clipSkip": { + "paragraphs": [ + "Выберите, сколько слоев модели CLIP нужно пропустить.", + "Некоторые модели работают лучше с определенными настройками пропуска CLIP.", + "Более высокое значение обычно приводит к менее детализированному изображению." + ], + "heading": "CLIP пропуск" + }, + "paramModel": { + "heading": "Модель", + "paragraphs": [ + "Модель, используемая для шагов шумоподавления.", + "Различные модели обычно обучаются, чтобы специализироваться на достижении определенных эстетических результатов и содержания." 
+ ] + }, + "compositingCoherencePass": { + "heading": "Согласованность", + "paragraphs": [ + "Второй этап шумоподавления помогает исправить шов между изначальным изображением и перерисованной или расширенной частью." + ] + }, + "paramDenoisingStrength": { + "paragraphs": [ + "Количество шума, добавляемого к входному изображению.", + "0 приведет к идентичному изображению, а 1 - к совершенно новому." + ], + "heading": "Шумоподавление" + }, + "compositingStrength": { + "heading": "Сила", + "paragraphs": [ + null, + "То же, что параметр «Сила шумоподавления img2img»." + ] + }, + "paramNegativeConditioning": { + "paragraphs": [ + "Stable Diffusion пытается избежать указанных в отрицательном запросе концепций. Используйте это, чтобы исключить качества или объекты из вывода.", + "Поддерживает синтаксис Compel и встраивания." + ], + "heading": "Негативный запрос" + }, + "compositingBlurMethod": { + "heading": "Метод размытия", + "paragraphs": [ + "Метод размытия, примененный к замаскированной области." + ] + }, + "dynamicPromptsMaxPrompts": { + "heading": "Макс. запросы", + "paragraphs": [ + "Ограничивает количество запросов, которые могут быть созданы с помощью динамических запросов." + ] + }, + "paramCFGRescaleMultiplier": { + "heading": "Множитель масштабирования CFG", + "paragraphs": [ + "Множитель масштабирования CFG, используемый для моделей, обученных с использованием нулевого терминального SNR (ztsnr). Рекомендуемое значение 0,7." + ] + }, + "infillMethod": { + "paragraphs": [ + "Метод заполнения выбранной области." + ], + "heading": "Метод заполнения" + }, + "controlNetWeight": { + "heading": "Вес", + "paragraphs": [ + "Насколько сильно ControlNet повлияет на созданное изображение." + ] + }, + "controlNet": { + "heading": "ControlNet", + "paragraphs": [ + "Сети ControlNets обеспечивают руководство процессом генерации, помогая создавать изображения с контролируемой композицией, структурой или стилем, в зависимости от выбранной модели." 
+ ] + }, + "paramCFGScale": { + "heading": "Шкала точности (CFG)", + "paragraphs": [ + "Контролирует, насколько ваш запрос влияет на процесс генерации." + ] + }, + "controlNetControlMode": { + "paragraphs": [ + "Придает больший вес либо запросу, либо ControlNet." + ], + "heading": "Режим управления" + }, + "paramSteps": { + "heading": "Шаги", + "paragraphs": [ + "Количество шагов, которые будут выполнены в ходе генерации.", + "Большее количество шагов обычно приводит к созданию более качественных изображений, но требует больше времени на создание." + ] + }, + "paramPositiveConditioning": { + "heading": "Запрос", + "paragraphs": [ + "Направляет процесс генерации. Вы можете использовать любые слова и фразы.", + "Большинство моделей Stable Diffusion работают только с запросом на английском языке, но бывают исключения." + ] + }, + "lora": { + "heading": "Вес LoRA", + "paragraphs": [ + "Более высокий вес LoRA приведет к большему влиянию на конечное изображение." + ] + } + }, + "metadata": { + "seamless": "Бесшовность", + "positivePrompt": "Запрос", + "negativePrompt": "Негативный запрос", + "generationMode": "Режим генерации", + "Threshold": "Шумовой порог", + "metadata": "Метаданные", + "strength": "Сила img2img", + "seed": "Сид", + "imageDetails": "Детали изображения", + "perlin": "Шум Перлига", + "model": "Модель", + "noImageDetails": "Детали изображения не найдены", + "hiresFix": "Оптимизация высокого разрешения", + "cfgScale": "Шкала точности", + "fit": "Соответствие изображения к изображению", + "initImage": "Исходное изображение", + "recallParameters": "Вызов параметров", + "height": "Высота", + "variations": "Пары сид-высота", + "noMetaData": "Метаданные не найдены", + "width": "Ширина", + "vae": "VAE", + "createdBy": "Сделано", + "workflow": "Рабочий процесс", + "steps": "Шаги", + "scheduler": "Планировщик", + "noRecallParameters": "Параметры для вызова не найдены", + "cfgRescaleMultiplier": "$t(parameters.cfgRescaleMultiplier)" + }, + "queue": { + "status": 
"Статус", + "pruneSucceeded": "Из очереди удалено {{item_count}} выполненных элементов", + "cancelTooltip": "Отменить текущий элемент", + "queueEmpty": "Очередь пуста", + "pauseSucceeded": "Рендеринг приостановлен", + "in_progress": "В процессе", + "queueFront": "Добавить в начало очереди", + "notReady": "Невозможно поставить в очередь", + "batchFailedToQueue": "Не удалось поставить пакет в очередь", + "completed": "Выполнено", + "queueBack": "Добавить в очередь", + "batchValues": "Пакетные значения", + "cancelFailed": "Проблема с отменой элемента", + "queueCountPrediction": "Добавить {{predicted}} в очередь", + "batchQueued": "Пакетная очередь", + "pauseFailed": "Проблема с приостановкой рендеринга", + "clearFailed": "Проблема с очисткой очереди", + "queuedCount": "{{pending}} Ожидание", + "front": "передний", + "clearSucceeded": "Очередь очищена", + "pause": "Пауза", + "pruneTooltip": "Удалить {{item_count}} выполненных задач", + "cancelSucceeded": "Элемент отменен", + "batchQueuedDesc_one": "Добавлен {{count}} сеанс в {{direction}} очереди", + "batchQueuedDesc_few": "Добавлено {{count}} сеанса в {{direction}} очереди", + "batchQueuedDesc_many": "Добавлено {{count}} сеансов в {{direction}} очереди", + "graphQueued": "График поставлен в очередь", + "queue": "Очередь", + "batch": "Пакет", + "clearQueueAlertDialog": "Очистка очереди немедленно отменяет все элементы обработки и полностью очищает очередь.", + "pending": "В ожидании", + "completedIn": "Завершено за", + "resumeFailed": "Проблема с возобновлением рендеринга", + "clear": "Очистить", + "prune": "Сократить", + "total": "Всего", + "canceled": "Отменено", + "pruneFailed": "Проблема с сокращением очереди", + "cancelBatchSucceeded": "Пакет отменен", + "clearTooltip": "Отменить все и очистить очередь", + "current": "Текущий", + "pauseTooltip": "Приостановить рендеринг", + "failed": "Неудачно", + "cancelItem": "Отменить элемент", + "next": "Следующий", + "cancelBatch": "Отменить пакет", + "back": "задний", + 
"batchFieldValues": "Пакетные значения полей", + "cancel": "Отмена", + "session": "Сессия", + "time": "Время", + "queueTotal": "{{total}} Всего", + "resumeSucceeded": "Рендеринг возобновлен", + "enqueueing": "Пакетная очередь", + "resumeTooltip": "Возобновить рендеринг", + "queueMaxExceeded": "Превышено максимальное значение {{max_queue_size}}, будет пропущен {{skip}}", + "resume": "Продолжить", + "cancelBatchFailed": "Проблема с отменой пакета", + "clearQueueAlertDialog2": "Вы уверены, что хотите очистить очередь?", + "item": "Элемент", + "graphFailedToQueue": "Не удалось поставить график в очередь" + }, + "sdxl": { + "refinerStart": "Запуск перерисовщика", + "selectAModel": "Выберите модель", + "scheduler": "Планировщик", + "cfgScale": "Шкала точности (CFG)", + "negStylePrompt": "Негативный запрос стиля", + "noModelsAvailable": "Нет доступных моделей", + "refiner": "Перерисовщик", + "negAestheticScore": "Отрицательная эстетическая оценка", + "useRefiner": "Использовать перерисовщик", + "denoisingStrength": "Шумоподавление", + "refinermodel": "Модель перерисовщик", + "posAestheticScore": "Положительная эстетическая оценка", + "concatPromptStyle": "Объединение запроса и стиля", + "loading": "Загрузка...", + "steps": "Шаги", + "posStylePrompt": "Запрос стиля" + }, + "invocationCache": { + "useCache": "Использовать кэш", + "disable": "Отключить", + "misses": "Промахи в кэше", + "enableFailed": "Проблема с включением кэша вызовов", + "invocationCache": "Кэш вызовов", + "clearSucceeded": "Кэш вызовов очищен", + "enableSucceeded": "Кэш вызовов включен", + "clearFailed": "Проблема с очисткой кэша вызовов", + "hits": "Попадания в кэш", + "disableSucceeded": "Кэш вызовов отключен", + "disableFailed": "Проблема с отключением кэша вызовов", + "enable": "Включить", + "clear": "Очистить", + "maxCacheSize": "Максимальный размер кэша", + "cacheSize": "Размер кэша" + }, + "workflows": { + "saveWorkflowAs": "Сохранить рабочий процесс как", + "workflowEditorMenu": "Меню редактора 
рабочего процесса", + "noSystemWorkflows": "Нет системных рабочих процессов", + "workflowName": "Имя рабочего процесса", + "noUserWorkflows": "Нет пользовательских рабочих процессов", + "defaultWorkflows": "Рабочие процессы по умолчанию", + "saveWorkflow": "Сохранить рабочий процесс", + "openWorkflow": "Открытый рабочий процесс", + "clearWorkflowSearchFilter": "Очистить фильтр поиска рабочих процессов", + "workflowLibrary": "Библиотека", + "downloadWorkflow": "Скачать рабочий процесс", + "noRecentWorkflows": "Нет недавних рабочих процессов", + "workflowSaved": "Рабочий процесс сохранен", + "workflowIsOpen": "Рабочий процесс открыт", + "unnamedWorkflow": "Безымянный рабочий процесс", + "savingWorkflow": "Сохранение рабочего процесса...", + "problemLoading": "Проблема с загрузкой рабочих процессов", + "loading": "Загрузка рабочих процессов", + "searchWorkflows": "Поиск рабочих процессов", + "problemSavingWorkflow": "Проблема с сохранением рабочего процесса", + "deleteWorkflow": "Удалить рабочий процесс", + "workflows": "Рабочие процессы", + "noDescription": "Без описания", + "uploadWorkflow": "Загрузить рабочий процесс", + "userWorkflows": "Мои рабочие процессы" + }, + "embedding": { + "noEmbeddingsLoaded": "встраивания не загружены", + "noMatchingEmbedding": "Нет подходящих встраиваний", + "addEmbedding": "Добавить встраивание", + "incompatibleModel": "Несовместимая базовая модель:" + }, + "hrf": { + "enableHrf": "Включить исправление высокого разрешения", + "upscaleMethod": "Метод увеличения", + "enableHrfTooltip": "Сгенерируйте с более низким начальным разрешением, увеличьте его до базового разрешения, а затем запустите Image-to-Image.", + "metadata": { + "strength": "Сила исправления высокого разрешения", + "enabled": "Исправление высокого разрешения включено", + "method": "Метод исправления высокого разрешения" + }, + "hrf": "Исправление высокого разрешения", + "hrfStrength": "Сила исправления высокого разрешения", + "strengthTooltip": "Более низкие значения 
приводят к меньшему количеству деталей, что может уменьшить потенциальные артефакты." + }, + "models": { + "noLoRAsLoaded": "LoRA не загружены", + "noMatchingModels": "Нет подходящих моделей", + "esrganModel": "Модель ESRGAN", + "loading": "загрузка", + "noMatchingLoRAs": "Нет подходящих LoRA", + "noLoRAsAvailable": "Нет доступных LoRA", + "noModelsAvailable": "Нет доступных моделей", + "addLora": "Добавить LoRA", + "selectModel": "Выберите модель", + "noRefinerModelsInstalled": "Модели SDXL Refiner не установлены", + "noLoRAsInstalled": "Нет установленных LoRA", + "selectLoRA": "Выберите LoRA" + }, + "app": { + "storeNotInitialized": "Хранилище не инициализировано" } } diff --git a/invokeai/frontend/web/public/locales/zh_CN.json b/invokeai/frontend/web/public/locales/zh_CN.json index b1b6852f25..b0151b8e76 100644 --- a/invokeai/frontend/web/public/locales/zh_CN.json +++ b/invokeai/frontend/web/public/locales/zh_CN.json @@ -110,7 +110,17 @@ "copyError": "$t(gallery.copy) 错误", "input": "输入", "notInstalled": "非 $t(common.installed)", - "delete": "删除" + "delete": "删除", + "updated": "已更新", + "save": "保存", + "created": "已创建", + "prevPage": "上一页", + "unknownError": "未知错误", + "direction": "指向", + "orderBy": "排序方式:", + "nextPage": "下一页", + "saveAs": "保存为", + "unsaved": "未保存" }, "gallery": { "generations": "生成的图像", @@ -146,7 +156,11 @@ "image": "图像", "drop": "弃用", "dropOrUpload": "$t(gallery.drop) 或上传", - "dropToUpload": "$t(gallery.drop) 以上传" + "dropToUpload": "$t(gallery.drop) 以上传", + "problemDeletingImagesDesc": "有一张或多张图像无法被删除", + "problemDeletingImages": "删除图像时出现问题", + "unstarImage": "取消收藏图像", + "starImage": "收藏图像" }, "hotkeys": { "keyboardShortcuts": "键盘快捷键", @@ -724,7 +738,7 @@ "nodesUnrecognizedTypes": "无法加载。节点图有无法识别的节点类型", "nodesNotValidJSON": "无效的 JSON", "nodesNotValidGraph": "无效的 InvokeAi 节点图", - "nodesLoadedFailed": "节点图加载失败", + "nodesLoadedFailed": "节点加载失败", "modelAddedSimple": "已添加模型", "modelAdded": "已添加模型: {{modelName}}", "imageSavingFailed": "图像保存失败", @@ -760,7 
+774,10 @@ "problemImportingMask": "导入遮罩时出现问题", "baseModelChangedCleared_other": "基础模型已更改, 已清除或禁用 {{count}} 个不兼容的子模型", "setAsCanvasInitialImage": "设为画布初始图像", - "invalidUpload": "无效的上传" + "invalidUpload": "无效的上传", + "problemDeletingWorkflow": "删除工作流时出现问题", + "workflowDeleted": "已删除工作流", + "problemRetrievingWorkflow": "检索工作流时发生问题" }, "unifiedCanvas": { "layer": "图层", @@ -875,11 +892,8 @@ }, "nodes": { "zoomInNodes": "放大", - "resetWorkflowDesc": "是否确定要清空节点图?", - "resetWorkflow": "清空节点图", - "loadWorkflow": "读取节点图", + "loadWorkflow": "加载工作流", "zoomOutNodes": "缩小", - "resetWorkflowDesc2": "重置节点图将清除所有节点、边际和节点图详情.", "reloadNodeTemplates": "重载节点模板", "hideGraphNodes": "隐藏节点图信息", "fitViewportNodes": "自适应视图", @@ -888,7 +902,7 @@ "showLegendNodes": "显示字段类型图例", "hideLegendNodes": "隐藏字段类型图例", "showGraphNodes": "显示节点图信息", - "downloadWorkflow": "下载节点图 JSON", + "downloadWorkflow": "下载工作流 JSON", "workflowDescription": "简述", "versionUnknown": " 未知版本", "noNodeSelected": "无选中的节点", @@ -1103,7 +1117,9 @@ "collectionOrScalarFieldType": "{{name}} 合集 | 标量", "nodeVersion": "节点版本", "deletedInvalidEdge": "已删除无效的边缘 {{source}} -> {{target}}", - "unknownInput": "未知输入:{{name}}" + "unknownInput": "未知输入:{{name}}", + "prototypeDesc": "此调用是一个原型 (prototype)。它可能会在本项目更新期间发生破坏性更改,并且随时可能被删除。", + "betaDesc": "此调用尚处于测试阶段。在稳定之前,它可能会在项目更新期间发生破坏性更改。本项目计划长期支持这种调用。" }, "controlnet": { "resize": "直接缩放", @@ -1607,5 +1623,35 @@ "hrf": "高分辨率修复", "hrfStrength": "高分辨率修复强度", "strengthTooltip": "值越低细节越少,但可以减少部分潜在的伪影。" + }, + "workflows": { + "saveWorkflowAs": "保存工作流为", + "workflowEditorMenu": "工作流编辑器菜单", + "noSystemWorkflows": "无系统工作流", + "workflowName": "工作流名称", + "noUserWorkflows": "无用户工作流", + "defaultWorkflows": "默认工作流", + "saveWorkflow": "保存工作流", + "openWorkflow": "打开工作流", + "clearWorkflowSearchFilter": "清除工作流检索过滤器", + "workflowLibrary": "工作流库", + "downloadWorkflow": "下载工作流", + "noRecentWorkflows": "无最近工作流", + "workflowSaved": "已保存工作流", + "workflowIsOpen": "工作流已打开", + "unnamedWorkflow": "未命名的工作流", + "savingWorkflow": 
"保存工作流中...", + "problemLoading": "加载工作流时出现问题", + "loading": "加载工作流中", + "searchWorkflows": "检索工作流", + "problemSavingWorkflow": "保存工作流时出现问题", + "deleteWorkflow": "删除工作流", + "workflows": "工作流", + "noDescription": "无描述", + "uploadWorkflow": "上传工作流", + "userWorkflows": "我的工作流" + }, + "app": { + "storeNotInitialized": "商店尚未初始化" } } diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasImageToImageGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasImageToImageGraph.ts index 978e6bfaaa..f8d29ffa1d 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasImageToImageGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasImageToImageGraph.ts @@ -144,6 +144,7 @@ export const buildCanvasImageToImageGraph = ( type: 'l2i', id: CANVAS_OUTPUT, is_intermediate, + use_cache: false, }, }, edges: [ @@ -255,6 +256,7 @@ export const buildCanvasImageToImageGraph = ( is_intermediate, width: width, height: height, + use_cache: false, }; graph.edges.push( @@ -295,6 +297,7 @@ export const buildCanvasImageToImageGraph = ( id: CANVAS_OUTPUT, is_intermediate, fp32, + use_cache: false, }; (graph.nodes[IMAGE_TO_LATENTS] as ImageToLatentsInvocation).image = diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasInpaintGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasInpaintGraph.ts index 6253ce1f18..a05c46ad09 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasInpaintGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasInpaintGraph.ts @@ -191,6 +191,7 @@ export const buildCanvasInpaintGraph = ( id: CANVAS_OUTPUT, is_intermediate, reference: canvasInitImage, + use_cache: false, }, }, edges: [ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasOutpaintGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasOutpaintGraph.ts index 11573c21c2..dcbf563cf9 100644 --- 
a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasOutpaintGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasOutpaintGraph.ts @@ -199,6 +199,7 @@ export const buildCanvasOutpaintGraph = ( type: 'color_correct', id: CANVAS_OUTPUT, is_intermediate, + use_cache: false, }, }, edges: [ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLImageToImageGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLImageToImageGraph.ts index 73c03ee98f..b023295646 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLImageToImageGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLImageToImageGraph.ts @@ -266,6 +266,7 @@ export const buildCanvasSDXLImageToImageGraph = ( is_intermediate, width: width, height: height, + use_cache: false, }; graph.edges.push( @@ -306,6 +307,7 @@ export const buildCanvasSDXLImageToImageGraph = ( id: CANVAS_OUTPUT, is_intermediate, fp32, + use_cache: false, }; (graph.nodes[IMAGE_TO_LATENTS] as ImageToLatentsInvocation).image = diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLInpaintGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLInpaintGraph.ts index de1dd0dfd2..10eecde0d9 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLInpaintGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLInpaintGraph.ts @@ -196,6 +196,7 @@ export const buildCanvasSDXLInpaintGraph = ( id: CANVAS_OUTPUT, is_intermediate, reference: canvasInitImage, + use_cache: false, }, }, edges: [ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLOutpaintGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLOutpaintGraph.ts index 2f8b4fd653..7c75bea2be 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLOutpaintGraph.ts +++ 
b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLOutpaintGraph.ts @@ -204,6 +204,7 @@ export const buildCanvasSDXLOutpaintGraph = ( type: 'color_correct', id: CANVAS_OUTPUT, is_intermediate, + use_cache: false, }, }, edges: [ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLTextToImageGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLTextToImageGraph.ts index 3ddff5a9a1..e91d556476 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLTextToImageGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasSDXLTextToImageGraph.ts @@ -258,6 +258,7 @@ export const buildCanvasSDXLTextToImageGraph = ( is_intermediate, width: width, height: height, + use_cache: false, }; graph.edges.push( @@ -288,6 +289,7 @@ export const buildCanvasSDXLTextToImageGraph = ( id: CANVAS_OUTPUT, is_intermediate, fp32, + use_cache: false, }; graph.edges.push({ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasTextToImageGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasTextToImageGraph.ts index 95e5763449..933365ce23 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasTextToImageGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildCanvasTextToImageGraph.ts @@ -246,6 +246,7 @@ export const buildCanvasTextToImageGraph = ( is_intermediate, width: width, height: height, + use_cache: false, }; graph.edges.push( @@ -276,6 +277,7 @@ export const buildCanvasTextToImageGraph = ( id: CANVAS_OUTPUT, is_intermediate, fp32, + use_cache: false, }; graph.edges.push({ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearImageToImageGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearImageToImageGraph.ts index 454c9f31a0..6b7b8f05c6 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearImageToImageGraph.ts +++ 
b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearImageToImageGraph.ts @@ -143,6 +143,7 @@ export const buildLinearImageToImageGraph = ( // }, fp32, is_intermediate, + use_cache: false, }, }, edges: [ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearSDXLImageToImageGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearSDXLImageToImageGraph.ts index 6b1a55f7d0..d589116c17 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearSDXLImageToImageGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearSDXLImageToImageGraph.ts @@ -154,6 +154,7 @@ export const buildLinearSDXLImageToImageGraph = ( // }, fp32, is_intermediate, + use_cache: false, }, }, edges: [ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearSDXLTextToImageGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearSDXLTextToImageGraph.ts index bf01facf64..238a9054b1 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearSDXLTextToImageGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearSDXLTextToImageGraph.ts @@ -127,6 +127,7 @@ export const buildLinearSDXLTextToImageGraph = ( id: LATENTS_TO_IMAGE, fp32, is_intermediate, + use_cache: false, }, }, edges: [ diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearTextToImageGraph.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearTextToImageGraph.ts index aea14c2fe4..487157c869 100644 --- a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearTextToImageGraph.ts +++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearTextToImageGraph.ts @@ -146,6 +146,7 @@ export const buildLinearTextToImageGraph = ( id: LATENTS_TO_IMAGE, fp32, is_intermediate, + use_cache: false, }, }, edges: [ diff --git a/invokeai/frontend/web/src/features/nodes/util/workflow/migrations.ts 
b/invokeai/frontend/web/src/features/nodes/util/workflow/migrations.ts index fddd4de495..a8212f06b6 100644 --- a/invokeai/frontend/web/src/features/nodes/util/workflow/migrations.ts +++ b/invokeai/frontend/web/src/features/nodes/util/workflow/migrations.ts @@ -64,7 +64,10 @@ const migrateV1toV2 = (workflowToMigrate: WorkflowV1): WorkflowV2 => { const nodePack = invocationTemplate ? invocationTemplate.nodePack : t('common.unknown'); + (node.data as unknown as InvocationNodeData).nodePack = nodePack; + // Fallback to 1.0.0 if not specified - this matches the behavior of the backend + node.data.version ||= '1.0.0'; } }); // Bump version diff --git a/invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/ResetWorkflowEditorMenuItem.tsx b/invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/NewWorkflowMenuItem.tsx similarity index 63% rename from invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/ResetWorkflowEditorMenuItem.tsx rename to invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/NewWorkflowMenuItem.tsx index 5d7e58e198..c4497c5ddb 100644 --- a/invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/ResetWorkflowEditorMenuItem.tsx +++ b/invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/NewWorkflowMenuItem.tsx @@ -11,44 +11,48 @@ import { Text, useDisclosure, } from '@chakra-ui/react'; -import { useAppDispatch } from 'app/store/storeHooks'; +import { useAppDispatch, useAppSelector } from 'app/store/storeHooks'; import { nodeEditorReset } from 'features/nodes/store/nodesSlice'; import { addToast } from 'features/system/store/systemSlice'; import { makeToast } from 'features/system/util/makeToast'; import { memo, useCallback, useRef } from 'react'; import { useTranslation } from 'react-i18next'; -import { FaTrash } from 'react-icons/fa'; +import { FaCircleNodes } from 'react-icons/fa6'; 
-const ResetWorkflowEditorMenuItem = () => { +const NewWorkflowMenuItem = () => { const { t } = useTranslation(); const dispatch = useAppDispatch(); const { isOpen, onOpen, onClose } = useDisclosure(); const cancelRef = useRef(null); + const isTouched = useAppSelector((state) => state.workflow.isTouched); - const handleConfirmClear = useCallback(() => { + const handleNewWorkflow = useCallback(() => { dispatch(nodeEditorReset()); dispatch( addToast( makeToast({ - title: t('workflows.workflowEditorReset'), + title: t('workflows.newWorkflowCreated'), status: 'success', }) ) ); onClose(); - }, [dispatch, t, onClose]); + }, [dispatch, onClose, t]); + + const onClick = useCallback(() => { + if (!isTouched) { + handleNewWorkflow(); + return; + } + onOpen(); + }, [handleNewWorkflow, isTouched, onOpen]); return ( <> - } - sx={{ color: 'error.600', _dark: { color: 'error.300' } }} - onClick={onOpen} - > - {t('nodes.resetWorkflow')} + } onClick={onClick}> + {t('nodes.newWorkflow')} { - {t('nodes.resetWorkflow')} + {t('nodes.newWorkflow')} - {t('nodes.resetWorkflowDesc')} - {t('nodes.resetWorkflowDesc2')} + {t('nodes.newWorkflowDesc')} + {t('nodes.newWorkflowDesc2')} @@ -75,7 +79,7 @@ const ResetWorkflowEditorMenuItem = () => { - @@ -85,4 +89,4 @@ const ResetWorkflowEditorMenuItem = () => { ); }; -export default memo(ResetWorkflowEditorMenuItem); +export default memo(NewWorkflowMenuItem); diff --git a/invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/WorkflowLibraryMenu.tsx b/invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/WorkflowLibraryMenu.tsx index a19b9cd6b5..c07bb8f5cd 100644 --- a/invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/WorkflowLibraryMenu.tsx +++ b/invokeai/frontend/web/src/features/workflowLibrary/components/WorkflowLibraryMenu/WorkflowLibraryMenu.tsx @@ -9,7 +9,7 @@ import IAIIconButton from 'common/components/IAIIconButton'; import { useGlobalMenuCloseTrigger 
} from 'common/hooks/useGlobalMenuCloseTrigger'; import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus'; import DownloadWorkflowMenuItem from 'features/workflowLibrary/components/WorkflowLibraryMenu/DownloadWorkflowMenuItem'; -import ResetWorkflowEditorMenuItem from 'features/workflowLibrary/components/WorkflowLibraryMenu/ResetWorkflowEditorMenuItem'; +import NewWorkflowMenuItem from 'features/workflowLibrary/components/WorkflowLibraryMenu/NewWorkflowMenuItem'; import SaveWorkflowAsMenuItem from 'features/workflowLibrary/components/WorkflowLibraryMenu/SaveWorkflowAsMenuItem'; import SaveWorkflowMenuItem from 'features/workflowLibrary/components/WorkflowLibraryMenu/SaveWorkflowMenuItem'; import SettingsMenuItem from 'features/workflowLibrary/components/WorkflowLibraryMenu/SettingsMenuItem'; @@ -39,7 +39,7 @@ const WorkflowLibraryMenu = () => { {isWorkflowLibraryEnabled && } - + diff --git a/mkdocs.yml b/mkdocs.yml index d030e3d6fc..7c67a2777a 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -101,6 +101,8 @@ plugins: extra_javascript: - https://unpkg.com/tablesort@5.3.0/dist/tablesort.min.js - javascripts/tablesort.js + - https://widget.kapa.ai/kapa-widget.bundle.js + - javascript/init_kapa_widget.js extra: analytics: diff --git a/pyproject.toml b/pyproject.toml index 5a07946e05..0b8d258e7d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,7 +32,7 @@ classifiers = [ 'Topic :: Scientific/Engineering :: Image Processing', ] dependencies = [ - "accelerate~=0.24.0", + "accelerate~=0.25.0", "albumentations", "basicsr", "click", @@ -41,15 +41,15 @@ dependencies = [ "controlnet-aux>=0.0.6", "timm==0.6.13", # needed to override timm latest in controlnet_aux, see https://github.com/isl-org/ZoeDepth/issues/26 "datasets", - "diffusers[torch]~=0.23.0", + "diffusers[torch]~=0.24.0", "dnspython~=2.4.0", "dynamicprompts", "easing-functions", "einops", "facexlib", - "fastapi~=0.104.1", + "fastapi~=0.105.0", "fastapi-events~=0.9.1", - "huggingface-hub~=0.16.4", + 
"huggingface-hub~=0.19.4", "imohash", "invisible-watermark~=0.2.0", # needed to install SDXL base and refiner using their repo_ids "matplotlib", # needed for plotting of Penner easing functions @@ -80,11 +80,11 @@ dependencies = [ "semver~=3.0.1", "send2trash", "test-tube~=0.7.5", - "torch==2.1.0", - "torchvision==0.16.0", + "torch==2.1.1", + "torchvision==0.16.1", "torchmetrics~=0.11.0", "torchsde~=0.2.5", - "transformers~=4.35.0", + "transformers~=4.36.0", "uvicorn[standard]~=0.21.1", "windows-curses; sys_platform=='win32'", ] @@ -107,7 +107,7 @@ dependencies = [ "pytest-datadir", ] "xformers" = [ - "xformers==0.0.22post7; sys_platform!='darwin'", + "xformers==0.0.23; sys_platform!='darwin'", "triton; sys_platform=='linux'", ] "onnx" = ["onnxruntime"] diff --git a/tests/aa_nodes/test_graph_execution_state.py b/tests/aa_nodes/test_graph_execution_state.py index 5fd0b4d70d..04dc6af621 100644 --- a/tests/aa_nodes/test_graph_execution_state.py +++ b/tests/aa_nodes/test_graph_execution_state.py @@ -28,8 +28,8 @@ from invokeai.app.services.shared.graph import ( IterateInvocation, LibraryGraph, ) -from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase from invokeai.backend.util.logging import InvokeAILogger +from tests.fixtures.sqlite_database import create_mock_sqlite_database from .test_invoker import create_edge @@ -49,7 +49,8 @@ def simple_graph(): @pytest.fixture def mock_services() -> InvocationServices: configuration = InvokeAIAppConfig(use_memory_db=True, node_cache_size=0) - db = SqliteDatabase(configuration, InvokeAILogger.get_logger()) + logger = InvokeAILogger.get_logger() + db = create_mock_sqlite_database(configuration, logger) # NOTE: none of these are actually called by the test invocations graph_execution_manager = SqliteItemStorage[GraphExecutionState](db=db, table_name="graph_executions") return InvocationServices( diff --git a/tests/aa_nodes/test_invoker.py b/tests/aa_nodes/test_invoker.py index efa5f27a74..288beb6e0b 100644 
--- a/tests/aa_nodes/test_invoker.py +++ b/tests/aa_nodes/test_invoker.py @@ -4,6 +4,7 @@ import pytest from invokeai.app.services.config.config_default import InvokeAIAppConfig from invokeai.backend.util.logging import InvokeAILogger +from tests.fixtures.sqlite_database import create_mock_sqlite_database # This import must happen before other invoke imports or test in other files(!!) break from .test_nodes import ( # isort: split @@ -24,7 +25,6 @@ from invokeai.app.services.invoker import Invoker from invokeai.app.services.item_storage.item_storage_sqlite import SqliteItemStorage from invokeai.app.services.session_queue.session_queue_common import DEFAULT_QUEUE_ID from invokeai.app.services.shared.graph import Graph, GraphExecutionState, GraphInvocation, LibraryGraph -from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase @pytest.fixture @@ -52,8 +52,9 @@ def graph_with_subgraph(): # the test invocations. @pytest.fixture def mock_services() -> InvocationServices: - db = SqliteDatabase(InvokeAIAppConfig(use_memory_db=True), InvokeAILogger.get_logger()) configuration = InvokeAIAppConfig(use_memory_db=True, node_cache_size=0) + logger = InvokeAILogger.get_logger() + db = create_mock_sqlite_database(configuration, logger) # NOTE: none of these are actually called by the test invocations graph_execution_manager = SqliteItemStorage[GraphExecutionState](db=db, table_name="graph_executions") diff --git a/tests/aa_nodes/test_sqlite.py b/tests/aa_nodes/test_sqlite.py index 439f6d0dd2..32e8c98ac1 100644 --- a/tests/aa_nodes/test_sqlite.py +++ b/tests/aa_nodes/test_sqlite.py @@ -15,8 +15,11 @@ class TestModel(BaseModel): @pytest.fixture def db() -> SqliteItemStorage[TestModel]: - sqlite_db = SqliteDatabase(InvokeAIAppConfig(use_memory_db=True), InvokeAILogger.get_logger()) - sqlite_item_storage = SqliteItemStorage[TestModel](db=sqlite_db, table_name="test", id_field="id") + config = InvokeAIAppConfig(use_memory_db=True) + logger = 
InvokeAILogger.get_logger() + db_path = None if config.use_memory_db else config.db_path + db = SqliteDatabase(db_path=db_path, logger=logger, verbose=config.log_sql) + sqlite_item_storage = SqliteItemStorage[TestModel](db=db, table_name="test", id_field="id") return sqlite_item_storage diff --git a/tests/app/services/model_install/test_model_install.py b/tests/app/services/model_install/test_model_install.py index eec6e033e5..310bcfa0c1 100644 --- a/tests/app/services/model_install/test_model_install.py +++ b/tests/app/services/model_install/test_model_install.py @@ -18,9 +18,9 @@ from invokeai.app.services.model_install import ( ModelInstallServiceBase, ) from invokeai.app.services.model_records import ModelRecordServiceBase, ModelRecordServiceSQL, UnknownModelException -from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase from invokeai.backend.model_manager.config import BaseModelType, ModelType from invokeai.backend.util.logging import InvokeAILogger +from tests.fixtures.sqlite_database import create_mock_sqlite_database @pytest.fixture @@ -37,9 +37,12 @@ def app_config(datadir: Path) -> InvokeAIAppConfig: @pytest.fixture -def store(app_config: InvokeAIAppConfig) -> ModelRecordServiceBase: - database = SqliteDatabase(app_config, InvokeAILogger.get_logger(config=app_config)) - store: ModelRecordServiceBase = ModelRecordServiceSQL(database) +def store( + app_config: InvokeAIAppConfig, +) -> ModelRecordServiceBase: + logger = InvokeAILogger.get_logger(config=app_config) + db = create_mock_sqlite_database(app_config, logger) + store: ModelRecordServiceBase = ModelRecordServiceSQL(db) return store diff --git a/tests/app/services/model_records/test_model_records_sql.py b/tests/app/services/model_records/test_model_records_sql.py index 5c8bbb4048..a13442dc26 100644 --- a/tests/app/services/model_records/test_model_records_sql.py +++ b/tests/app/services/model_records/test_model_records_sql.py @@ -3,6 +3,7 @@ Test the refactored model config 
classes. """ from hashlib import sha256 +from typing import Any import pytest @@ -13,7 +14,6 @@ from invokeai.app.services.model_records import ( ModelRecordServiceSQL, UnknownModelException, ) -from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase from invokeai.backend.model_manager.config import ( BaseModelType, MainCheckpointConfig, @@ -23,13 +23,16 @@ from invokeai.backend.model_manager.config import ( VaeDiffusersConfig, ) from invokeai.backend.util.logging import InvokeAILogger +from tests.fixtures.sqlite_database import create_mock_sqlite_database @pytest.fixture -def store(datadir) -> ModelRecordServiceBase: +def store( + datadir: Any, +) -> ModelRecordServiceBase: config = InvokeAIAppConfig(root=datadir) logger = InvokeAILogger.get_logger(config=config) - db = SqliteDatabase(config, logger) + db = create_mock_sqlite_database(config, logger) return ModelRecordServiceSQL(db) diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/fixtures/sqlite_database.py b/tests/fixtures/sqlite_database.py new file mode 100644 index 0000000000..7b2dd029a9 --- /dev/null +++ b/tests/fixtures/sqlite_database.py @@ -0,0 +1,13 @@ +from logging import Logger +from unittest import mock + +from invokeai.app.services.config.config_default import InvokeAIAppConfig +from invokeai.app.services.image_files.image_files_base import ImageFileStorageBase +from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase +from invokeai.app.services.shared.sqlite.sqlite_util import init_db + + +def create_mock_sqlite_database(config: InvokeAIAppConfig, logger: Logger) -> SqliteDatabase: + image_files = mock.Mock(spec=ImageFileStorageBase) + db = init_db(config=config, logger=logger, image_files=image_files) + return db diff --git a/tests/test_sqlite_migrator.py b/tests/test_sqlite_migrator.py new file mode 100644 index 0000000000..816b8b6a10 --- /dev/null +++ 
b/tests/test_sqlite_migrator.py @@ -0,0 +1,272 @@ +import sqlite3 +from contextlib import closing +from logging import Logger +from pathlib import Path +from tempfile import TemporaryDirectory + +import pytest +from pydantic import ValidationError + +from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase +from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import ( + MigrateCallback, + Migration, + MigrationError, + MigrationSet, + MigrationVersionError, +) +from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_impl import ( + SqliteMigrator, +) + + +@pytest.fixture +def logger() -> Logger: + return Logger("test_sqlite_migrator") + + +@pytest.fixture +def memory_db_conn() -> sqlite3.Connection: + return sqlite3.connect(":memory:") + + +@pytest.fixture +def memory_db_cursor(memory_db_conn: sqlite3.Connection) -> sqlite3.Cursor: + return memory_db_conn.cursor() + + +@pytest.fixture +def migrator(logger: Logger) -> SqliteMigrator: + db = SqliteDatabase(db_path=None, logger=logger, verbose=False) + return SqliteMigrator(db=db) + + +@pytest.fixture +def no_op_migrate_callback() -> MigrateCallback: + def no_op_migrate(cursor: sqlite3.Cursor, **kwargs) -> None: + pass + + return no_op_migrate + + +@pytest.fixture +def migration_no_op(no_op_migrate_callback: MigrateCallback) -> Migration: + return Migration(from_version=0, to_version=1, callback=no_op_migrate_callback) + + +@pytest.fixture +def migrate_callback_create_table_of_name() -> MigrateCallback: + def migrate(cursor: sqlite3.Cursor, **kwargs) -> None: + table_name = kwargs["table_name"] + cursor.execute(f"CREATE TABLE {table_name} (id INTEGER PRIMARY KEY);") + + return migrate + + +@pytest.fixture +def migrate_callback_create_test_table() -> MigrateCallback: + def migrate(cursor: sqlite3.Cursor, **kwargs) -> None: + cursor.execute("CREATE TABLE test (id INTEGER PRIMARY KEY);") + + return migrate + + +@pytest.fixture +def 
migration_create_test_table(migrate_callback_create_test_table: MigrateCallback) -> Migration: + return Migration(from_version=0, to_version=1, callback=migrate_callback_create_test_table) + + +@pytest.fixture +def failing_migration() -> Migration: + def failing_migration(cursor: sqlite3.Cursor, **kwargs) -> None: + raise Exception("Bad migration") + + return Migration(from_version=0, to_version=1, callback=failing_migration) + + +@pytest.fixture +def failing_migrate_callback() -> MigrateCallback: + def failing_migrate(cursor: sqlite3.Cursor, **kwargs) -> None: + raise Exception("Bad migration") + + return failing_migrate + + +def create_migrate(i: int) -> MigrateCallback: + def migrate(cursor: sqlite3.Cursor, **kwargs) -> None: + cursor.execute(f"CREATE TABLE test{i} (id INTEGER PRIMARY KEY);") + + return migrate + + +def test_migration_to_version_is_one_gt_from_version(no_op_migrate_callback: MigrateCallback) -> None: + with pytest.raises(ValidationError, match="to_version must be one greater than from_version"): + Migration(from_version=0, to_version=2, callback=no_op_migrate_callback) + # not raising is sufficient + Migration(from_version=1, to_version=2, callback=no_op_migrate_callback) + + +def test_migration_hash(no_op_migrate_callback: MigrateCallback) -> None: + migration = Migration(from_version=0, to_version=1, callback=no_op_migrate_callback) + assert hash(migration) == hash((0, 1)) + + +def test_migration_set_add_migration(migrator: SqliteMigrator, migration_no_op: Migration) -> None: + migration = migration_no_op + migrator._migration_set.register(migration) + assert migration in migrator._migration_set._migrations + + +def test_migration_set_may_not_register_dupes( + migrator: SqliteMigrator, no_op_migrate_callback: MigrateCallback +) -> None: + migrate_0_to_1_a = Migration(from_version=0, to_version=1, callback=no_op_migrate_callback) + migrate_0_to_1_b = Migration(from_version=0, to_version=1, callback=no_op_migrate_callback) + 
migrator._migration_set.register(migrate_0_to_1_a) + with pytest.raises(MigrationVersionError, match=r"Migration with from_version or to_version already registered"): + migrator._migration_set.register(migrate_0_to_1_b) + migrate_1_to_2_a = Migration(from_version=1, to_version=2, callback=no_op_migrate_callback) + migrate_1_to_2_b = Migration(from_version=1, to_version=2, callback=no_op_migrate_callback) + migrator._migration_set.register(migrate_1_to_2_a) + with pytest.raises(MigrationVersionError, match=r"Migration with from_version or to_version already registered"): + migrator._migration_set.register(migrate_1_to_2_b) + + +def test_migration_set_gets_migration(migration_no_op: Migration) -> None: + migration_set = MigrationSet() + migration_set.register(migration_no_op) + assert migration_set.get(0) == migration_no_op + assert migration_set.get(1) is None + + +def test_migration_set_validates_migration_chain(no_op_migrate_callback: MigrateCallback) -> None: + migration_set = MigrationSet() + migration_set.register(Migration(from_version=1, to_version=2, callback=no_op_migrate_callback)) + with pytest.raises(MigrationError, match="Migration chain is fragmented"): + # no migration from 0 to 1 + migration_set.validate_migration_chain() + migration_set.register(Migration(from_version=0, to_version=1, callback=no_op_migrate_callback)) + migration_set.validate_migration_chain() + migration_set.register(Migration(from_version=2, to_version=3, callback=no_op_migrate_callback)) + migration_set.validate_migration_chain() + migration_set.register(Migration(from_version=4, to_version=5, callback=no_op_migrate_callback)) + with pytest.raises(MigrationError, match="Migration chain is fragmented"): + # no migration from 3 to 4 + migration_set.validate_migration_chain() + + +def test_migration_set_counts_migrations(no_op_migrate_callback: MigrateCallback) -> None: + migration_set = MigrationSet() + assert migration_set.count == 0 + 
migration_set.register(Migration(from_version=0, to_version=1, callback=no_op_migrate_callback)) + assert migration_set.count == 1 + migration_set.register(Migration(from_version=1, to_version=2, callback=no_op_migrate_callback)) + assert migration_set.count == 2 + + +def test_migration_set_gets_latest_version(no_op_migrate_callback: MigrateCallback) -> None: + migration_set = MigrationSet() + assert migration_set.latest_version == 0 + migration_set.register(Migration(from_version=1, to_version=2, callback=no_op_migrate_callback)) + assert migration_set.latest_version == 2 + migration_set.register(Migration(from_version=0, to_version=1, callback=no_op_migrate_callback)) + assert migration_set.latest_version == 2 + + +def test_migration_runs(memory_db_cursor: sqlite3.Cursor, migrate_callback_create_test_table: MigrateCallback) -> None: + migration = Migration( + from_version=0, + to_version=1, + callback=migrate_callback_create_test_table, + ) + migration.callback(memory_db_cursor) + memory_db_cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='test';") + assert memory_db_cursor.fetchone() is not None + + +def test_migrator_registers_migration(migrator: SqliteMigrator, migration_no_op: Migration) -> None: + migration = migration_no_op + migrator.register_migration(migration) + assert migration in migrator._migration_set._migrations + + +def test_migrator_creates_migrations_table(migrator: SqliteMigrator) -> None: + cursor = migrator._db.conn.cursor() + migrator._create_migrations_table(cursor) + cursor.execute("SELECT * FROM sqlite_master WHERE type='table' AND name='migrations';") + assert cursor.fetchone() is not None + + +def test_migrator_migration_sets_version(migrator: SqliteMigrator, migration_no_op: Migration) -> None: + cursor = migrator._db.conn.cursor() + migrator._create_migrations_table(cursor) + migrator.register_migration(migration_no_op) + migrator.run_migrations() + cursor.execute("SELECT MAX(version) FROM migrations;") + 
assert cursor.fetchone()[0] == 1 + + +def test_migrator_gets_current_version(migrator: SqliteMigrator, migration_no_op: Migration) -> None: + cursor = migrator._db.conn.cursor() + assert migrator._get_current_version(cursor) == 0 + migrator._create_migrations_table(cursor) + assert migrator._get_current_version(cursor) == 0 + migrator.register_migration(migration_no_op) + migrator.run_migrations() + assert migrator._get_current_version(cursor) == 1 + + +def test_migrator_runs_single_migration(migrator: SqliteMigrator, migration_create_test_table: Migration) -> None: + cursor = migrator._db.conn.cursor() + migrator._create_migrations_table(cursor) + migrator._run_migration(migration_create_test_table) + assert migrator._get_current_version(cursor) == 1 + cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='test';") + assert cursor.fetchone() is not None + + +def test_migrator_runs_all_migrations_in_memory(migrator: SqliteMigrator) -> None: + cursor = migrator._db.conn.cursor() + migrations = [Migration(from_version=i, to_version=i + 1, callback=create_migrate(i)) for i in range(0, 3)] + for migration in migrations: + migrator.register_migration(migration) + migrator.run_migrations() + assert migrator._get_current_version(cursor) == 3 + + +def test_migrator_runs_all_migrations_file(logger: Logger) -> None: + with TemporaryDirectory() as tempdir: + original_db_path = Path(tempdir) / "invokeai.db" + db = SqliteDatabase(db_path=original_db_path, logger=logger, verbose=False) + migrator = SqliteMigrator(db=db) + migrations = [Migration(from_version=i, to_version=i + 1, callback=create_migrate(i)) for i in range(0, 3)] + for migration in migrations: + migrator.register_migration(migration) + migrator.run_migrations() + with closing(sqlite3.connect(original_db_path)) as original_db_conn: + original_db_cursor = original_db_conn.cursor() + assert SqliteMigrator._get_current_version(original_db_cursor) == 3 + # Must manually close else we get an error 
on Windows + db.conn.close() + + +def test_migrator_makes_no_changes_on_failed_migration( + migrator: SqliteMigrator, migration_no_op: Migration, failing_migrate_callback: MigrateCallback +) -> None: + cursor = migrator._db.conn.cursor() + migrator.register_migration(migration_no_op) + migrator.run_migrations() + assert migrator._get_current_version(cursor) == 1 + migrator.register_migration(Migration(from_version=1, to_version=2, callback=failing_migrate_callback)) + with pytest.raises(MigrationError, match="Bad migration"): + migrator.run_migrations() + assert migrator._get_current_version(cursor) == 1 + + +def test_idempotent_migrations(migrator: SqliteMigrator, migration_create_test_table: Migration) -> None: + cursor = migrator._db.conn.cursor() + migrator.register_migration(migration_create_test_table) + migrator.run_migrations() + # not throwing is sufficient + migrator.run_migrations() + assert migrator._get_current_version(cursor) == 1