Merge branch 'main' into feat/db/graceful-migrate-workflows

psychedelicious 2023-12-20 21:54:21 +11:00 committed by GitHub
commit 21ed2d42cd
9 changed files with 194 additions and 60 deletions

View File

@@ -59,14 +59,16 @@ RUN --mount=type=cache,target=/root/.cache/pip \
# #### Build the Web UI ------------------------------------
FROM node:18 AS web-builder
FROM node:18-slim AS web-builder
ENV PNPM_HOME="/pnpm"
ENV PATH="$PNPM_HOME:$PATH"
RUN corepack enable
WORKDIR /build
COPY invokeai/frontend/web/ ./
RUN --mount=type=cache,target=/usr/lib/node_modules \
npm install --include dev
RUN --mount=type=cache,target=/usr/lib/node_modules \
yarn vite build
RUN --mount=type=cache,target=/pnpm/store \
pnpm install --frozen-lockfile
RUN pnpm run build
#### Runtime stage ---------------------------------------

View File

@@ -5,6 +5,7 @@ from invokeai.app.services.image_files.image_files_base import ImageFileStorageB
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_1 import build_migration_1
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_2 import build_migration_2
from invokeai.app.services.shared.sqlite_migrator.migrations.migration_3 import build_migration_3
from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_impl import SqliteMigrator
@@ -27,6 +28,7 @@ def init_db(config: InvokeAIAppConfig, logger: Logger, image_files: ImageFileSto
migrator = SqliteMigrator(db=db)
migrator.register_migration(build_migration_1())
migrator.register_migration(build_migration_2(image_files=image_files, logger=logger))
migrator.register_migration(build_migration_3())
migrator.run_migrations()
return db
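Each migration the migrator runs is a Migration object pairing a from_version/to_version with a callback that receives the database cursor. A minimal sketch of how a hypothetical next step would be added, following the same pattern as build_migration_3() below (Migration4Callback and the ALTER TABLE it performs are illustrative only, not part of this commit):

    import sqlite3

    from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import Migration


    class Migration4Callback:
        def __call__(self, cursor: sqlite3.Cursor) -> None:
            # Each callback receives the migrator's cursor and applies its schema change in place.
            cursor.execute("ALTER TABLE model_config ADD COLUMN notes TEXT;")  # illustrative change only


    def build_migration_4() -> Migration:
        # from_version/to_version tell the migrator where this step sits in the chain.
        return Migration(from_version=3, to_version=4, callback=Migration4Callback())

    # Registered alongside the others in init_db():
    #   migrator.register_migration(build_migration_4())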

View File

@@ -11,6 +11,8 @@ from invokeai.app.services.workflow_records.workflow_records_common import (
UnsafeWorkflowWithVersionValidator,
)
from .util.migrate_yaml_config_1 import MigrateModelYamlToDb1
class Migration2Callback:
def __init__(self, image_files: ImageFileStorageBase, logger: Logger):
@@ -24,6 +26,7 @@ class Migration2Callback:
self._add_workflow_library(cursor)
self._drop_model_manager_metadata(cursor)
self._recreate_model_config(cursor)
self._migrate_model_config_records(cursor)
self._migrate_embedded_workflows(cursor)
def _add_images_has_workflow(self, cursor: sqlite3.Cursor) -> None:
@@ -131,6 +134,11 @@ class Migration2Callback:
"""
)
def _migrate_model_config_records(self, cursor: sqlite3.Cursor) -> None:
"""After updating the model config table, we repopulate it."""
model_record_migrator = MigrateModelYamlToDb1(cursor)
model_record_migrator.migrate()
def _migrate_embedded_workflows(self, cursor: sqlite3.Cursor) -> None:
"""
In the v3.5.0 release, InvokeAI changed how it handles embedded workflows. The `images` table in

View File

@@ -0,0 +1,75 @@
import sqlite3
from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import Migration
from .util.migrate_yaml_config_1 import MigrateModelYamlToDb1
class Migration3Callback:
def __init__(self) -> None:
pass
def __call__(self, cursor: sqlite3.Cursor) -> None:
self._drop_model_manager_metadata(cursor)
self._recreate_model_config(cursor)
self._migrate_model_config_records(cursor)
def _drop_model_manager_metadata(self, cursor: sqlite3.Cursor) -> None:
"""Drops the `model_manager_metadata` table."""
cursor.execute("DROP TABLE IF EXISTS model_manager_metadata;")
def _recreate_model_config(self, cursor: sqlite3.Cursor) -> None:
"""
Drops the `model_config` table, recreating it.
In 3.4.0, this table used explicit columns, but in 3.5.0 it was changed to use json_extract.
Because this table is not used in production, we are able to simply drop it and recreate it.
"""
cursor.execute("DROP TABLE IF EXISTS model_config;")
cursor.execute(
"""--sql
CREATE TABLE IF NOT EXISTS model_config (
id TEXT NOT NULL PRIMARY KEY,
-- The next 3 fields are enums in python, unrestricted string here
base TEXT GENERATED ALWAYS as (json_extract(config, '$.base')) VIRTUAL NOT NULL,
type TEXT GENERATED ALWAYS as (json_extract(config, '$.type')) VIRTUAL NOT NULL,
name TEXT GENERATED ALWAYS as (json_extract(config, '$.name')) VIRTUAL NOT NULL,
path TEXT GENERATED ALWAYS as (json_extract(config, '$.path')) VIRTUAL NOT NULL,
format TEXT GENERATED ALWAYS as (json_extract(config, '$.format')) VIRTUAL NOT NULL,
original_hash TEXT, -- could be null
-- Serialized JSON representation of the whole config object,
-- which will contain additional fields from subclasses
config TEXT NOT NULL,
created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
-- Updated via trigger
updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
-- unique constraint on combo of name, base and type
UNIQUE(name, base, type)
);
"""
)
def _migrate_model_config_records(self, cursor: sqlite3.Cursor) -> None:
"""After updating the model config table, we repopulate it."""
model_record_migrator = MigrateModelYamlToDb1(cursor)
model_record_migrator.migrate()
def build_migration_3() -> Migration:
"""
Build the migration from database version 2 to 3.
This migration does the following:
- Drops the `model_config` table, recreating it
- Migrates data from `models.yaml` into the `model_config` table
"""
migration_3 = Migration(
from_version=2,
to_version=3,
callback=Migration3Callback(),
)
return migration_3
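Because base, type, name, path and format are virtual columns computed with json_extract, callers only ever write id, original_hash and the JSON config; the other fields materialize on read. A rough, self-contained sketch of that behaviour (the DDL is a trimmed copy of the statement above, and the key and config values are made up):

    import json
    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    # Trimmed variant of the model_config DDL above, keeping only the columns needed here.
    cur.execute(
        """
        CREATE TABLE model_config (
            id TEXT NOT NULL PRIMARY KEY,
            base TEXT GENERATED ALWAYS AS (json_extract(config, '$.base')) VIRTUAL NOT NULL,
            type TEXT GENERATED ALWAYS AS (json_extract(config, '$.type')) VIRTUAL NOT NULL,
            name TEXT GENERATED ALWAYS AS (json_extract(config, '$.name')) VIRTUAL NOT NULL,
            original_hash TEXT,
            config TEXT NOT NULL,
            UNIQUE(name, base, type)
        );
        """
    )
    cur.execute(
        "INSERT INTO model_config (id, original_hash, config) VALUES (?, ?, ?);",
        (
            "example-key",
            None,
            json.dumps({"base": "sd-1", "type": "main", "name": "example"}),
        ),
    )
    # The virtual columns are derived from the JSON, so they can be queried without ever being written:
    print(cur.execute("SELECT base, type, name FROM model_config WHERE id = 'example-key';").fetchone())
    # -> ('sd-1', 'main', 'example')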

View File

@@ -1,8 +1,12 @@
# Copyright (c) 2023 Lincoln D. Stein
"""Migrate from the InvokeAI v2 models.yaml format to the v3 sqlite format."""
import json
import sqlite3
from hashlib import sha1
from logging import Logger
from pathlib import Path
from typing import Optional
from omegaconf import DictConfig, OmegaConf
from pydantic import TypeAdapter
@@ -10,13 +14,12 @@ from pydantic import TypeAdapter
from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.app.services.model_records import (
DuplicateModelException,
ModelRecordServiceSQL,
UnknownModelException,
)
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
from invokeai.backend.model_manager.config import (
AnyModelConfig,
BaseModelType,
ModelConfigFactory,
ModelType,
)
from invokeai.backend.model_manager.hash import FastModelHash
@@ -25,9 +28,9 @@ from invokeai.backend.util.logging import InvokeAILogger
ModelsValidator = TypeAdapter(AnyModelConfig)
class MigrateModelYamlToDb:
class MigrateModelYamlToDb1:
"""
Migrate the InvokeAI models.yaml format (VERSION 3.0.0) to SQL3 database format (VERSION 3.2.0)
Migrate the InvokeAI models.yaml format (VERSION 3.0.0) to SQL3 database format (VERSION 3.5.0).
The class has one externally useful method, migrate(), which scans the
current models.yaml file and imports all its entries into invokeai.db.
@@ -41,17 +44,13 @@ class MigrateModelYamlToDb:
config: InvokeAIAppConfig
logger: Logger
cursor: sqlite3.Cursor
def __init__(self) -> None:
def __init__(self, cursor: Optional[sqlite3.Cursor] = None) -> None:
self.config = InvokeAIAppConfig.get_config()
self.config.parse_args()
self.logger = InvokeAILogger.get_logger()
def get_db(self) -> ModelRecordServiceSQL:
"""Fetch the sqlite3 database for this installation."""
db_path = None if self.config.use_memory_db else self.config.db_path
db = SqliteDatabase(db_path=db_path, logger=self.logger, verbose=self.config.log_sql)
return ModelRecordServiceSQL(db)
self.cursor = cursor
def get_yaml(self) -> DictConfig:
"""Fetch the models.yaml DictConfig for this installation."""
@@ -62,8 +61,10 @@ class MigrateModelYamlToDb:
def migrate(self) -> None:
"""Do the migration from models.yaml to invokeai.db."""
db = self.get_db()
try:
yaml = self.get_yaml()
except OSError:
return
for model_key, stanza in yaml.items():
if model_key == "__metadata__":
@@ -86,22 +87,62 @@ class MigrateModelYamlToDb:
new_config: AnyModelConfig = ModelsValidator.validate_python(stanza) # type: ignore # see https://github.com/pydantic/pydantic/discussions/7094
try:
if original_record := db.search_by_path(stanza.path):
key = original_record[0].key
if original_record := self._search_by_path(stanza.path):
key = original_record.key
self.logger.info(f"Updating model {model_name} with information from models.yaml using key {key}")
db.update_model(key, new_config)
self._update_model(key, new_config)
else:
self.logger.info(f"Adding model {model_name} with key {model_key}")
db.add_model(new_key, new_config)
self._add_model(new_key, new_config)
except DuplicateModelException:
self.logger.warning(f"Model {model_name} is already in the database")
except UnknownModelException:
self.logger.warning(f"Model at {stanza.path} could not be found in database")
def _search_by_path(self, path: Path) -> Optional[AnyModelConfig]:
self.cursor.execute(
"""--sql
SELECT config FROM model_config
WHERE path=?;
""",
(str(path),),
)
results = [ModelConfigFactory.make_config(json.loads(x[0])) for x in self.cursor.fetchall()]
return results[0] if results else None
def main():
MigrateModelYamlToDb().migrate()
def _update_model(self, key: str, config: AnyModelConfig) -> None:
record = ModelConfigFactory.make_config(config, key=key) # ensure it is a valid config object
json_serialized = record.model_dump_json() # and turn it into a json string.
self.cursor.execute(
"""--sql
UPDATE model_config
SET
config=?
WHERE id=?;
""",
(json_serialized, key),
)
if self.cursor.rowcount == 0:
raise UnknownModelException("model not found")
if __name__ == "__main__":
main()
def _add_model(self, key: str, config: AnyModelConfig) -> None:
record = ModelConfigFactory.make_config(config, key=key) # ensure it is a valid config object.
json_serialized = record.model_dump_json() # and turn it into a json string.
try:
self.cursor.execute(
"""--sql
INSERT INTO model_config (
id,
original_hash,
config
)
VALUES (?,?,?);
""",
(
key,
record.original_hash,
json_serialized,
),
)
except sqlite3.IntegrityError as exc:
raise DuplicateModelException(f"{record.name}: model is already in database") from exc
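With the cursor-based constructor, MigrateModelYamlToDb1 no longer opens its own database via ModelRecordServiceSQL; it writes through whatever cursor the calling migration hands it, which is how Migration2Callback and Migration3Callback run it inside the migrator's transaction. A minimal standalone usage sketch, assuming the module lives under the migrations package's util/ directory as the relative imports above suggest (the database path is illustrative):

    import sqlite3

    from invokeai.app.services.shared.sqlite_migrator.migrations.util.migrate_yaml_config_1 import (
        MigrateModelYamlToDb1,
    )

    conn = sqlite3.connect("/path/to/invokeai.db")  # illustrative path
    # Reads models.yaml via InvokeAIAppConfig and upserts each entry into the model_config table.
    MigrateModelYamlToDb1(conn.cursor()).migrate()
    conn.commit()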

View File

@@ -389,7 +389,7 @@ class TextualInversionCheckpointProbe(CheckpointProbeBase):
elif "clip_g" in checkpoint:
token_dim = checkpoint["clip_g"].shape[-1]
else:
token_dim = list(checkpoint.values())[0].shape[0]
token_dim = list(checkpoint.values())[0].shape[-1]
if token_dim == 768:
return BaseModelType.StableDiffusion1
elif token_dim == 1024:
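The switch from shape[0] to shape[-1] matters when the fallback embedding tensor is two-dimensional, where the first axis is presumably the number of vectors and the last axis is the token dimension the probe compares against 768 or 1024. A tiny illustration with a stand-in tensor:

    import torch

    emb = torch.zeros(4, 768)  # stand-in for a 4-vector SD 1.x textual inversion embedding
    print(emb.shape[0])   # 4, the number of vectors, which the old code misread as the token dim
    print(emb.shape[-1])  # 768, the token dimension that maps to BaseModelType.StableDiffusion1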

View File

@@ -4,6 +4,7 @@ pip install <path_to_git_source>.
"""
import os
import platform
from distutils.version import LooseVersion
import pkg_resources
import psutil
@@ -31,10 +32,6 @@ else:
console = Console(style=Style(color="grey74", bgcolor="grey19"))
def get_versions() -> dict:
return requests.get(url=INVOKE_AI_REL).json()
def invokeai_is_running() -> bool:
for p in psutil.process_iter():
try:
@@ -50,6 +47,20 @@ def invokeai_is_running() -> bool:
return False
def get_pypi_versions():
url = "https://pypi.org/pypi/invokeai/json"
try:
data = requests.get(url).json()
except Exception:
raise Exception("Unable to fetch version information from PyPi")
versions = list(data["releases"].keys())
versions.sort(key=LooseVersion, reverse=True)
latest_version = [v for v in versions if "rc" not in v][0]
latest_release_candidate = [v for v in versions if "rc" in v][0]
return latest_version, latest_release_candidate, versions
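get_pypi_versions() replaces the old GitHub-releases lookup with the PyPI JSON API, whose payload keys every published version under "releases"; sorting those keys with LooseVersion and filtering on "rc" picks the latest stable and pre-release versions. A small sketch of the selection logic with made-up data:

    from distutils.version import LooseVersion

    data = {"releases": {"3.4.0": [], "3.5.0rc1": [], "3.5.0": []}}  # illustrative payload shape
    versions = list(data["releases"].keys())
    versions.sort(key=LooseVersion, reverse=True)
    latest_version = [v for v in versions if "rc" not in v][0]        # "3.5.0"
    latest_release_candidate = [v for v in versions if "rc" in v][0]  # "3.5.0rc1"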
def welcome(latest_release: str, latest_prerelease: str):
@group()
def text():
@@ -63,8 +74,7 @@ def welcome(latest_release: str, latest_prerelease: str):
yield "[bold yellow]Options:"
yield f"""[1] Update to the latest [bold]official release[/bold] ([italic]{latest_release}[/italic])
[2] Update to the latest [bold]pre-release[/bold] (may be buggy; caveat emptor!) ([italic]{latest_prerelease}[/italic])
[3] Manually enter the [bold]tag name[/bold] for the version you wish to update to
[4] Manually enter the [bold]branch name[/bold] for the version you wish to update to"""
[3] Manually enter the [bold]version[/bold] you wish to update to"""
console.rule()
print(
@@ -92,44 +102,35 @@ def get_extras():
def main():
versions = get_versions()
released_versions = [x for x in versions if not (x["draft"] or x["prerelease"])]
prerelease_versions = [x for x in versions if not x["draft"] and x["prerelease"]]
latest_release = released_versions[0]["tag_name"] if len(released_versions) else None
latest_prerelease = prerelease_versions[0]["tag_name"] if len(prerelease_versions) else None
if invokeai_is_running():
print(":exclamation: [bold red]Please terminate all running instances of InvokeAI before updating.[/red bold]")
input("Press any key to continue...")
return
latest_release, latest_prerelease, versions = get_pypi_versions()
welcome(latest_release, latest_prerelease)
tag = None
branch = None
release = None
choice = Prompt.ask("Choice:", choices=["1", "2", "3", "4"], default="1")
release = latest_release
choice = Prompt.ask("Choice:", choices=["1", "2", "3"], default="1")
if choice == "1":
release = latest_release
elif choice == "2":
release = latest_prerelease
elif choice == "3":
while not tag:
tag = Prompt.ask("Enter an InvokeAI tag name")
elif choice == "4":
while not branch:
branch = Prompt.ask("Enter an InvokeAI branch name")
while True:
release = Prompt.ask("Enter an InvokeAI version")
release = release.strip()
if release in versions:
break
print(f":exclamation: [bold red]'{release}' is not a recognized InvokeAI release.[/red bold]")
extras = get_extras()
print(f":crossed_fingers: Upgrading to [yellow]{tag or release or branch}[/yellow]")
if release:
cmd = f'pip install "invokeai{extras} @ {INVOKE_AI_SRC}/{release}.zip" --use-pep517 --upgrade'
elif tag:
cmd = f'pip install "invokeai{extras} @ {INVOKE_AI_TAG}/{tag}.zip" --use-pep517 --upgrade'
else:
cmd = f'pip install "invokeai{extras} @ {INVOKE_AI_BRANCH}/{branch}.zip" --use-pep517 --upgrade'
print(f":crossed_fingers: Upgrading to [yellow]{release}[/yellow]")
cmd = f'pip install "invokeai{extras}=={release}" --use-pep517 --upgrade'
print("")
print("")
if os.system(cmd) == 0:

View File

@@ -1109,7 +1109,10 @@
"deletedInvalidEdge": "Eliminata connessione non valida {{source}} -> {{target}}",
"unknownInput": "Input sconosciuto: {{name}}",
"prototypeDesc": "Questa invocazione è un prototipo. Potrebbe subire modifiche sostanziali durante gli aggiornamenti dell'app e potrebbe essere rimossa in qualsiasi momento.",
"betaDesc": "Questa invocazione è in versione beta. Fino a quando non sarà stabile, potrebbe subire modifiche importanti durante gli aggiornamenti dell'app. Abbiamo intenzione di supportare questa invocazione a lungo termine."
"betaDesc": "Questa invocazione è in versione beta. Fino a quando non sarà stabile, potrebbe subire modifiche importanti durante gli aggiornamenti dell'app. Abbiamo intenzione di supportare questa invocazione a lungo termine.",
"newWorkflow": "Nuovo flusso di lavoro",
"newWorkflowDesc": "Creare un nuovo flusso di lavoro?",
"newWorkflowDesc2": "Il flusso di lavoro attuale presenta modifiche non salvate."
},
"boards": {
"autoAddBoard": "Aggiungi automaticamente bacheca",
@@ -1629,7 +1632,10 @@
"deleteWorkflow": "Elimina flusso di lavoro",
"workflows": "Flussi di lavoro",
"noDescription": "Nessuna descrizione",
"userWorkflows": "I miei flussi di lavoro"
"userWorkflows": "I miei flussi di lavoro",
"newWorkflowCreated": "Nuovo flusso di lavoro creato",
"downloadWorkflow": "Salva su file",
"uploadWorkflow": "Carica da file"
},
"app": {
"storeNotInitialized": "Il negozio non è inizializzato"

View File

@@ -138,7 +138,6 @@ dependencies = [
"invokeai-node-web" = "invokeai.app.api_app:invoke_api"
"invokeai-import-images" = "invokeai.frontend.install.import_images:main"
"invokeai-db-maintenance" = "invokeai.backend.util.db_maintenance:main"
"invokeai-migrate-models-to-db" = "invokeai.backend.model_manager.migrate_to_db:main"
[project.urls]
"Homepage" = "https://invoke-ai.github.io/InvokeAI/"