From a626ca3e1c211f844637d7755b87d0509bf25261 Mon Sep 17 00:00:00 2001 From: Lincoln Stein Date: Wed, 20 Dec 2023 17:58:34 -0500 Subject: [PATCH] add unit tests and documentation --- docs/contributing/MODEL_MANAGER.md | 250 +++++++++++++++++- .../app/services/shared/sqlite/sqlite_util.py | 2 +- .../sqlite_migrator/migrations/migration_2.py | 43 --- .../sqlite_migrator/migrations/migration_3.py | 14 +- .../migrations/util/migrate_yaml_config_1.py | 8 +- .../model_manager/metadata/__init__.py | 3 + .../metadata/fetch/fetch_civitai.py | 10 +- .../metadata/fetch/fetch_huggingface.py | 3 +- .../model_manager/metadata/metadata_base.py | 14 +- .../model_manager/metadata/metadata_store.py | 35 ++- .../model_metadata/metadata_examples.py | 17 ++ .../model_metadata/test_model_metadata.py | 224 +++++++++++++--- 12 files changed, 518 insertions(+), 105 deletions(-) create mode 100644 tests/app/services/model_metadata/metadata_examples.py diff --git a/docs/contributing/MODEL_MANAGER.md b/docs/contributing/MODEL_MANAGER.md index ce55a4d0ad..1716469b72 100644 --- a/docs/contributing/MODEL_MANAGER.md +++ b/docs/contributing/MODEL_MANAGER.md @@ -15,7 +15,12 @@ model. These are the: their metadata, and `ModelRecordServiceBase` to store that information. It is also responsible for managing the InvokeAI `models` directory and its contents. - + +* _ModelMetadataStore_ and _ModelMetadataFetch_ Backend modules that + are able to retrieve metadata from online model repositories, + transform it into Pydantic models, and cache it in the InvokeAI + SQL database. + * _DownloadQueueServiceBase_ (**CURRENTLY UNDER DEVELOPMENT - NOT IMPLEMENTED**) A multithreaded downloader responsible for downloading models from a remote source to disk. The download @@ -1184,3 +1189,246 @@ other resources that it might have been using. This will start/pause/cancel all jobs that have been submitted to the queue and have not yet reached a terminal state. +*** + +## This Meta be Good: Model Metadata Storage + +The modules found under `invokeai.backend.model_manager.metadata` +provide a straightforward API for fetching model metadata from online +repositories. Currently two repositories are supported: HuggingFace +and Civitai. However, the modules are easily extended for additional +repos, provided that they have defined APIs for metadata access. + +Metadata comprises any descriptive information that is not essential +for getting the model to run. For example "author" is metadata, while +"type", "base" and "format" are not. The latter fields are part of the +model's config, as defined in `invokeai.backend.model_manager.config`.
+ +### Example Usage: + +``` +from invokeai.backend.model_manager.metadata import ( + AnyModelRepoMetadata, + CivitaiMetadataFetch, + CivitaiMetadata, + ModelMetadataStore, +) +# to access the initialized sql database +from invokeai.app.api.dependencies import ApiDependencies + +civitai = CivitaiMetadataFetch() + +# fetch the metadata +model_metadata = civitai.from_url("https://civitai.com/models/215796") + +# get some common metadata fields +author = model_metadata.author +tags = model_metadata.tags + +# get some Civitai-specific fields +assert isinstance(model_metadata, CivitaiMetadata) + +trained_words = model_metadata.trained_words +base_model = model_metadata.base_model_trained_on +thumbnail = model_metadata.thumbnail_url + +# cache the metadata to the database using the key corresponding to +# an existing model config record in the `model_config` table +sql_cache = ModelMetadataStore(ApiDependencies.invoker.services.db) +sql_cache.add_metadata('fb237ace520b6716adc98bcb16e8462c', model_metadata) + +# now we can search the database by tag, author or model name +# matches will contain a set of model keys that match the search +matches = sql_cache.search_by_tag({"tool", "turbo"}) +``` + +### Structure of the Metadata objects + +There is a short class hierarchy of Metadata objects, all of which +descend from the Pydantic `BaseModel`. + +#### `ModelMetadataBase` + +This is the common base class for metadata: + +| **Field Name** | **Type** | **Description** | +|----------------|-----------------|------------------| +| `name` | str | Repository's name for the model | +| `author` | str | Model's author | +| `tags` | Set[str] | Model tags | + + +Note that the model config record also has a `name` field. It is +intended that the config record version be locally customizable, while +the metadata version is read-only. However, enforcing this is expected +to be part of the business logic. + +Descendants of the base add additional fields. + +#### `HuggingFaceMetadata` + +This descends from `ModelMetadataBase` and adds the following fields: + +| **Field Name** | **Type** | **Description** | +|----------------|-----------------|------------------| +| `type` | Literal["huggingface"] | Used for the discriminated union of metadata classes| +| `id` | str | HuggingFace repo_id | +| `tag_dict` | Dict[str, Any] | A dictionary of tag/value pairs provided in addition to `tags` | +| `last_modified`| datetime | Date of last commit of this model to the repo | +| `files` | List[Path] | List of the files in the model repo | + + +#### `CivitaiMetadata` + +This descends from `ModelMetadataBase` and adds the following fields: + +| **Field Name** | **Type** | **Description** | +|----------------|-----------------|------------------| +| `type` | Literal["civitai"] | Used for the discriminated union of metadata classes| +| `id` | int | Civitai model id | +| `version_name` | str | Name of this version of the model (distinct from model name) | +| `version_id` | int | Civitai model version id (distinct from model id) | +| `created` | datetime | Date the model was uploaded to Civitai; no modification date provided | +| `description` | str | Model description.
Quite verbose and contains HTML tags | +| `version_description` | str | Model version description, usually describes changes to the model | +| `nsfw` | bool | Whether the model tends to generate NSFW content | +| `restrictions` | LicenseRestrictions | An object that describes what is and isn't allowed with this model | +| `trained_words`| Set[str] | Trigger words for this model, if any | +| `download_url` | AnyHttpUrl | URL for downloading this version of the model | +| `base_model_trained_on` | str | Name of the model that this version was trained on | +| `thumbnail_url` | AnyHttpUrl | URL to access a representative thumbnail image of the model's output | +| `weight_min` | float | For LoRA sliders, the minimum suggested weight to apply | +| `weight_max` | float | For LoRA sliders, the maximum suggested weight to apply | + +Note that `weight_min` and `weight_max` are not currently populated +and take the default values of (-1.0, +2.0). The issue is that these +values aren't part of the structured data but appear in the text +description. Some regular-expression or LLM-based parsing may be able to +extract these values. + +Also be aware that `base_model_trained_on` is free text and doesn't +correspond to our `ModelType` enum. + +`CivitaiMetadata` also defines some convenience properties relating to +licensing restrictions: `credit_required`, `allow_commercial_use`, +`allow_derivatives` and `allow_different_license`. + +#### `AnyModelRepoMetadata` + +This is a discriminated union of `CivitaiMetadata` and +`HuggingFaceMetadata`. + +### Fetching Metadata from Online Repos + +The `HuggingFaceMetadataFetch` and `CivitaiMetadataFetch` classes will +retrieve metadata from their corresponding repositories and return +`AnyModelRepoMetadata` objects. Their base class +`ModelMetadataFetchBase` is an abstract class that defines two +methods: `from_url()` and `from_id()`. The former accepts the kind of +model URL that a user would cut and paste into the model +import form. The latter accepts a string ID in the format recognized +by the repository of choice. Both methods return an +`AnyModelRepoMetadata`. + +The base class also has a class method `from_json()` which will take +the JSON representation of a metadata object, validate it, and +return the corresponding `AnyModelRepoMetadata` object. + +When initializing one of the metadata fetching classes, you may +provide a `requests.Session` argument. This allows you to customize +the low-level HTTP fetch requests and is used, for instance, in the +testing suite to avoid hitting the internet. + +The HuggingFace and Civitai fetcher subclasses add additional +repo-specific fetching methods: + + +#### `HuggingFaceMetadataFetch` + +This overrides its base class `from_json()` method to return a +`HuggingFaceMetadata` object directly. + +#### `CivitaiMetadataFetch` + +This adds the following methods: + +`from_civitai_modelid()` This takes the ID of a model, finds the +default version of the model, and then retrieves the metadata for +that version, returning a `CivitaiMetadata` object directly. + +`from_civitai_versionid()` This takes the ID of a model version and +retrieves its metadata. It is functionally equivalent to `from_id()`, except +that it returns a `CivitaiMetadata` object rather +than an `AnyModelRepoMetadata`. + + +### Metadata Storage + +The `ModelMetadataStore` provides a simple facility to store model +metadata in the `invokeai.db` database.
The data is stored as a JSON +blob, with a few common fields (`name`, `author`, `tags`) broken out +to be searchable. + +When a metadata object is saved to the database, it is identified +using the model key, _and this key must correspond to an existing +model key in the model_config table_. There is a foreign key integrity +constraint between the `model_config.id` field and the +`model_metadata.id` field such that if you attempt to save metadata +under an unknown key, the attempt will result in an +`UnknownModelException`. Likewise, when a model is deleted from +`model_config`, the deletion of the corresponding metadata record will +be triggered. + +Tags are stored in a normalized fashion in the tables `model_tags` and +`tags`. Triggers keep the tag table in sync with the `model_metadata` +table. + +To create the storage object, initialize it with the InvokeAI +`SqliteDatabase` object. This is typically done as follows: + +``` +from invokeai.app.api.dependencies import ApiDependencies +metadata_store = ModelMetadataStore(ApiDependencies.invoker.services.db) +``` + +You can then access the storage with the following methods: + +#### `add_metadata(key, metadata)` + +Add the metadata using a previously-defined model key. + +There is currently no `delete_metadata()` method. The metadata will +persist until the matching config is deleted from the `model_config` +table. + +#### `get_metadata(key) -> AnyModelRepoMetadata` + +Retrieve the metadata corresponding to the model key. + +#### `update_metadata(key, new_metadata)` + +Update an existing metadata record with new metadata. + +#### `search_by_tag(tags: Set[str]) -> Set[str]` + +Given a set of tags, find models that are tagged with them. If +multiple tags are provided then a matching model must be tagged with +*all* the tags in the set. This method returns a set of model keys and +is intended to be used in conjunction with the `ModelRecordService`: + +``` +model_config_store = ApiDependencies.invoker.services.model_records +matches = metadata_store.search_by_tag({'license:other'}) +models = [model_config_store.get(x) for x in matches] +``` + +#### `search_by_name(name: str) -> Set[str]` + +Find all model metadata records that have the given name and return a +set of keys to the corresponding model config objects. + +#### `search_by_author(author: str) -> Set[str]` + +Find all model metadata records that have the given author and return +a set of keys to the corresponding model config objects.
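As with `search_by_tag()`, the keys returned by `search_by_name()` and `search_by_author()` can be resolved into full config records through the `ModelRecordService`. A minimal sketch of this pattern (the author and name strings below are purely illustrative):

```
model_config_store = ApiDependencies.invoker.services.model_records

# keys of all models whose remote author matches
author_matches = metadata_store.search_by_author("test_author")

# keys of all models whose remote (repository) name matches
name_matches = metadata_store.search_by_name("sdxl-turbo")

# resolve the combined set of keys to model config records
models = [model_config_store.get(x) for x in author_matches | name_matches]
```

Remember that these methods match the name and author reported by the remote repository, not any locally customized values stored in the model config record.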
+ diff --git a/invokeai/app/services/shared/sqlite/sqlite_util.py b/invokeai/app/services/shared/sqlite/sqlite_util.py index 1d1d26c6cd..202513fb40 100644 --- a/invokeai/app/services/shared/sqlite/sqlite_util.py +++ b/invokeai/app/services/shared/sqlite/sqlite_util.py @@ -29,7 +29,7 @@ def init_db(config: InvokeAIAppConfig, logger: Logger, image_files: ImageFileSto migrator = SqliteMigrator(db=db) migrator.register_migration(build_migration_1()) migrator.register_migration(build_migration_2(image_files=image_files, logger=logger)) - migrator.register_migration(build_migration_3()) + migrator.register_migration(build_migration_3(app_config=config, logger=logger)) migrator.register_migration(build_migration_4()) migrator.run_migrations() diff --git a/invokeai/app/services/shared/sqlite_migrator/migrations/migration_2.py b/invokeai/app/services/shared/sqlite_migrator/migrations/migration_2.py index 99922e2fc1..1435f66e8d 100644 --- a/invokeai/app/services/shared/sqlite_migrator/migrations/migration_2.py +++ b/invokeai/app/services/shared/sqlite_migrator/migrations/migration_2.py @@ -11,8 +11,6 @@ from invokeai.app.services.workflow_records.workflow_records_common import ( UnsafeWorkflowWithVersionValidator, ) -from .util.migrate_yaml_config_1 import MigrateModelYamlToDb1 - class Migration2Callback: def __init__(self, image_files: ImageFileStorageBase, logger: Logger): @@ -25,8 +23,6 @@ class Migration2Callback: self._drop_old_workflow_tables(cursor) self._add_workflow_library(cursor) self._drop_model_manager_metadata(cursor) - self._recreate_model_config(cursor) - self._migrate_model_config_records(cursor) self._migrate_embedded_workflows(cursor) def _add_images_has_workflow(self, cursor: sqlite3.Cursor) -> None: @@ -100,45 +96,6 @@ class Migration2Callback: """Drops the `model_manager_metadata` table.""" cursor.execute("DROP TABLE IF EXISTS model_manager_metadata;") - def _recreate_model_config(self, cursor: sqlite3.Cursor) -> None: - """ - Drops the `model_config` table, recreating it. - - In 3.4.0, this table used explicit columns but was changed to use json_extract 3.5.0. - - Because this table is not used in production, we are able to simply drop it and recreate it. 
- """ - - cursor.execute("DROP TABLE IF EXISTS model_config;") - - cursor.execute( - """--sql - CREATE TABLE IF NOT EXISTS model_config ( - id TEXT NOT NULL PRIMARY KEY, - -- The next 3 fields are enums in python, unrestricted string here - base TEXT GENERATED ALWAYS as (json_extract(config, '$.base')) VIRTUAL NOT NULL, - type TEXT GENERATED ALWAYS as (json_extract(config, '$.type')) VIRTUAL NOT NULL, - name TEXT GENERATED ALWAYS as (json_extract(config, '$.name')) VIRTUAL NOT NULL, - path TEXT GENERATED ALWAYS as (json_extract(config, '$.path')) VIRTUAL NOT NULL, - format TEXT GENERATED ALWAYS as (json_extract(config, '$.format')) VIRTUAL NOT NULL, - original_hash TEXT, -- could be null - -- Serialized JSON representation of the whole config object, - -- which will contain additional fields from subclasses - config TEXT NOT NULL, - created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- Updated via trigger - updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')), - -- unique constraint on combo of name, base and type - UNIQUE(name, base, type) - ); - """ - ) - - def _migrate_model_config_records(self, cursor: sqlite3.Cursor) -> None: - """After updating the model config table, we repopulate it.""" - model_record_migrator = MigrateModelYamlToDb1(cursor) - model_record_migrator.migrate() - def _migrate_embedded_workflows(self, cursor: sqlite3.Cursor) -> None: """ In the v3.5.0 release, InvokeAI changed how it handles embedded workflows. The `images` table in diff --git a/invokeai/app/services/shared/sqlite_migrator/migrations/migration_3.py b/invokeai/app/services/shared/sqlite_migrator/migrations/migration_3.py index 2ffef13dd4..87bbcf856c 100644 --- a/invokeai/app/services/shared/sqlite_migrator/migrations/migration_3.py +++ b/invokeai/app/services/shared/sqlite_migrator/migrations/migration_3.py @@ -1,13 +1,16 @@ import sqlite3 +from logging import Logger +from invokeai.app.services.config import InvokeAIAppConfig from invokeai.app.services.shared.sqlite_migrator.sqlite_migrator_common import Migration from .util.migrate_yaml_config_1 import MigrateModelYamlToDb1 class Migration3Callback: - def __init__(self) -> None: - pass + def __init__(self, app_config: InvokeAIAppConfig, logger: Logger) -> None: + self._app_config = app_config + self._logger = logger def __call__(self, cursor: sqlite3.Cursor) -> None: self._drop_model_manager_metadata(cursor) @@ -54,11 +57,12 @@ class Migration3Callback: def _migrate_model_config_records(self, cursor: sqlite3.Cursor) -> None: """After updating the model config table, we repopulate it.""" - model_record_migrator = MigrateModelYamlToDb1(cursor) + self._logger.info("Migrating model config records from models.yaml to database") + model_record_migrator = MigrateModelYamlToDb1(self._app_config, self._logger, cursor) model_record_migrator.migrate() -def build_migration_3() -> Migration: +def build_migration_3(app_config: InvokeAIAppConfig, logger: Logger) -> Migration: """ Build the migration from database version 2 to 3. 
@@ -69,7 +73,7 @@ def build_migration_3() -> Migration: migration_3 = Migration( from_version=2, to_version=3, - callback=Migration3Callback(), + callback=Migration3Callback(app_config=app_config, logger=logger), ) return migration_3 diff --git a/invokeai/app/services/shared/sqlite_migrator/migrations/util/migrate_yaml_config_1.py b/invokeai/app/services/shared/sqlite_migrator/migrations/util/migrate_yaml_config_1.py index f2476ed0f6..7a7565ba77 100644 --- a/invokeai/app/services/shared/sqlite_migrator/migrations/util/migrate_yaml_config_1.py +++ b/invokeai/app/services/shared/sqlite_migrator/migrations/util/migrate_yaml_config_1.py @@ -23,7 +23,6 @@ from invokeai.backend.model_manager.config import ( ModelType, ) from invokeai.backend.model_manager.hash import FastModelHash -from invokeai.backend.util.logging import InvokeAILogger ModelsValidator = TypeAdapter(AnyModelConfig) @@ -46,10 +45,9 @@ class MigrateModelYamlToDb1: logger: Logger cursor: sqlite3.Cursor - def __init__(self, cursor: sqlite3.Cursor = None) -> None: - self.config = InvokeAIAppConfig.get_config() - self.config.parse_args() - self.logger = InvokeAILogger.get_logger() + def __init__(self, config: InvokeAIAppConfig, logger: Logger, cursor: sqlite3.Cursor = None) -> None: + self.config = config + self.logger = logger self.cursor = cursor def get_yaml(self) -> DictConfig: diff --git a/invokeai/backend/model_manager/metadata/__init__.py b/invokeai/backend/model_manager/metadata/__init__.py index f634d01cb1..d376d9f561 100644 --- a/invokeai/backend/model_manager/metadata/__init__.py +++ b/invokeai/backend/model_manager/metadata/__init__.py @@ -19,6 +19,7 @@ if data.allow_commercial_use: print("Commercial use of this model is allowed") """ +from .fetch import CivitaiMetadataFetch, HuggingFaceMetadataFetch from .metadata_base import ( AnyModelRepoMetadata, AnyModelRepoMetadataValidator, @@ -37,4 +38,6 @@ __all__ = [ "HuggingFaceMetadata", "CivitaiMetadata", "ModelMetadataStore", + "CivitaiMetadataFetch", + "HuggingFaceMetadataFetch", ] diff --git a/invokeai/backend/model_manager/metadata/fetch/fetch_civitai.py b/invokeai/backend/model_manager/metadata/fetch/fetch_civitai.py index 6928765f99..491094f55f 100644 --- a/invokeai/backend/model_manager/metadata/fetch/fetch_civitai.py +++ b/invokeai/backend/model_manager/metadata/fetch/fetch_civitai.py @@ -77,10 +77,10 @@ class CivitaiMetadataFetch(ModelMetadataFetchBase): return self.from_civitai_modelid(int(model_id)) elif match := re.match(CIVITAI_VERSION_PAGE_RE, str(url)): version_id = match.group(1) - return self._from_civitai_versionid(int(version_id)) + return self.from_civitai_versionid(int(version_id)) elif match := re.match(CIVITAI_DOWNLOAD_RE, str(url)): version_id = match.group(1) - return self._from_civitai_versionid(int(version_id)) + return self.from_civitai_versionid(int(version_id)) raise UnknownModelException("The url '{url}' does not match any known Civitai URL patterns") def from_id(self, id: str) -> AnyModelRepoMetadata: @@ -89,7 +89,7 @@ class CivitaiMetadataFetch(ModelMetadataFetchBase): May raise an `UnknownModelException`. 
""" - return self._from_civitai_versionid(int(id)) + return self.from_civitai_versionid(int(id)) def from_civitai_modelid(self, model_id: int) -> CivitaiMetadata: """ @@ -100,9 +100,9 @@ class CivitaiMetadataFetch(ModelMetadataFetchBase): model_url = CIVITAI_MODEL_ENDPOINT + str(model_id) model = self._requests.get(model_url).json() default_version = model["modelVersions"][0]["id"] - return self._from_civitai_versionid(default_version, model) + return self.from_civitai_versionid(default_version, model) - def _from_civitai_versionid( + def from_civitai_versionid( self, version_id: int, model_metadata: Optional[Dict[str, Any]] = None ) -> CivitaiMetadata: version_url = CIVITAI_VERSION_ENDPOINT + str(version_id) diff --git a/invokeai/backend/model_manager/metadata/fetch/fetch_huggingface.py b/invokeai/backend/model_manager/metadata/fetch/fetch_huggingface.py index 3eb4cf37a5..00e94d4c2c 100644 --- a/invokeai/backend/model_manager/metadata/fetch/fetch_huggingface.py +++ b/invokeai/backend/model_manager/metadata/fetch/fetch_huggingface.py @@ -28,6 +28,8 @@ from invokeai.app.services.model_records import UnknownModelException from ..metadata_base import AnyModelRepoMetadata, AnyModelRepoMetadataValidator, HuggingFaceMetadata from .fetch_base import ModelMetadataFetchBase +HF_MODEL_RE = r"https?://huggingface.co/([\w\-.]+/[\w\-.]+)" + class HuggingFaceMetadataFetch(ModelMetadataFetchBase): """Fetch model metadata from HuggingFace.""" @@ -68,7 +70,6 @@ class HuggingFaceMetadataFetch(ModelMetadataFetchBase): In the case of an invalid or missing URL, raises a ModelNotFound exception. """ - HF_MODEL_RE = r"https?://huggingface.co/([\w\-.]+/[\w\-.]+)" if match := re.match(HF_MODEL_RE, str(url)): repo_id = match.group(1) return self.from_id(repo_id) diff --git a/invokeai/backend/model_manager/metadata/metadata_base.py b/invokeai/backend/model_manager/metadata/metadata_base.py index 0ef289c43e..a19b28ac92 100644 --- a/invokeai/backend/model_manager/metadata/metadata_base.py +++ b/invokeai/backend/model_manager/metadata/metadata_base.py @@ -1,15 +1,17 @@ # Copyright (c) 2023 Lincoln D. Stein and the InvokeAI Development Team -""" -This module defines core text-to-image model metadata fields. +"""This module defines core text-to-image model metadata fields. Metadata comprises any descriptive information that is not essential for getting the model to run. For example "author" is metadata, while "type", "base" and "format" are not. The latter fields are part of the model's config, as defined in invokeai.backend.model_manager.config. -Note that the "name" and "description" are also present in `config`. -This may need reworking. +Note that the "name" and "description" are also present in `config` +records. This is intentional. The config record fields are intended to +be editable by the user as a form of customization. The metadata +versions of these fields are intended to be kept in sync with the +remote repo. 
""" from datetime import datetime @@ -78,7 +80,7 @@ class CivitaiMetadata(ModelMetadataBase): description="text description of the model's reversion; usually change history; may contain HTML" ) nsfw: bool = Field(description="whether the model tends to generate NSFW content", default=False) - restrictions: LicenseRestrictions = Field(description="license terms", default=LicenseRestrictions) + restrictions: LicenseRestrictions = Field(description="license terms", default_factory=LicenseRestrictions) trained_words: Set[str] = Field(description="words to trigger the model", default_factory=set) download_url: AnyHttpUrl = Field(description="download URL for this model") base_model_trained_on: str = Field(description="base model on which this model was trained (currently not an enum)") @@ -98,7 +100,7 @@ class CivitaiMetadata(ModelMetadataBase): @property def allow_commercial_use(self) -> bool: """Return True if commercial use is allowed.""" - return self.restrictions.AllowCommercialUse == CommercialUsage("None") + return self.restrictions.AllowCommercialUse != CommercialUsage("None") @property def allow_derivatives(self) -> bool: diff --git a/invokeai/backend/model_manager/metadata/metadata_store.py b/invokeai/backend/model_manager/metadata/metadata_store.py index f42058802a..1a05c456f9 100644 --- a/invokeai/backend/model_manager/metadata/metadata_store.py +++ b/invokeai/backend/model_manager/metadata/metadata_store.py @@ -4,7 +4,7 @@ SQL Storage for Model Metadata """ import sqlite3 -from typing import Set +from typing import Optional, Set from invokeai.app.services.model_records import UnknownModelException from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase @@ -59,9 +59,12 @@ class ModelMetadataStore: ) self._update_tags(model_key, metadata.tags) self._db.conn.commit() - except sqlite3.Error as e: + except sqlite3.IntegrityError as excp: # FOREIGN KEY error: the key was not in model_config table self._db.conn.rollback() - raise e + raise UnknownModelException from excp + except sqlite3.Error as excp: + self._db.conn.rollback() + raise excp def get_metadata(self, model_key: str) -> AnyModelRepoMetadata: """Retrieve the ModelRepoMetadata corresponding to model key.""" @@ -111,19 +114,21 @@ class ModelMetadataStore: """Return the keys of models containing all of the listed tags.""" with self._db.lock: try: - matches: Set[str] = set() + matches: Optional[Set[str]] = None for tag in tags: self._cursor.execute( """--sql SELECT a.id FROM model_tags AS a, - tags AS b + tags AS b WHERE a.tag_id=b.tag_id AND b.tag_text=?; """, (tag,), ) model_keys = {x[0] for x in self._cursor.fetchall()} - matches = matches.intersection(model_keys) if len(matches) > 0 else model_keys + if matches is None: + matches = model_keys + matches = matches.intersection(model_keys) except sqlite3.Error as e: raise e return matches @@ -139,6 +144,24 @@ class ModelMetadataStore: ) return {x[0] for x in self._cursor.fetchall()} + def search_by_name(self, name: str) -> Set[str]: + """ + Return the keys of models with the indicated name. + + Note that this is the name of the model given to it by + the remote source. The user may have changed the local + name. The local name will be located in the model config + record object. 
+ """ + self._cursor.execute( + """--sql + SELECT id FROM model_metadata + WHERE name=?; + """, + (name,), + ) + return {x[0] for x in self._cursor.fetchall()} + def _update_tags(self, model_key: str, tags: Set[str]) -> None: """Update tags for the model referenced by model_key.""" # remove previous tags from this model diff --git a/tests/app/services/model_metadata/metadata_examples.py b/tests/app/services/model_metadata/metadata_examples.py new file mode 100644 index 0000000000..9b684ff9de --- /dev/null +++ b/tests/app/services/model_metadata/metadata_examples.py @@ -0,0 +1,17 @@ +# from stabilityai/sdxl-turbo, via the HF API +# This was derived by examination of the outgoing and incoming request.Session +RepoHFMetadata1 = b""" +{"_id":"6564b36f4eb2f55240230f48","id":"stabilityai/sdxl-turbo","modelId":"stabilityai/sdxl-turbo","author":"test_author","sha":"f4b0486b498f84668e828044de1d0c8ba486e05b","lastModified":"2023-12-07T18:04:49.000Z","private":false,"disabled":false,"gated":false,"pipeline_tag":"text-to-image","tags":["diffusers","onnx","safetensors","text-to-image","license:other","has_space","diffusers:StableDiffusionXLPipeline","region:us"],"downloads":588989,"library_name":"diffusers","likes":1297,"model-index":null,"config":{"diffusers":{"class_name":"StableDiffusionXLPipeline"}},"cardData":{"pipeline_tag":"text-to-image","inference":false,"license":"other","license_name":"sai-nc-community","license_link":"https://huggingface.co/stabilityai/sdxl-turbo/blob/main/LICENSE.TXT"},"spaces":["radames/Real-Time-Latent-Consistency-Model","diffusers/unofficial-SDXL-Turbo-i2i-t2i","radames/Real-Time-Latent-Consistency-Model-Text-To-Image","Manjushri/SDXL-Turbo-Img2Img-CPU","latent-consistency/Real-Time-LCM-ControlNet-Lora-SD1.5","radames/Real-Time-SD-Turbo","baulab/ConceptSliders","Krebzonide/SDXL-Turbo-With-Refiner","cocktailpeanut/sdxl-turbo","latent-consistency/Real-Time-LCM-Text-to-Image-Lora-SD1.5","Ekimetrics/Guess-the-prompt","imatag/stable-signature-bzh","robo-bonobo/sdxl-turbo","esafwan/SDXL-Turbo","sub314xxl/unofficial-SDXL-Turbo-i2i-t2i","JoPmt/Txt2Img_SDXL_Trb","Manjushri/SDXL-Turbo-CPU","Omnibus/sdxl-turbo","daily-co/RealtimeSDWebRTC","Ashrafb/IIm2","PayPeer/SDXL-Turbo","coldn00dl3s/sdxl-turbo-demo-interface","jbilcke-hf/faster-image-server","Dorjzodovsuren/Real-Time-SD-Turbo","Geek7/models","HusseinHE/Magic","LouDogNation/Real-Time-Latent-Consistency-Model","RTLAI/SDXLTurbo","TogetherAI/sdxl-turbo","asdsteven/streamlit","hahahafofo/Qwen-Prompt-SDXL-Turbo","hillman2000hk/Real-Time-Latent-Consistency-Model","jensinjames/Real-Time-SD-Turbo","michaelj/vega-Img2Img-CPU","tsi-org/sdxl-turbo","tsi-org/pixio-turbo-xl","vhly/turbo-demo","vloikas/NEW-Mycelium"],"siblings":[{"rfilename":".gitattributes","blobId":"a6344aac8c09253b3b630fb776ae94478aa0275b","size":1519},{"rfilename":"LICENSE.TXT","blobId":"03fe74dc12ab9c5abf698bceb8ac40668787a937","size":7493},{"rfilename":"README.md","blobId":"ce7cdfb6d66b271e92fecf88940726310e5e9013","size":5545},{"rfilename":"image_quality_one_step.png","blobId":"13cb17a7934a33001f393d08f98504cbb2bc914f","size":203741},{"rfilename":"model_index.json","blobId":"f857cf2f828fff2ee319b1a47e6ce820e8affb9d","size":685},{"rfilename":"output_tile.jpg","blobId":"d5fce5ecd2ef568d689419271d9ffabfb0856c36","size":744087},{"rfilename":"prompt_alignment_one_step.png","blobId":"3490b19d78d804b8273b93e803f23a3b04689be3","size":204304},{"rfilename":"scheduler/scheduler_config.json","blobId":"0359d7abb0b9c7b4a433be2db87cefea03c06ea5","size":459},{"rfilename":"sd_xl_turbo_
1.0.safetensors","blobId":"e35c7fd734789e0f9b3cf16839431216b86bef34","size":13875761366,"lfs":{"sha256":"2e58e3704b4c0831bf848e0507c9b5ff2cd8d007b8d0719dba3874156f631050","size":13875761366,"pointerSize":136}},{"rfilename":"sd_xl_turbo_1.0_fp16.safetensors","blobId":"bc5009cfdd6444d8d9f8557efca5b582054ec77d","size":6938081905,"lfs":{"sha256":"e869ac7d6942cb327d68d5ed83a40447aadf20e0c3358d98b2cc9e270db0da26","size":6938081905,"pointerSize":135}},{"rfilename":"text_encoder/config.json","blobId":"8e91c97936ad0b2c1356f03de8d47589b5232704","size":565},{"rfilename":"text_encoder/model.fp16.safetensors","blobId":"e224fcda070dc105fd83c64ed2074f47a1b0ff7b","size":246144152,"lfs":{"sha256":"660c6f5b1abae9dc498ac2d21e1347d2abdb0cf6c0c0c8576cd796491d9a6cdd","size":246144152,"pointerSize":134}},{"rfilename":"text_encoder/model.onnx","blobId":"447dc336b66ec45c07e90c45ef4a88c9ba6ed293","size":492587458,"lfs":{"sha256":"5522ae5f760e4864c20424c514cfa585cb4afb672b71b962f29a97c4f457383c","size":492587458,"pointerSize":134}},{"rfilename":"text_encoder/model.safetensors","blobId":"34cca545bfc07f566fa79345178b6db761f9d53b","size":492265168,"lfs":{"sha256":"778d02eb9e707c3fbaae0b67b79ea0d1399b52e624fb634f2f19375ae7c047c3","size":492265168,"pointerSize":134}},{"rfilename":"text_encoder_2/config.json","blobId":"f9e084535c55110233f44ccc6c7f9d0e1540f8be","size":575},{"rfilename":"text_encoder_2/model.fp16.safetensors","blobId":"1a41ba6f6d67ebaaea453978332a941d0bc5dae5","size":1389382176,"lfs":{"sha256":"ec310df2af79c318e24d20511b601a591ca8cd4f1fce1d8dff822a356bcdb1f4","size":1389382176,"pointerSize":135}},{"rfilename":"text_encoder_2/model.onnx","blobId":"e0994f1a87d7c5be9cf9fd8ce800748042319a52","size":1042018,"lfs":{"sha256":"03856bf14b5d23b17eae141648ef98652e0a6d01b37c2fbb3fc0af5e9a1aff34","size":1042018,"pointerSize":132}},{"rfilename":"text_encoder_2/model.onnx_data","blobId":"ce1e69d7005d3742f687d96b922e32daf45a891f","size":2778639360,"lfs":{"sha256":"0c29d6ace4f348ccbcd302ab0d858994e64240a5b54d7c5ef431a88f2f287e2c","size":2778639360,"pointerSize":135}},{"rfilename":"text_encoder_2/model.safetensors","blobId":"203ca05f5c62a86d4961b479897c9eee1f1701dd","size":2778702264,"lfs":{"sha256":"fa5b2e6f4c2efc2d82e4b8312faec1a5540eabfc6415126c9a05c8436a530ef4","size":2778702264,"pointerSize":135}},{"rfilename":"tokenizer/merges.txt","blobId":"76e821f1b6f0a9709293c3b6b51ed90980b3166b","size":524619},{"rfilename":"tokenizer/special_tokens_map.json","blobId":"1f467f3b057c46a21f88d9ab2f1070af9916c78c","size":586},{"rfilename":"tokenizer/tokenizer_config.json","blobId":"1bf819b6621086dc92428e2c9c8bbab39211fd55","size":704},{"rfilename":"tokenizer/vocab.json","blobId":"469be27c5c010538f845f518c4f5e8574c78f7c8","size":1059962},{"rfilename":"tokenizer_2/merges.txt","blobId":"76e821f1b6f0a9709293c3b6b51ed90980b3166b","size":524619},{"rfilename":"tokenizer_2/special_tokens_map.json","blobId":"ae0c5be6f35217e51c4c000fd325d8de0294e99c","size":460},{"rfilename":"tokenizer_2/tokenizer_config.json","blobId":"bd2abe19377557ff5771584921f9b65fa041fef0","size":855},{"rfilename":"tokenizer_2/vocab.json","blobId":"469be27c5c010538f845f518c4f5e8574c78f7c8","size":1059962},{"rfilename":"unet/config.json","blobId":"220d7ae3e59ce3484c7eeb47ef2ac9db5097e29a","size":1776},{"rfilename":"unet/diffusion_pytorch_model.fp16.safetensors","blobId":"eb3be59c594d3ff91d7b52e689418897b6a1887e","size":5135149760,"lfs":{"sha256":"48fa46161a745f48d4054df3fe13804ee255486bca893403b60373c188fd1bdb","size":5135149760,"pointerSize":135}},{"rfilename":"unet/diffusion
_pytorch_model.safetensors","blobId":"8bb9382a26cc05f6c7857b68406c54b0e1bcfdd9","size":10270077736,"lfs":{"sha256":"1968fc61aa8449ab3d3f9b9a05bce88c611760c01e0c4a7a3785911b546fe582","size":10270077736,"pointerSize":136}},{"rfilename":"unet/model.onnx","blobId":"cb00b7fe8722237955fd4f613366663005e12005","size":7291775,"lfs":{"sha256":"3771e8ac1315aab02b2c0183d3d10406ec4b1399414149751ee67b420fbc3a6d","size":7291775,"pointerSize":132}},{"rfilename":"unet/model.onnx_data","blobId":"0fa44a5ec9ce0f0d84010cfb1dd64e368b6c7b82","size":10269854720,"lfs":{"sha256":"479e70b02ced4312debca3a12506ac928f80b2ce95ad48755c9c789bd8e80ac2","size":10269854720,"pointerSize":136}},{"rfilename":"vae/config.json","blobId":"ae14cf90e29b12134a53383691c98c73dee5d422","size":607},{"rfilename":"vae/diffusion_pytorch_model.fp16.safetensors","blobId":"3a7c83dd497bf1669af2ea276080b72ed8ed45c0","size":167335342,"lfs":{"sha256":"02ee4bd18e5d16e7fe5fc5b85b4aefa2cba6db28897f674226c9d6ddd2f34f06","size":167335342,"pointerSize":134}},{"rfilename":"vae/diffusion_pytorch_model.safetensors","blobId":"6e43f25b75d7936044c8cad00d1ec9d7bf75a67b","size":334643268,"lfs":{"sha256":"716971093e3428c9156906fcbcc5500abf005317c5f4d3a5bb3fa28c45e1e071","size":334643268,"pointerSize":134}},{"rfilename":"vae_decoder/config.json","blobId":"ae14cf90e29b12134a53383691c98c73dee5d422","size":607},{"rfilename":"vae_decoder/model.onnx","blobId":"0a7100c22e53d565456988ffa7472d09b4ae4986","size":198093688,"lfs":{"sha256":"558225daaa98ae7e67594d10a1ac3e546c67b12094836f8788036aaba652e159","size":198093688,"pointerSize":134}},{"rfilename":"vae_encoder/config.json","blobId":"ae14cf90e29b12134a53383691c98c73dee5d422","size":607},{"rfilename":"vae_encoder/model.onnx","blobId":"9b886ba1b2b63f6ad9c1d5b8d2c809d3f62da06b","size":136775724,"lfs":{"sha256":"6be23fdf564e179787f66056a0fa94d489a1d88528d48d1bbef875166d1bd0bb","size":136775724,"pointerSize":134}}],"createdAt":"2023-11-27T15:19:11.000Z"} +""" + +# copied from https://civitai.com/api/v1/model-versions/242807 +# Yeah, this is ridiculously bloated, but that's what you get. +RepoCivitaiVersionMetadata1 = b""" +{"id":242807,"modelId":215485,"name":"v1-128dim","createdAt":"2023-11-29T01:21:22.372Z","updatedAt":"2023-12-16T17:46:14.313Z","status":"Published","publishedAt":"2023-11-29T01:28:10.937Z","trainedWords":[],"trainingStatus":null,"trainingDetails":null,"baseModel":"SDXL Turbo","baseModelType":null,"earlyAccessTimeFrame":0,"description":"

v1: First version(128 dim)

","stats":{"downloadCount":2586,"ratingCount":2,"rating":5},"model":{"name":"SDXL Turbo-LoRA-Stable Diffusion XL faster than light","type":"LORA","nsfw":false,"poi":false},"files":[{"id":187907,"sizeKB":768888.859375,"name":"sd_xl_turbo_lora_v1.safetensors","type":"Model","metadata":{"fp":null,"size":null,"format":"SafeTensor"},"pickleScanResult":"Success","pickleScanMessage":"No Pickle imports","virusScanResult":"Success","virusScanMessage":null,"scannedAt":"2023-11-29T01:30:55.512Z","hashes":{"AutoV1":"6DC5F47A","AutoV2":"A599C42A9F","SHA256":"A599C42A9F4F7494C7F410DBC0FD432CF0242720509E9D52FA41AAC7A88D1B69","CRC32":"BF967A66","BLAKE3":"930E9A5CA9D31D2A35CE5A719120813F09CA927C61B5867C35CB547F9A279F62","AutoV3":"813EA5FB1C67F3CBF1C4DFF5D392EE5E073A2C41DCF3AB36CF6CDD872C4E7690"},"primary":true,"downloadUrl":"https://civitai.com/api/download/models/242807"}],"images":[{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bbf39031-15e6-4f2c-b531-c2b5108a1a9d/width=450/4080480.jpeg","nsfw":"None","width":768,"height":1024,"hash":"U9G9BnR,GI%N00kC4-s:5soz4TMx?wV@^*o#","type":"image","metadata":{"hash":"U9G9BnR,GI%N00kC4-s:5soz4TMx?wV@^*o#","size":1098842,"width":768,"height":1024},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"hashes":{"model":"31e35c80fc"},"prompt":"1 girl, snow","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"clipSkip":2,"resources":[{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, bad_prompt_version2, jpeg artifacts, signature, watermark, text, error, missing fingers","\\"sd_xl_turbo_lora_v1-128dim":"813ea5fb1c67\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/06a2e73e-594c-4a07-b4a2-495922153589/width=450/4081177.jpeg","nsfw":"None","width":2304,"height":1187,"hash":"UeIOY5V@j[ax?bj@ayj[.7ogj@kC_Nfkj[j]","type":"image","metadata":{"hash":"UeIOY5V@j[ax?bj@ayj[.7ogj@kC_Nfkj[j]","size":3214188,"width":2304,"height":1187},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"Script":"X/Y/Z plot","X Type":"Prompt S/R","hashes":{"model":"31e35c80fc"},"prompt":"1girl, wind","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"resources":[{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, 
sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, jpeg artifacts, signature, watermark, text, error, missing fingers","sd_xl_turbo_lora_v1-16dim":"1>\\"","sd_xl_turbo_lora_v1-64dim":"1>","sd_xl_turbo_lora_v1-128dim":"1>","\\"sd_xl_turbo_lora_v1-128dim":"813ea5fb1c67\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6ab69760-60bd-4247-817e-1da98e8089f8/width=450/4008748.jpeg","nsfw":"None","width":1536,"height":1187,"hash":"UbIX~%RnjFWA_2RjjZof?vWUozof~qbba#of","type":"image","metadata":{"hash":"UbIX~%RnjFWA_2RjjZof?vWUozof~qbba#of","size":1818611,"width":1536,"height":1187},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"Script":"X/Y/Z plot","X Type":"Prompt S/R","hashes":{"model":"31e35c80fc"},"prompt":"1 girl, wind","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"500ea30284","cfgScale":1.5,"clipSkip":2,"resources":[{"name":"sd_xl_turbo_lora_v1","type":"lora","weight":1},{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, bad_prompt_version2, jpeg artifacts, signature, watermark, text, error, missing fingers","sd_xl_turbo_lora_v1":"1>,\\"","\\"sd_xl_turbo_lora_v1":"813ea5fb1c67\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fb1cb844-d642-4154-bf0c-2bef3af6dc5c/width=450/4009285.jpeg","nsfw":"None","width":768,"height":1024,"hash":"UKHxgQIT9u-oMxV?-=s.emxvskE0%%s;NHS2","type":"image","metadata":{"hash":"UKHxgQIT9u-oMxV?-=s.emxvskE0%%s;NHS2","size":972827,"width":768,"height":1024},"meta":{"Size":"768x1024","seed":2494203713,"Model":"juggernautXL_version6Rundiffusion","steps":4,"hashes":{"model":"1fe6c7ec54"},"prompt":"1 girl, wind","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","cfgScale":1.5,"clipSkip":2,"resources":[{"name":"sd_xl_turbo_lora_v1","type":"lora","weight":1},{"hash":"1fe6c7ec54","name":"juggernautXL_version6Rundiffusion","type":"model"}],"Model hash":"1fe6c7ec54","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, bad_prompt_version2, jpeg artifacts, signature, watermark, text, error, missing 
fingers","\\"sd_xl_turbo_lora_v1":"813ea5fb1c67\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6ba8c956-967f-4e7e-ab50-85d064206c6d/width=450/4024412.jpeg","nsfw":"None","width":768,"height":768,"hash":"U5Hn8c05kY-o^j0LE0V=1nIm^4Ip0r~4bMR-","type":"image","metadata":{"hash":"U5Hn8c05kY-o^j0LE0V=1nIm^4Ip0r~4bMR-","size":832579,"width":768,"height":768},"meta":null},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/659dc67e-3246-468e-a75e-996e6fa5e87e/width=450/4024238.jpeg","nsfw":"None","width":512,"height":512,"hash":"U9G*N=0iTM56;L9sw@I:0[=[$j=w00XT5Z=^","type":"image","metadata":{"hash":"U9G*N=0iTM56;L9sw@I:0[=[$j=w00XT5Z=^","size":384630,"width":512,"height":512},"meta":null},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc9110d-9e19-4cdc-b67d-46de66848bc4/width=450/4025914.jpeg","nsfw":"None","width":1024,"height":768,"hash":"UPGSGwbc00IU~qIV9Zozo~M{SO%L%gt7X9nh","type":"image","metadata":{"hash":"UPGSGwbc00IU~qIV9Zozo~M{SO%L%gt7X9nh","size":1165953,"width":1024,"height":768},"meta":{"VAE":"sdxl_vae.safetensors","Size":"1024x768","seed":149324391,"Model":"sdxlUnstableDiffusers_v4Grimorium","steps":4,"hashes":{"model":"565af52b8e"},"prompt":"bag, black_hair, blue_sky, building, car, city, cloud, cloudy_sky, day, ground_vehicle, hat, house, lamppost, long_hair, motor_vehicle, mountain, mountainous_horizon, multiple_girls, outdoors, palm_tree, plant, potted_plant, road, road_sign, scenery, sky, snow, standing, street, tree","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"clipSkip":2,"resources":[{"name":"sd_xl_turbo_lora_v1","type":"lora","weight":1},{"hash":"565af52b8e","name":"sdxlUnstableDiffusers_v4Grimorium","type":"model"}],"Model hash":"565af52b8e","negativePrompt":"(worst quality, low quality:1.4)","\\"sd_xl_turbo_lora_v1":"813ea5fb1c67\\""}}],"downloadUrl":"https://civitai.com/api/download/models/242807"} +""" + +# copied from https://civitai.com/api/v1/models/215485 +# Yeah, this is ridiculously bloated, but that's what you get. +RepoCivitaiModelMetadata1 = b""" +{"id":215485,"name":"SDXL Turbo-LoRA-Stable Diffusion XL faster than light","description":"

v1: 128 dim

v1-64dim: half dimension, similar quality

v1-16dim: 94MB, similar quality(but different results in some prompts... I suggest the 128 dim or 64 dim version, but if you have a gpu with 6 GB vram try this version)

LoRA based on sdxl turbo, you can use the TURBO with any stable diffusion xl checkpoint, few seconds = 1 image(3 seconds with a nvidia rtx 3060 12 GB with 1024x768 resolution)

Tested on webui 1111 v1.6.0-2-g4afaaf8a

Tested on ComfyUI v1754 [777f6b15]: workflow --download here--

(new version of workflow November 30, 2023 remove strange violet fog, keep sgm_uniform as scheduler and you can use every sampler)

1-Select your favourite stable diffusion xl checkpoint

2-Download this LoRA, use my workflow for ComfyUI or any workflow with LoRA loader

For webui 1111 write in the prompt <lora:sd_xl_turbo_lora_v1:1>

3-Sampling method on webui 1111: LCM(install animatediff extension if you don't see it in sampling list)

Sampling method on ComfyUI: all, with the workflow of November 30, 2023

4-CFG Scale: from 1 to 2.5

5-Sampling steps: 4

Hugginface page: https://huggingface.co/shiroppo/sd_xl_turbo_lora

---

Ko-fi: https://ko-fi.com/shiroppo

Patreon: https://www.patreon.com/Shiroppo

Pixiv: https://www.pixiv.net/users/7058273

Twitter: https://twitter.com/ShiroppoTwit

Instagram: https://www.instagram.com/shiroppo_real/

My pages: https://solo.to/shiroppo

---

","type":"LORA","poi":false,"nsfw":false,"allowNoCredit":false,"allowCommercialUse":"RentCivit","allowDerivatives":false,"allowDifferentLicense":true,"stats":{"downloadCount":4842,"favoriteCount":252,"commentCount":25,"ratingCount":4,"rating":5,"tippedAmountCount":2653},"creator":{"username":"test_author","image":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d0b4b9d4-f32c-441a-99e1-9b2d73679bd7/width=96/shiroppo.jpeg"},"tags":["tool","turbo","sdxl turbo"],"modelVersions":[{"id":242807,"modelId":215485,"name":"v1-128dim","createdAt":"2023-11-29T01:21:22.372Z","updatedAt":"2023-12-16T17:46:14.313Z","status":"Published","publishedAt":"2023-11-29T01:28:10.937Z","trainedWords":[],"trainingStatus":null,"trainingDetails":null,"baseModel":"SDXL Turbo","baseModelType":null,"earlyAccessTimeFrame":0,"description":"

v1: First version(128 dim)

","vaeId":null,"stats":{"downloadCount":2586,"ratingCount":2,"rating":5},"files":[{"id":187907,"sizeKB":768888.859375,"name":"sd_xl_turbo_lora_v1.safetensors","type":"Model","metadata":{"fp":null,"size":null,"format":"SafeTensor"},"pickleScanResult":"Success","pickleScanMessage":"No Pickle imports","virusScanResult":"Success","virusScanMessage":null,"scannedAt":"2023-11-29T01:30:55.512Z","hashes":{"AutoV1":"6DC5F47A","AutoV2":"A599C42A9F","SHA256":"A599C42A9F4F7494C7F410DBC0FD432CF0242720509E9D52FA41AAC7A88D1B69","CRC32":"BF967A66","BLAKE3":"930E9A5CA9D31D2A35CE5A719120813F09CA927C61B5867C35CB547F9A279F62","AutoV3":"813EA5FB1C67F3CBF1C4DFF5D392EE5E073A2C41DCF3AB36CF6CDD872C4E7690"},"downloadUrl":"https://civitai.com/api/download/models/242807","primary":true}],"images":[{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bbf39031-15e6-4f2c-b531-c2b5108a1a9d/width=450/4080480.jpeg","nsfw":"None","width":768,"height":1024,"hash":"U9G9BnR,GI%N00kC4-s:5soz4TMx?wV@^*o#","type":"image","metadata":{"hash":"U9G9BnR,GI%N00kC4-s:5soz4TMx?wV@^*o#","size":1098842,"width":768,"height":1024},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"hashes":{"model":"31e35c80fc"},"prompt":"1 girl, snow","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"clipSkip":2,"resources":[{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, bad_prompt_version2, jpeg artifacts, signature, watermark, text, error, missing fingers","\\"sd_xl_turbo_lora_v1-128dim":"813ea5fb1c67\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/06a2e73e-594c-4a07-b4a2-495922153589/width=450/4081177.jpeg","nsfw":"None","width":2304,"height":1187,"hash":"UeIOY5V@j[ax?bj@ayj[.7ogj@kC_Nfkj[j]","type":"image","metadata":{"hash":"UeIOY5V@j[ax?bj@ayj[.7ogj@kC_Nfkj[j]","size":3214188,"width":2304,"height":1187},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"Script":"X/Y/Z plot","X Type":"Prompt S/R","hashes":{"model":"31e35c80fc"},"prompt":"1girl, wind","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"resources":[{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, jpeg 
artifacts, signature, watermark, text, error, missing fingers","sd_xl_turbo_lora_v1-16dim":"1>\\"","sd_xl_turbo_lora_v1-64dim":"1>","sd_xl_turbo_lora_v1-128dim":"1>","\\"sd_xl_turbo_lora_v1-128dim":"813ea5fb1c67\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6ab69760-60bd-4247-817e-1da98e8089f8/width=450/4008748.jpeg","nsfw":"None","width":1536,"height":1187,"hash":"UbIX~%RnjFWA_2RjjZof?vWUozof~qbba#of","type":"image","metadata":{"hash":"UbIX~%RnjFWA_2RjjZof?vWUozof~qbba#of","size":1818611,"width":1536,"height":1187},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"Script":"X/Y/Z plot","X Type":"Prompt S/R","hashes":{"model":"31e35c80fc"},"prompt":"1 girl, wind","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"500ea30284","cfgScale":1.5,"clipSkip":2,"resources":[{"name":"sd_xl_turbo_lora_v1","type":"lora","weight":1},{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, bad_prompt_version2, jpeg artifacts, signature, watermark, text, error, missing fingers","sd_xl_turbo_lora_v1":"1>,\\"","\\"sd_xl_turbo_lora_v1":"813ea5fb1c67\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/fb1cb844-d642-4154-bf0c-2bef3af6dc5c/width=450/4009285.jpeg","nsfw":"None","width":768,"height":1024,"hash":"UKHxgQIT9u-oMxV?-=s.emxvskE0%%s;NHS2","type":"image","metadata":{"hash":"UKHxgQIT9u-oMxV?-=s.emxvskE0%%s;NHS2","size":972827,"width":768,"height":1024},"meta":{"Size":"768x1024","seed":2494203713,"Model":"juggernautXL_version6Rundiffusion","steps":4,"hashes":{"model":"1fe6c7ec54"},"prompt":"1 girl, wind","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","cfgScale":1.5,"clipSkip":2,"resources":[{"name":"sd_xl_turbo_lora_v1","type":"lora","weight":1},{"hash":"1fe6c7ec54","name":"juggernautXL_version6Rundiffusion","type":"model"}],"Model hash":"1fe6c7ec54","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, bad_prompt_version2, jpeg artifacts, signature, watermark, text, error, missing 
fingers","\\"sd_xl_turbo_lora_v1":"813ea5fb1c67\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/6ba8c956-967f-4e7e-ab50-85d064206c6d/width=450/4024412.jpeg","nsfw":"None","width":768,"height":768,"hash":"U5Hn8c05kY-o^j0LE0V=1nIm^4Ip0r~4bMR-","type":"image","metadata":{"hash":"U5Hn8c05kY-o^j0LE0V=1nIm^4Ip0r~4bMR-","size":832579,"width":768,"height":768},"meta":null},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/659dc67e-3246-468e-a75e-996e6fa5e87e/width=450/4024238.jpeg","nsfw":"None","width":512,"height":512,"hash":"U9G*N=0iTM56;L9sw@I:0[=[$j=w00XT5Z=^","type":"image","metadata":{"hash":"U9G*N=0iTM56;L9sw@I:0[=[$j=w00XT5Z=^","size":384630,"width":512,"height":512},"meta":null},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/abc9110d-9e19-4cdc-b67d-46de66848bc4/width=450/4025914.jpeg","nsfw":"None","width":1024,"height":768,"hash":"UPGSGwbc00IU~qIV9Zozo~M{SO%L%gt7X9nh","type":"image","metadata":{"hash":"UPGSGwbc00IU~qIV9Zozo~M{SO%L%gt7X9nh","size":1165953,"width":1024,"height":768},"meta":{"VAE":"sdxl_vae.safetensors","Size":"1024x768","seed":149324391,"Model":"sdxlUnstableDiffusers_v4Grimorium","steps":4,"hashes":{"model":"565af52b8e"},"prompt":"bag, black_hair, blue_sky, building, car, city, cloud, cloudy_sky, day, ground_vehicle, hat, house, lamppost, long_hair, motor_vehicle, mountain, mountainous_horizon, multiple_girls, outdoors, palm_tree, plant, potted_plant, road, road_sign, scenery, sky, snow, standing, street, tree","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"clipSkip":2,"resources":[{"name":"sd_xl_turbo_lora_v1","type":"lora","weight":1},{"hash":"565af52b8e","name":"sdxlUnstableDiffusers_v4Grimorium","type":"model"}],"Model hash":"565af52b8e","negativePrompt":"(worst quality, low quality:1.4)","\\"sd_xl_turbo_lora_v1":"813ea5fb1c67\\""}}],"downloadUrl":"https://civitai.com/api/download/models/242807"},{"id":243508,"modelId":215485,"name":"v1-64dim","createdAt":"2023-11-29T14:44:26.562Z","updatedAt":"2023-12-16T17:46:36.863Z","status":"Published","publishedAt":"2023-11-29T14:49:26.079Z","trainedWords":[],"trainingStatus":null,"trainingDetails":null,"baseModel":"SDXL 1.0","baseModelType":null,"earlyAccessTimeFrame":0,"description":"

v1-64dim: half dimension, similar quality

","vaeId":null,"stats":{"downloadCount":2046,"ratingCount":2,"rating":5},"files":[{"id":189448,"sizeKB":1.4140625,"name":"sdxl_turbo_lora_workflow_comfyui_v1.zip","type":"Training Data","metadata":{"fp":null,"size":null,"format":"Other"},"pickleScanResult":"Success","pickleScanMessage":"No Pickle imports","virusScanResult":"Success","virusScanMessage":null,"scannedAt":"2023-11-30T15:45:36.773Z","hashes":{"AutoV2":"52DE4E494B","SHA256":"52DE4E494B919073071B6BDCA5E6A6B58B28267F8508646A7CA0D29230991B1D","CRC32":"A8B62FA5","BLAKE3":"AFE94AE62DBFAFDBA9497BBCB8F4D9E08327E51277A460AA8017222BEF530249"},"downloadUrl":"https://civitai.com/api/download/models/243508?type=Training%20Data"},{"id":188487,"sizeKB":384606.671875,"name":"sd_xl_turbo_lora_v1-64dim.safetensors","type":"Model","metadata":{"fp":null,"size":null,"format":"SafeTensor"},"pickleScanResult":"Success","pickleScanMessage":"No Pickle imports","virusScanResult":"Success","virusScanMessage":null,"scannedAt":"2023-11-29T14:51:02.892Z","hashes":{"AutoV1":"F128131B","AutoV2":"FE223AFC1A","SHA256":"FE223AFC1A165834BDD9CF2D3BFC82374468DED5F3EA8B6AB8018F02F380C557","CRC32":"8303EE37","BLAKE3":"8E3616A9114A8180A904B3FFFABD90B19A57ED3E8CB71CBE232737C34C24EC90","AutoV3":"D60E731380E2C401791D30021EC63E17B12978FCC2DBC9097FB1B735B6BFEF9F"},"downloadUrl":"https://civitai.com/api/download/models/243508","primary":true}],"images":[{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/d883f87d-5267-472a-827e-f0c554f5e081/width=450/4080474.jpeg","nsfw":"None","width":768,"height":1024,"hash":"U9GIp7NKL3xu00xu4-xa5tR*8wMx?wV[^*S2","type":"image","metadata":{"hash":"U9GIp7NKL3xu00xu4-xa5tR*8wMx?wV[^*S2","size":1059564,"width":768,"height":1024},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"hashes":{"model":"31e35c80fc"},"prompt":"1 girl, snow","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"clipSkip":2,"resources":[{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, bad_prompt_version2, jpeg artifacts, signature, watermark, text, error, missing fingers","\\"sd_xl_turbo_lora_v1-64dim":"d60e731380e2\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/aa9f243d-b695-476d-a50a-b88a38ff8134/width=450/4081183.jpeg","nsfw":"None","width":2304,"height":1187,"hash":"UeIOY5V@j[ax?bj@ayj[.7ogj@kC_Nfkj[j]","type":"image","metadata":{"hash":"UeIOY5V@j[ax?bj@ayj[.7ogj@kC_Nfkj[j]","size":3214188,"width":2304,"height":1187},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"Script":"X/Y/Z plot","X Type":"Prompt S/R","hashes":{"model":"31e35c80fc"},"prompt":"1girl, wind","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"resources":[{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model 
hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, jpeg artifacts, signature, watermark, text, error, missing fingers","sd_xl_turbo_lora_v1-16dim":"1>\\"","sd_xl_turbo_lora_v1-64dim":"1>","sd_xl_turbo_lora_v1-128dim":"1>","\\"sd_xl_turbo_lora_v1-128dim":"813ea5fb1c67\\""}}],"downloadUrl":"https://civitai.com/api/download/models/243508"},{"id":245733,"modelId":215485,"name":"v1-16dim","createdAt":"2023-12-01T12:08:27.252Z","updatedAt":"2023-12-16T17:46:52.924Z","status":"Published","publishedAt":"2023-12-01T12:09:51.046Z","trainedWords":[],"trainingStatus":null,"trainingDetails":null,"baseModel":"SD 1.5","baseModelType":null,"earlyAccessTimeFrame":0,"description":"

94MB, similar quality(but different results in some prompts)

","vaeId":null,"stats":{"downloadCount":210,"ratingCount":0,"rating":0},"files":[{"id":190274,"sizeKB":96392.859375,"name":"sd_xl_turbo_lora_v1-16dim.safetensors","type":"Model","metadata":{"fp":null,"size":null,"format":"SafeTensor"},"pickleScanResult":"Success","pickleScanMessage":"No Pickle imports","virusScanResult":"Success","virusScanMessage":null,"scannedAt":"2023-12-01T13:00:52.623Z","hashes":{"AutoV1":"D079F12E","AutoV2":"65605FC5C3","SHA256":"65605FC5C31CFC3E35C90CEC1FCA493A74D438C357327636750591C79A0B6EB9","CRC32":"4AEBF056","BLAKE3":"6C83D51D57C21185D9AA8012D240999DD42CBF5FF1BA6C8BABB90C77FFEE8DF0","AutoV3":"9CBD5006993C2F4FF4BED107FED487900754A25816D34883FD2BA3185D7DF0BD"},"downloadUrl":"https://civitai.com/api/download/models/245733","primary":true}],"images":[{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/bc1f930d-e499-44ca-9d29-1bab91b568ff/width=450/4080469.jpeg","nsfw":"None","width":768,"height":1024,"hash":"UBHLlIX9u6aK00%M9Z%NE,M{DNIT.SoM%1Io","type":"image","metadata":{"hash":"UBHLlIX9u6aK00%M9Z%NE,M{DNIT.SoM%1Io","size":1045153,"width":768,"height":1024},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"hashes":{"model":"31e35c80fc"},"prompt":"1 girl, snow","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"clipSkip":2,"resources":[{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, bad_prompt_version2, jpeg artifacts, signature, watermark, text, error, missing fingers","\\"sd_xl_turbo_lora_v1-16dim":"9cbd5006993c\\""}},{"url":"https://image.civitai.com/xG1nkqKTMzGDvpLrqFT7WA/2f0149fc-43f6-40af-9aae-dc27956cf293/width=450/4081196.jpeg","nsfw":"None","width":2304,"height":1187,"hash":"UeIOY5V@j[ax?bj@ayj[.7ogj@kC_Nfkj[j]","type":"image","metadata":{"hash":"UeIOY5V@j[ax?bj@ayj[.7ogj@kC_Nfkj[j]","size":3214188,"width":2304,"height":1187},"meta":{"VAE":"sd_xl_base_1.0_fixvae_fp16.vae.safetensors","Size":"768x1024","seed":2494203713,"Model":"sd_xl_base_1.0","steps":4,"Script":"X/Y/Z plot","X Type":"Prompt S/R","hashes":{"model":"31e35c80fc"},"prompt":"1girl, wind","Version":"v1.6.0-2-g4afaaf8a","sampler":"LCM","VAE hash":"235745af8d","cfgScale":1.5,"resources":[{"hash":"31e35c80fc","name":"sd_xl_base_1.0","type":"model"}],"Model hash":"31e35c80fc","negativePrompt":"blurry, blurry_image, Lowres, Low_resolution, Low_picture_quality, Low_picture_anime, extra_anatomy, extra_body, extra_navel, extra_face, extra_eyes, extra_chest, extra_nipples, extra_hips, extra_arms, extra_hands, extra_fingers, extra_legs, extra_feet, extra_toe, missing_anatomy, missing_body, missing_navel, missing_face, missing_eyes, missing_chest, missing_nipples, missing_hips, missing_arms, missing_hands, missing_fingers, missing_legs, missing_feet, missing_toe, sketch, sketching, (worst quality:2), (low quality:2), ((monochrome)), ((grayscale)), inpainting, HDR, 
jpeg artifacts, signature, watermark, text, error, missing fingers","sd_xl_turbo_lora_v1-16dim":"1>\\"","sd_xl_turbo_lora_v1-64dim":"1>","sd_xl_turbo_lora_v1-128dim":"1>","\\"sd_xl_turbo_lora_v1-128dim":"813ea5fb1c67\\""}}],"downloadUrl":"https://civitai.com/api/download/models/245733"}]}
+"""
diff --git a/tests/app/services/model_metadata/test_model_metadata.py b/tests/app/services/model_metadata/test_model_metadata.py
index 7bed0ac429..a34466c94c 100644
--- a/tests/app/services/model_metadata/test_model_metadata.py
+++ b/tests/app/services/model_metadata/test_model_metadata.py
@@ -1,34 +1,39 @@
 """
 Test model metadata fetching and storage.
 """
-import pytest
 import datetime
-
 from pathlib import Path
-from typing import Any, Dict, List
-from pydantic import BaseModel, ValidationError
+
+import pytest
+import requests
+from pydantic.networks import HttpUrl
+from requests.sessions import Session
+from requests_testadapter import TestAdapter
 
 from invokeai.app.services.config import InvokeAIAppConfig
-from invokeai.backend.util.logging import InvokeAILogger
-from invokeai.app.services.model_records import ModelRecordServiceBase, ModelRecordServiceSQL, UnknownModelException
+from invokeai.app.services.model_records import ModelRecordServiceSQL, UnknownModelException
 from invokeai.backend.model_manager.config import (
     BaseModelType,
-    MainCheckpointConfig,
-    MainDiffusersConfig,
+    ModelFormat,
     ModelType,
-    TextualInversionConfig,
-    VaeDiffusersConfig,
 )
 from invokeai.backend.model_manager.metadata import (
-    ModelMetadataStore,
-    AnyModelRepoMetadata,
-    CommercialUsage,
-    LicenseRestrictions,
-    HuggingFaceMetadata,
     CivitaiMetadata,
+    CivitaiMetadataFetch,
+    CommercialUsage,
+    HuggingFaceMetadata,
+    HuggingFaceMetadataFetch,
+    ModelMetadataStore,
+)
+from invokeai.backend.util.logging import InvokeAILogger
+from tests.app.services.model_metadata.metadata_examples import (
+    RepoCivitaiModelMetadata1,
+    RepoCivitaiVersionMetadata1,
+    RepoHFMetadata1,
 )
 from tests.fixtures.sqlite_database import create_mock_sqlite_database
 
+
 @pytest.fixture
 def app_config(datadir: Path) -> InvokeAIAppConfig:
     return InvokeAIAppConfig(
@@ -36,49 +41,204 @@ def app_config(datadir: Path) -> InvokeAIAppConfig:
         models_dir=datadir / "root/models",
     )
 
+
 @pytest.fixture
 def record_store(app_config: InvokeAIAppConfig) -> ModelRecordServiceSQL:
     logger = InvokeAILogger.get_logger(config=app_config)
     db = create_mock_sqlite_database(app_config, logger)
     store = ModelRecordServiceSQL(db)
-    # add two config records to the database
+    # add three simple config records to the database
     raw1 = {
-        "path": "/tmp/foo2.ckpt",
+        "path": "/tmp/foo1",
+        "format": ModelFormat("diffusers"),
         "name": "test2",
         "base": BaseModelType("sd-2"),
         "type": ModelType("vae"),
-        "original_hash":"111222333444",
+        "original_hash": "111222333444",
         "source": "stabilityai/sdxl-vae",
     }
     raw2 = {
-        "path": "/tmp/foo1.ckpt",
+        "path": "/tmp/foo2.ckpt",
         "name": "model1",
+        "format": ModelFormat("checkpoint"),
         "base": BaseModelType("sd-1"),
         "type": "main",
         "config": "/tmp/foo.yaml",
         "variant": "normal",
-        "format": "checkpoint",
         "original_hash": "111222333444",
         "source": "https://civitai.com/models/206883/split",
     }
-    store.add_model('test_config_1', raw1)
-    store.add_model('test_config_2', raw2)
+    raw3 = {
+        "path": "/tmp/foo3",
+        "format": ModelFormat("diffusers"),
+        "name": "test3",
+        "base": BaseModelType("sdxl"),
+        "type": ModelType("main"),
+        "original_hash": "111222333444",
+        "source": "author3/model3",
+    }
+    store.add_model("test_config_1", raw1)
+    store.add_model("test_config_2", raw2)
+    store.add_model("test_config_3", raw3)
     return store
 
+
+@pytest.fixture
+def session() -> Session:
+    sess = requests.Session()
+    sess.mount(
+        "https://huggingface.co/api/models/stabilityai/sdxl-turbo",
+        TestAdapter(
+            RepoHFMetadata1,
+            headers={"Content-Type": "application/json; charset=utf-8", "Content-Length": len(RepoHFMetadata1)},
+        ),
+    )
+    sess.mount(
+        "https://civitai.com/api/v1/model-versions/242807",
+        TestAdapter(
+            RepoCivitaiVersionMetadata1,
+            headers={
+                "Content-Length": len(RepoCivitaiVersionMetadata1),
+            },
+        ),
+    )
+    sess.mount(
+        "https://civitai.com/api/v1/models/215485",
+        TestAdapter(
+            RepoCivitaiModelMetadata1,
+            headers={
+                "Content-Length": len(RepoCivitaiModelMetadata1),
+            },
+        ),
+    )
+    return sess
+
+
 @pytest.fixture
 def metadata_store(record_store: ModelRecordServiceSQL) -> ModelMetadataStore:
-    db = record_store._db # to ensure we are sharing the same database
+    db = record_store._db  # to ensure we are sharing the same database
     return ModelMetadataStore(db)
 
+
 def test_metadata_store_put_get(metadata_store: ModelMetadataStore) -> None:
-    input_metadata = HuggingFaceMetadata(name="sdxl-vae",
-                                         author="stabilityai",
-                                         tags={"text-to-image","diffusers"},
-                                         id="stabilityai/sdxl-vae",
-                                         tag_dict={"license":"other"},
-                                         last_modified=datetime.datetime.now(),
-                                         )
-    metadata_store.add_metadata('test_config_1',input_metadata)
-    output_metadata = metadata_store.get_metadata('test_config_1')
+    input_metadata = HuggingFaceMetadata(
+        name="sdxl-vae",
+        author="stabilityai",
+        tags={"text-to-image", "diffusers"},
+        id="stabilityai/sdxl-vae",
+        tag_dict={"license": "other"},
+        last_modified=datetime.datetime.now(),
+    )
+    metadata_store.add_metadata("test_config_1", input_metadata)
+    output_metadata = metadata_store.get_metadata("test_config_1")
+    assert input_metadata == output_metadata
+    with pytest.raises(UnknownModelException):
+        metadata_store.add_metadata("unknown_key", input_metadata)
+
+
+def test_metadata_store_update(metadata_store: ModelMetadataStore) -> None:
+    input_metadata = HuggingFaceMetadata(
+        name="sdxl-vae",
+        author="stabilityai",
+        tags={"text-to-image", "diffusers"},
+        id="stabilityai/sdxl-vae",
+        tag_dict={"license": "other"},
+        last_modified=datetime.datetime.now(),
+    )
+    metadata_store.add_metadata("test_config_1", input_metadata)
+    input_metadata.name = "new-name"
+    metadata_store.update_metadata("test_config_1", input_metadata)
+    output_metadata = metadata_store.get_metadata("test_config_1")
+    assert output_metadata.name == "new-name"
     assert input_metadata == output_metadata
 
+
+def test_metadata_search(metadata_store: ModelMetadataStore) -> None:
+    metadata1 = HuggingFaceMetadata(
+        name="sdxl-vae",
+        author="stabilityai",
+        tags={"text-to-image", "diffusers"},
+        id="stabilityai/sdxl-vae",
+        tag_dict={"license": "other"},
+        last_modified=datetime.datetime.now(),
+    )
+    metadata2 = HuggingFaceMetadata(
+        name="model2",
+        author="stabilityai",
+        tags={"text-to-image", "diffusers", "community-contributed"},
+        id="author2/model2",
+        tag_dict={"license": "other"},
+        last_modified=datetime.datetime.now(),
+    )
+    metadata3 = HuggingFaceMetadata(
+        name="model3",
+        author="author3",
+        tags={"text-to-image", "checkpoint", "community-contributed"},
+        id="author3/model3",
+        tag_dict={"license": "other"},
+        last_modified=datetime.datetime.now(),
+    )
+    metadata_store.add_metadata("test_config_1", metadata1)
+    metadata_store.add_metadata("test_config_2", metadata2)
+    metadata_store.add_metadata("test_config_3", metadata3)
+
+    matches = metadata_store.search_by_author("stabilityai")
+    assert len(matches) == 2
+    assert "test_config_1" in matches
+    assert "test_config_2" in matches
+    matches = metadata_store.search_by_author("Sherlock Holmes")
+    assert not matches
+
+    matches = metadata_store.search_by_name("model3")
+    assert len(matches) == 1
+    assert "test_config_3" in matches
+
+    matches = metadata_store.search_by_tag({"text-to-image"})
+    assert len(matches) == 3
+
+    matches = metadata_store.search_by_tag({"text-to-image", "diffusers"})
+    assert len(matches) == 2
+    assert "test_config_1" in matches
+    assert "test_config_2" in matches
+
+    matches = metadata_store.search_by_tag({"checkpoint", "community-contributed"})
+    assert len(matches) == 1
+    assert "test_config_3" in matches
+
+    # does the tag table update correctly?
+    matches = metadata_store.search_by_tag({"checkpoint", "licensed-for-commercial-use"})
+    assert not matches
+    metadata3.tags.add("licensed-for-commercial-use")
+    metadata_store.update_metadata("test_config_3", metadata3)
+    matches = metadata_store.search_by_tag({"checkpoint", "licensed-for-commercial-use"})
+    assert len(matches) == 1
+
+
+def test_metadata_civitai_fetch(session: Session) -> None:
+    fetcher = CivitaiMetadataFetch(session)
+    metadata = fetcher.from_url(HttpUrl("https://civitai.com/models/215485/SDXL-turbo"))
+    assert isinstance(metadata, CivitaiMetadata)
+    assert metadata.id == 215485
+    assert metadata.author == "test_author"  # note that this is not the same as the original from Civitai
+    assert metadata.allow_commercial_use  # changed to make sure we are reading locally not remotely
+    assert metadata.restrictions.AllowCommercialUse == CommercialUsage("RentCivit")
+    assert metadata.version_id == 242807
+    assert metadata.tags == {"tool", "turbo", "sdxl turbo"}
+
+
+def test_metadata_hf_fetch(session: Session) -> None:
+    fetcher = HuggingFaceMetadataFetch(session)
+    metadata = fetcher.from_url(HttpUrl("https://huggingface.co/stabilityai/sdxl-turbo"))
+    assert isinstance(metadata, HuggingFaceMetadata)
+    assert metadata.author == "test_author"  # this is not the same as the original
+    assert metadata.files
+    assert metadata.tags == {
+        "diffusers",
+        "onnx",
+        "safetensors",
+        "text-to-image",
+        "license:other",
+        "has_space",
+        "diffusers:StableDiffusionXLPipeline",
+        "region:us",
+    }