allow user to select main database or external file for model record/config db

Lincoln Stein
2023-10-07 13:31:21 -04:00
parent 8e06088152
commit 6303f74616
9 changed files with 111 additions and 30 deletions
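
Before the diff, a quick orientation: the new model_config_db setting accepts the literal "auto" (store model config records in the main invokeai.db sqlite database), a path to an external .db or .yaml file, or None (fall back to the legacy models.yaml at conf_path). The sketch below is illustrative only and not part of this commit; it assumes settings can be overridden through InvokeAIAppConfig.get_config() keyword arguments, and the example file path is hypothetical:

from invokeai.app.services.config import InvokeAIAppConfig

# "auto": keep model config records in the main invokeai.db sqlite database
config = InvokeAIAppConfig.get_config(model_config_db="auto")

# an external .db (or .yaml) file: store records there instead (hypothetical path)
config = InvokeAIAppConfig.get_config(model_config_db="databases/model_records.db")

# None (the new default): fall back to the legacy models.yaml at config.conf_path
config = InvokeAIAppConfig.get_config(model_config_db=None)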

View File

@@ -145,17 +145,6 @@ class InvokeAISettings(BaseSettings):
return [
"type",
"initconf",
"version",
"from_file",
"model",
"root",
"max_cache_size",
"max_vram_cache_size",
"always_use_cpu",
"free_gpu_mem",
"xformers_enabled",
"tiled_decode",
"conf_path",
]
class Config:

View File

@@ -223,7 +223,7 @@ class InvokeAIAppConfig(InvokeAISettings):
lora_dir : Path = Field(default=None, description='Path to a directory of LoRA/LyCORIS models to be imported on startup.', category='Paths')
embedding_dir : Path = Field(default=None, description='Path to a directory of Textual Inversion embeddings to be imported on startup.', category='Paths')
controlnet_dir : Path = Field(default=None, description='Path to a directory of ControlNet embeddings to be imported on startup.', category='Paths')
model_config_db : Union[Path, Literal['auto']] = Field(default='auto', description='Path to a sqlite .db file or .yaml file for storing model config records; "auto" will reuse the main sqlite db', category='Paths')
model_config_db : Union[Path, Literal['auto'], None] = Field(default=None, description='Path to a sqlite .db file or .yaml file for storing model config records; "auto" will reuse the main sqlite db', category='Paths')
models_dir : Path = Field(default='models', description='Path to the models directory', category='Paths')
legacy_conf_dir : Path = Field(default='configs/stable-diffusion', description='Path to directory of legacy checkpoint config files', category='Paths')
db_dir : Path = Field(default='databases', description='Path to InvokeAI databases directory', category='Paths')
@@ -314,9 +314,7 @@ class InvokeAIAppConfig(InvokeAISettings):
@classmethod
def get_config(cls, **kwargs) -> InvokeAIAppConfig:
"""
This returns a singleton InvokeAIAppConfig configuration object.
"""
"""This returns a singleton InvokeAIAppConfig configuration object."""
if (
cls.singleton_config is None
or type(cls.singleton_config) is not cls
@@ -326,6 +324,26 @@ class InvokeAIAppConfig(InvokeAISettings):
cls.singleton_init = kwargs
return cls.singleton_config
@classmethod
def _excluded_from_yaml(cls) -> List[str]:
el = super()._excluded_from_yaml()
el.extend(
[
"version",
"from_file",
"model",
"root",
"max_cache_size",
"max_vram_cache_size",
"always_use_cpu",
"free_gpu_mem",
"xformers_enabled",
"tiled_decode",
"conf_path",
]
)
return el
@property
def root_path(self) -> Path:
"""

View File

@@ -51,14 +51,22 @@ class ModelRecordServiceBase(ModelConfigStore):
a. if the path looks like a .db file, open a new sqlite3 connection and return a ModelRecordServiceSQL
b. if the path looks like a .yaml file, return a new ModelRecordServiceFile
c. otherwise bail
2. if config.model_config_db is the literal 'auto', then reuse the sqlite3 connection and lock passed
2. if config.model_config_db is the literal 'auto', then use the passed sqlite3 connection and thread lock.
a. if either of these is missing, then we create our own connection to the invokeai.db file, which *should*
be a safe thing to do - sqlite3 will use file-level locking.
3. if config.model_config_db is None, then fall back to config.conf_path, using a yaml file
"""
logger = InvokeAILogger.get_logger()
db = config.model_config_db
if db is None:
return ModelRecordServiceFile.from_db_file(config.model_conf_path)
if str(db) == "auto":
assert (conn is not None) and (lock is not None)
logger.info("Model config storage = main InvokeAI database")
return ModelRecordServiceSQL.from_connection(conn, lock)
return (
ModelRecordServiceSQL.from_connection(conn, lock)
if (conn and lock)
else ModelRecordServiceSQL.from_db_file(config.db_path)
)
assert isinstance(db, Path)
suffix = db.suffix
if suffix == ".yaml":
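
Taken together, the branches above let callers stay agnostic about which backend they get. A minimal usage sketch, assuming the classmethod shown here is get_impl (as the ModelInstall change further down calls it) and that the sqlite connection and lock parameters default to None:

from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.app.services.model_record_service import ModelRecordServiceBase

config = InvokeAIAppConfig.get_config()

# "auto" or a *.db path -> sqlite-backed ModelRecordServiceSQL
# a *.yaml path         -> ModelRecordServiceFile
# None                  -> ModelRecordServiceFile on the legacy models.yaml at config.conf_path
store = ModelRecordServiceBase.get_impl(config)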

View File

@@ -12,8 +12,8 @@ from tqdm import tqdm
import invokeai.configs as configs
from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.backend.model_manager import BaseModelType, ModelInstall, ModelInstallJob, ModelType
from invokeai.backend.model_manager.install import ModelSourceMetadata
from invokeai.backend.model_manager import BaseModelType, ModelType
from invokeai.backend.model_manager.install import ModelInstall, ModelInstallJob, ModelSourceMetadata
# name of the starter models file
INITIAL_MODELS = "INITIAL_MODELS.yaml"

View File

@@ -37,8 +37,10 @@ from transformers import AutoFeatureExtractor, BertTokenizerFast, CLIPTextConfig
import invokeai.configs as configs
from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.backend.install.install_helper import InstallHelper, InstallSelections
from invokeai.backend.install.legacy_arg_parsing import legacy_parser
from invokeai.backend.model_manager import BaseModelType, ModelType
from invokeai.backend.model_manager.storage import ConfigFileVersionMismatchException, migrate_models_store
from invokeai.backend.util import choose_precision, choose_torch_device
from invokeai.backend.util.logging import InvokeAILogger
from invokeai.frontend.install.model_install import addModelsForm
@@ -48,16 +50,16 @@ from invokeai.frontend.install.widgets import (
MIN_COLS,
MIN_LINES,
CenteredButtonPress,
CheckboxWithChanged,
CyclingForm,
FileBox,
MultiSelectColumns,
SingleSelectColumnsSimple,
SingleSelectWithChanged,
WindowTooSmallException,
set_min_terminal_size,
)
from .install_helper import InstallHelper, InstallSelections
warnings.filterwarnings("ignore")
transformers.logging.set_verbosity_error()
@@ -529,6 +531,45 @@ Use cursor arrows to make a checkbox selection, and space to toggle.
)
else:
self.vram = DummyWidgetValue.zero
self.nextrely += 1
self.add_widget_intelligent(
npyscreen.FixedText,
value="Location of the database used to store model path and configuration information:",
editable=False,
color="CONTROL",
)
self.nextrely += 1
if first_time:
old_opts.model_config_db = "auto"
self.model_conf_auto = self.add_widget_intelligent(
CheckboxWithChanged,
value=str(old_opts.model_config_db) == "auto",
name="Main database",
relx=2,
max_width=25,
scroll_exit=True,
)
self.nextrely -= 2
config_db = str(old_opts.model_config_db or old_opts.conf_path)
self.model_conf_override = self.add_widget_intelligent(
FileBox,
value=str(old_opts.root_path / config_db)
if config_db != "auto"
else str(old_opts.root_path / old_opts.conf_path),
name="Specify models config database manually",
select_dir=False,
must_exist=False,
use_two_lines=False,
labelColor="GOOD",
# begin_entry_at=40,
relx=30,
max_height=3,
max_width=100,
scroll_exit=True,
hidden=str(old_opts.model_config_db) == "auto",
)
self.model_conf_auto.on_changed = self.show_hide_model_conf_override
self.nextrely += 1
self.outdir = self.add_widget_intelligent(
FileBox,
@@ -540,6 +581,7 @@ Use cursor arrows to make a checkbox selection, and space to toggle.
labelColor="GOOD",
begin_entry_at=40,
max_height=3,
max_width=127,
scroll_exit=True,
)
self.autoimport_dirs = {}
@@ -553,6 +595,7 @@ Use cursor arrows to make a checkbox selection, and space to toggle.
labelColor="GOOD",
begin_entry_at=32,
max_height=3,
max_width=127,
scroll_exit=True,
)
self.nextrely += 1
@@ -589,6 +632,10 @@ https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/blob/main/LICENS
self.attention_slice_label.hidden = not show
self.attention_slice_size.hidden = not show
def show_hide_model_conf_override(self, value):
self.model_conf_override.hidden = value
self.model_conf_override.display()
def on_ok(self):
options = self.marshall_arguments()
if self.validate_field_values(options):
@@ -636,6 +683,7 @@ https://huggingface.co/stabilityai/stable-diffusion-xl-base-1.0/blob/main/LICENS
directory = directory.relative_to(config.root_path)
setattr(new_opts, attr, directory)
new_opts.model_config_db = "auto" if self.model_conf_auto.value else self.model_conf_override.value
new_opts.hf_token = self.hf_token.value
new_opts.license_acceptance = self.license_acceptance.value
new_opts.precision = PRECISION_CHOICES[self.precision.value[0]]
@@ -934,7 +982,11 @@ def main():
initialize_rootdir(config.root_path, opt.yes_to_all)
# this will initialize the models.yaml file if not present
install_helper = InstallHelper(config)
try:
install_helper = InstallHelper(config)
except ConfigFileVersionMismatchException:
config.model_config_db = migrate_models_store(config)
install_helper = InstallHelper(config)
models_to_download = default_user_selections(opt, install_helper)
new_init_file = config.root_path / "invokeai.yaml"

View File

@@ -54,12 +54,13 @@ import tempfile
from abc import ABC, abstractmethod
from pathlib import Path
from shutil import move, rmtree
from typing import Any, Callable, Dict, List, Optional, Set, Type, Union
from typing import Any, Callable, Dict, List, Optional, Set, Union
from pydantic import Field
from pydantic.networks import AnyHttpUrl
from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.app.services.model_record_service import ModelRecordServiceBase
from invokeai.backend.util import Chdir, InvokeAILogger, Logger
from .config import (
@@ -77,7 +78,7 @@ from .hash import FastModelHash
from .models import InvalidModelException
from .probe import ModelProbe, ModelProbeInfo
from .search import ModelSearch
from .storage import DuplicateModelException, ModelConfigStore, get_config_store
from .storage import DuplicateModelException, ModelConfigStore
class ModelInstallJob(DownloadJobBase):
@@ -380,7 +381,7 @@ class ModelInstall(ModelInstallBase):
): # noqa D107 - use base class docstrings
self._app_config = config or InvokeAIAppConfig.get_config()
self._logger = logger or InvokeAILogger.get_logger(config=self._app_config)
self._store = store or get_config_store(config.root_path / config.model_conf_path)
self._store = store or ModelRecordServiceBase.get_impl(self._app_config)
self._download_queue = download or DownloadQueue(config=self._app_config, event_handlers=event_handlers)
self._async_installs: Dict[Union[str, Path, AnyHttpUrl], Union[str, None]] = dict()
self._installed = set()

View File

@@ -12,10 +12,10 @@ from ..config import BaseModelType, MainCheckpointConfig, MainConfig, ModelType
from .base import CONFIG_FILE_VERSION
def migrate_models_store(config: InvokeAIAppConfig):
def migrate_models_store(config: InvokeAIAppConfig) -> Path:
"""Migrate models from v1 models.yaml to v3.2 models.yaml."""
# avoid circular import
from invokeai.backend.model_manager import DuplicateModelException, ModelInstall
from invokeai.backend.model_manager.install import DuplicateModelException, ModelInstall
from invokeai.backend.model_manager.storage import get_config_store
app_config = InvokeAIAppConfig.get_config()
@@ -64,3 +64,4 @@ def migrate_models_store(config: InvokeAIAppConfig):
logger.info(f"Original version of models config file saved as {str(old_file) + '.orig'}")
shutil.move(old_file, str(old_file) + ".orig")
shutil.move(new_file, old_file)
return old_file

View File

@@ -27,7 +27,8 @@ from pydantic import BaseModel
import invokeai.configs as configs
from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.backend.install.install_helper import InstallHelper
from invokeai.backend.model_manager import BaseModelType, ModelInstall, ModelInstallJob, ModelType
from invokeai.backend.model_manager import BaseModelType, ModelType
from invokeai.backend.model_manager.install import ModelInstall, ModelInstallJob
from invokeai.backend.util import choose_precision, choose_torch_device
from invokeai.backend.util.logging import InvokeAILogger
from invokeai.frontend.install.widgets import (

View File

@@ -19,7 +19,7 @@ from npyscreen import fmPopup
# minimum size for UIs
MIN_COLS = 150
MIN_LINES = 40
MIN_LINES = 45
class WindowTooSmallException(Exception):
@@ -264,6 +264,17 @@ class SingleSelectWithChanged(npyscreen.SelectOne):
self.on_changed(self.value)
class CheckboxWithChanged(npyscreen.Checkbox):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.on_changed = None
def whenToggled(self):
super().whenToggled()
if self.on_changed:
self.on_changed(self.value)
class SingleSelectColumnsSimple(SelectColumnBase, SingleSelectWithChanged):
"""Row of radio buttons. Spacebar to select."""