mirror of https://github.com/invoke-ai/InvokeAI
synced 2024-08-30 20:32:17 +00:00

more refactoring; fixed place where rel conversion missed

commit 99daa97978, parent 982a568349
@@ -129,7 +129,7 @@ class ModelInstall(object):
                 model_dict[key] = ModelLoadInfo(**value)

         # supplement with entries in models.yaml
-        installed_models = self.mgr.list_models()
+        installed_models = [x for x in self.mgr.list_models() if not self._is_autoloaded(x)]

         for md in installed_models:
             base = md["base_model"]
@@ -148,6 +148,18 @@ class ModelInstall(object):
                     )
         return {x: model_dict[x] for x in sorted(model_dict.keys(), key=lambda y: model_dict[y].name.lower())}

+    def _is_autoloaded(self, model_info: dict) -> bool:
+        path = model_info.get("path")
+        if not path:
+            return False
+        for autodir in ['autoimport_dir','lora_dir','embedding_dir','controlnet_dir']:
+            if autodir_path := getattr(self.config, autodir):
+                autodir_path = self.config.root_path / autodir_path
+                print(f'{path} => {autodir_path}; is_relative={Path(path).is_relative_to(autodir_path)}',file=log)
+                if Path(path).is_relative_to(autodir_path):
+                    return True
+        return False
+
     def list_models(self, model_type):
         installed = self.mgr.list_models(model_type=model_type)
         print(f"Installed models of type `{model_type}`:")
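For context, the new _is_autoloaded() helper simply asks whether a model's stored path falls under one of the auto-scanned directories, and the list comprehension in the first hunk uses it to filter those models out of the listing. A minimal standalone sketch of the same check follows; the root and directory names are hypothetical stand-ins, not the real InvokeAI config fields.

from pathlib import Path

ROOT = Path("/data/invokeai")                     # assumed root path
AUTODIRS = ["autoimport", "loras", "embeddings"]  # assumed auto-scanned dirs

def is_autoloaded(model_info: dict) -> bool:
    """True if the model's path lives under any auto-scanned directory."""
    path = model_info.get("path")
    if not path:
        return False
    return any(Path(path).is_relative_to(ROOT / d) for d in AUTODIRS)

# usage mirroring the first hunk: drop autoloaded entries from the listing
all_models = [{"path": "/data/invokeai/loras/foo.safetensors"},
              {"path": "/data/invokeai/models/sd-1/main/bar"}]
installed = [m for m in all_models if not is_autoloaded(m)]
print(installed)   # only the models/ entry survives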
@@ -187,7 +187,7 @@ class ModelCache(object):
         # TODO: lock for no copies on simultaneous calls?
         cache_entry = self._cached_models.get(key, None)
         if cache_entry is None:
-            self.logger.info(f"Loading model {model_path}, type {base_model}:{model_type}:{submodel}")
+            self.logger.info(f"Loading model {model_path}, type {base_model.value}:{model_type.value}:{submodel.value}")

             # this will remove older cached models until
             # there is sufficient room to load the requested model
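The only change in this hunk is logging .value instead of the enum members themselves; with a plain Enum the difference in an f-string looks like this (a stand-in class, not InvokeAI's actual enum definitions):

from enum import Enum

class BaseModelType(Enum):          # hypothetical stand-in
    StableDiffusion1 = "sd-1"

b = BaseModelType.StableDiffusion1
print(f"{b}")        # BaseModelType.StableDiffusion1
print(f"{b.value}")  # sd-1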
@@ -586,7 +586,7 @@ class ModelManager(object):

             # expose paths as absolute to help web UI
             if path := model_dict.get("path"):
-                model_dict["path"] = str(self.app_config.root_path / path)
+                model_dict["path"] = str(self.resolve_model_path(path))
             models.append(model_dict)

         return models
@@ -654,10 +654,9 @@ class ModelManager(object):
         The returned dict has the same format as the dict returned by
         model_info().
         """
-        # relativize paths as they go in - this makes it easier to move the root directory around
+        # relativize paths as they go in - this makes it easier to move the models directory around
         if path := model_attributes.get("path"):
-            if Path(path).is_relative_to(self.app_config.models_path):
-                model_attributes["path"] = str(Path(path).relative_to(self.app_config.models_path))
+            model_attributes["path"] = str(self.relative_model_path(Path(path)))

         model_class = MODEL_CLASSES[base_model][model_type]
         model_config = model_class.create_config(**model_attributes)
@@ -715,7 +714,7 @@ class ModelManager(object):
         if not model_cfg:
             raise ModelNotFoundException(f"Unknown model: {model_key}")

-        old_path = self.app_config.root_path / model_cfg.path
+        old_path = self.resolve_model_path(model_cfg.path)
         new_name = new_name or model_name
         new_base = new_base or base_model
         new_key = self.create_key(new_name, new_base, model_type)
@@ -725,11 +724,13 @@ class ModelManager(object):
         # if this is a model file/directory that we manage ourselves, we need to move it
         if old_path.is_relative_to(self.app_config.models_path):
             new_path = (
-                self.app_config.root_path
-                / "models"
-                / BaseModelType(new_base).value
-                / ModelType(model_type).value
-                / new_name
+                self.resolve_model_path(
+                    Path(
+                        BaseModelType(new_base).value,
+                        ModelType(model_type).value,
+                        new_name,
+                    )
+                )
             )
             move(old_path, new_path)
             model_cfg.path = str(new_path.relative_to(self.app_config.models_path))
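The rewritten new_path expression builds the same location as before: Path(a, b, c) joins its arguments exactly like chained / operators, and routing the result through resolve_model_path() anchors it at the configured models directory. A quick equivalence check, where the models path shown is an assumption:

from pathlib import Path

models_path = Path("/data/invokeai/models")   # assumed configured models_path
segments = Path("sd-1", "main", "my-model")   # Path(a, b, c) == Path("sd-1") / "main" / "my-model"
assert models_path / segments == models_path / "sd-1" / "main" / "my-model"
print(models_path / segments)                 # /data/invokeai/models/sd-1/main/my-model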
@@ -810,9 +811,15 @@ class ModelManager(object):

         return result

-    def resolve_model_path(self, path: str) -> Path:
+    def resolve_model_path(self, path: Union[Path,str]) -> Path:
+        """return relative paths based on configured models_path"""
         return self.app_config.models_path / path

+    def relative_model_path(self, model_path: Path) -> Path:
+        if model_path.is_relative_to(self.app_config.models_path):
+            model_path = model_path.relative_to(self.app_config.models_path)
+        return model_path
+
     def search_models(self, search_folder):
         self.logger.info(f"Finding Models In: {search_folder}")
         models_folder_ckpt = Path(search_folder).glob("**/*.ckpt")
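Taken together, the two helpers above let the manager store every managed model path relative to models_path and expand it back on demand, which is what makes the models tree relocatable. A self-contained sketch of that round trip, assuming a fixed models directory (names mirror the diff, but this is not the real class):

from pathlib import Path
from typing import Union

MODELS_PATH = Path("/data/invokeai/models")    # assumed configured models_path

def resolve_model_path(path: Union[Path, str]) -> Path:
    """Expand a path stored relative to models_path; absolute paths pass through."""
    return MODELS_PATH / path                  # Path("/x") / "/abs" -> Path("/abs")

def relative_model_path(model_path: Path) -> Path:
    """Relativize managed paths so the whole models tree can be moved."""
    if model_path.is_relative_to(MODELS_PATH):
        model_path = model_path.relative_to(MODELS_PATH)
    return model_path

stored = relative_model_path(Path("/data/invokeai/models/sd-1/main/my-model"))
print(stored)                      # sd-1/main/my-model
print(resolve_model_path(stored))  # /data/invokeai/models/sd-1/main/my-model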
@@ -896,7 +903,7 @@ class ModelManager(object):
                 if model_config.path.startswith("models"):
                     model_config.path = str(Path(*Path(model_config.path).parts[1:]))

-                model_path = self.app_config.models_path.absolute() / model_config.path
+                model_path = self.resolve_model_path(model_config.path).absolute()
                 if not model_path.exists():
                     model_class = MODEL_CLASSES[cur_base_model][cur_model_type]
                     if model_class.save_to_config:
@@ -915,7 +922,7 @@ class ModelManager(object):
                 if model_type is not None and cur_model_type != model_type:
                     continue
                 model_class = MODEL_CLASSES[cur_base_model][cur_model_type]
-                models_dir = self.app_config.models_path / cur_base_model.value / cur_model_type.value
+                models_dir = self.resolve_model_path(Path(cur_base_model.value, cur_model_type.value))

                 if not models_dir.exists():
                     continue  # TODO: or create all folders?
@@ -928,10 +935,8 @@ class ModelManager(object):
                 try:
                     if model_key in self.models:
                         raise DuplicateModelException(f"Model with key {model_key} added twice")

-                    if model_path.is_relative_to(self.app_config.models_path):
-                        model_path = model_path.relative_to(self.app_config.models_path)
-
+                    model_path = self.relative_model_path(model_path)
                     model_config: ModelConfigBase = model_class.probe_config(str(model_path))
                     self.models[model_key] = model_config
                     new_models_found = True
@@ -942,12 +947,11 @@ class ModelManager(object):
                 except NotImplementedError as e:
                     self.logger.warning(e)

-        imported_models = self.autoimport()
-
+        imported_models = self.scan_autoimport_directory()
         if (new_models_found or imported_models) and self.config_path:
             self.commit()

-    def autoimport(self) -> Dict[str, AddModelResult]:
+    def scan_autoimport_directory(self) -> Dict[str, AddModelResult]:
         """
         Scan the autoimport directory (if defined) and import new models, delete defunct models.
         """
@@ -981,7 +985,7 @@ class ModelManager(object):
         # LS: hacky
         # Patch in the SD VAE from core so that it is available for use by the UI
         try:
-            self.heuristic_import({config.models_path / "core/convert/sd-vae-ft-mse"})
+            self.heuristic_import({self.resolve_model_path("core/convert/sd-vae-ft-mse")})
         except:
             pass

@@ -650,31 +650,6 @@ def process_and_execute(
     conn_out.send_bytes("*done*".encode("utf-8"))
     conn_out.close()

-
-def do_listings(opt) -> bool:
-    """List installed models of various sorts, and return
-    True if any were requested."""
-    model_manager = ModelManager(config.model_conf_path)
-    if opt.list_models == "diffusers":
-        print("Diffuser models:")
-        model_manager.print_models()
-    elif opt.list_models == "controlnets":
-        print("Installed Controlnet Models:")
-        cnm = model_manager.list_controlnet_models()
-        print(textwrap.indent("\n".join([x for x in cnm if cnm[x]]), prefix=" "))
-    elif opt.list_models == "loras":
-        print("Installed LoRA/LyCORIS Models:")
-        cnm = model_manager.list_lora_models()
-        print(textwrap.indent("\n".join([x for x in cnm if cnm[x]]), prefix=" "))
-    elif opt.list_models == "tis":
-        print("Installed Textual Inversion Embeddings:")
-        cnm = model_manager.list_ti_models()
-        print(textwrap.indent("\n".join([x for x in cnm if cnm[x]]), prefix=" "))
-    else:
-        return False
-    return True
-
-
 # --------------------------------------------------------
 def select_and_download_models(opt: Namespace):
     precision = "float32" if opt.full_precision else choose_precision(torch.device(choose_torch_device()))