Mirror of https://github.com/invoke-ai/InvokeAI
Recognize and load diffusers-style LoRAs (.bin)
Prevent double-reporting of autoimported models - closes #3636
Allow autoimport of diffusers-style LoRA models - closes #3637
parent 90ae8ce26a
commit 863336acbb
@@ -193,7 +193,10 @@ class ModelInstall(object):
             models_installed.update(self._install_path(path))
 
         # folders style or similar
-        elif path.is_dir() and any([(path/x).exists() for x in {'config.json','model_index.json','learned_embeds.bin'}]):
+        elif path.is_dir() and any([(path/x).exists() for x in \
+                                    {'config.json','model_index.json','learned_embeds.bin','pytorch_lora_weights.bin'}
+                                    ]
+                                   ):
             models_installed.update(self._install_path(path))
 
         # recursive scan
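For context: the folder branch above treats a directory as an importable model when it contains any of a small set of marker files, and this commit adds pytorch_lora_weights.bin, the file name diffusers uses when saving LoRA weights, to that set. A minimal standalone sketch of the heuristic, assuming only the marker names visible in the diff (looks_like_model_folder is an illustrative helper, not an InvokeAI API):

from pathlib import Path

# Marker files that identify a model folder; 'pytorch_lora_weights.bin' is
# the file name diffusers writes for LoRA weights.
MODEL_FOLDER_MARKERS = {
    'config.json',
    'model_index.json',
    'learned_embeds.bin',
    'pytorch_lora_weights.bin',
}

def looks_like_model_folder(path: Path) -> bool:
    """Return True if the directory contains any known marker file."""
    return path.is_dir() and any((path / name).exists() for name in MODEL_FOLDER_MARKERS)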
@@ -785,7 +785,7 @@ class ModelManager(object):
                 if path in known_paths or path.parent in scanned_dirs:
                     scanned_dirs.add(path)
                     continue
-                if any([(path/x).exists() for x in {'config.json','model_index.json','learned_embeds.bin'}]):
+                if any([(path/x).exists() for x in {'config.json','model_index.json','learned_embeds.bin','pytorch_lora_weights.bin'}]):
                     new_models_found.update(installer.heuristic_import(path))
                     scanned_dirs.add(path)
 
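The autoimport scan above records each recognized model folder in scanned_dirs before importing it, so files nested inside an already-imported folder are skipped on later iterations rather than reported a second time. A rough sketch of that walk, with a hypothetical import_model callback standing in for installer.heuristic_import:

import os
from pathlib import Path

def scan_for_model_folders(root: Path, known_paths: set, import_model) -> dict:
    """Walk `root` and import each unrecognized model folder exactly once."""
    scanned_dirs = set()
    new_models_found = {}
    for dirpath, _dirnames, _filenames in os.walk(root):
        path = Path(dirpath)
        # Skip folders that are already known or nested inside an imported one.
        if path in known_paths or path.parent in scanned_dirs:
            scanned_dirs.add(path)
            continue
        if any((path / name).exists() for name in
               {'config.json', 'model_index.json',
                'learned_embeds.bin', 'pytorch_lora_weights.bin'}):
            new_models_found.update(import_model(path))
            scanned_dirs.add(path)
    return new_models_found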
@@ -794,7 +794,8 @@ class ModelManager(object):
                 if path in known_paths or path.parent in scanned_dirs:
                     continue
                 if path.suffix in {'.ckpt','.bin','.pth','.safetensors','.pt'}:
-                    new_models_found.update(installer.heuristic_import(path))
+                    import_result = installer.heuristic_import(path)
+                    new_models_found.update(import_result)
 
         self.logger.info(f'Scanned {items_scanned} files and directories, imported {len(new_models_found)} models')
         installed.update(new_models_found)
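The second pass handles loose checkpoint files rather than folders, keyed off the suffix set shown above; binding the result to import_result before merging simply makes the mapping returned by heuristic_import easier to inspect. A simplified file-level pass under the same assumptions as the sketch above:

from pathlib import Path

CHECKPOINT_SUFFIXES = {'.ckpt', '.bin', '.pth', '.safetensors', '.pt'}

def scan_for_model_files(files, known_paths: set, scanned_dirs: set, import_model) -> dict:
    """Import loose checkpoint files that are not inside an already-imported folder."""
    new_models_found = {}
    for path in files:
        if path in known_paths or path.parent in scanned_dirs:
            continue
        if path.suffix in CHECKPOINT_SUFFIXES:
            import_result = import_model(path)  # expected to return a mapping of imported models
            new_models_found.update(import_result)
    return new_models_found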
@@ -126,6 +126,8 @@ class ModelProbe(object):
                 return ModelType.Vae
             elif any(key.startswith(v) for v in {"lora_te_", "lora_unet_"}):
                 return ModelType.Lora
+            elif any(key.endswith(v) for v in {"to_k_lora.up.weight", "to_q_lora.down.weight"}):
+                return ModelType.Lora
             elif any(key.startswith(v) for v in {"control_model", "input_blocks"}):
                 return ModelType.ControlNet
             elif key in {"emb_params", "string_to_param"}:
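The probe above classifies a checkpoint purely from its state-dict keys: Kohya-style LoRAs use the lora_te_ / lora_unet_ key prefixes, while diffusers-style LoRAs (the .bin files this commit adds support for) use keys ending in attention-processor suffixes such as to_k_lora.up.weight. A hedged sketch of that check in isolation (the helper name and the commented loading example are illustrative, not InvokeAI's API):

import torch

def is_lora_state_dict(state_dict: dict) -> bool:
    """Detect Kohya-style and diffusers-style LoRA checkpoints from their keys."""
    for key in state_dict.keys():
        if any(key.startswith(prefix) for prefix in ("lora_te_", "lora_unet_")):
            return True  # Kohya / sd-scripts naming
        if any(key.endswith(suffix) for suffix in
               ("to_k_lora.up.weight", "to_q_lora.down.weight")):
            return True  # diffusers attention-processor naming
    return False

# Example: probe a diffusers-style LoRA saved as pytorch_lora_weights.bin
# state_dict = torch.load("pytorch_lora_weights.bin", map_location="cpu")
# print(is_lora_state_dict(state_dict))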
@@ -136,7 +138,7 @@ class ModelProbe(object):
             if len(ckpt) < 10 and all(isinstance(v, torch.Tensor) for v in ckpt.values()):
                 return ModelType.TextualInversion
 
-        raise ValueError("Unable to determine model type")
+        raise ValueError(f"Unable to determine model type for {model_path}")
 
     @classmethod
     def get_model_type_from_folder(cls, folder_path: Path, model: ModelMixin)->ModelType:
@@ -166,7 +168,7 @@ class ModelProbe(object):
             return type
 
         # give up
-        raise ValueError("Unable to determine model type")
+        raise ValueError(f"Unable to determine model type for {folder_path}")
 
     @classmethod
     def _scan_and_load_checkpoint(cls,model_path: Path)->dict:
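Including the offending path in the ValueError messages matters mostly during bulk scans, where a single unrecognized file would otherwise fail the probe with no hint of which file was at fault. A caller-side sketch of how such an error might be surfaced instead of propagated (hypothetical wrapper, not code from this diff):

import logging
from pathlib import Path

logger = logging.getLogger(__name__)

def try_import(path: Path, import_model) -> dict:
    """Attempt an import, logging probe failures instead of aborting the scan."""
    try:
        return import_model(path)
    except ValueError as err:
        # The probe now reports which file or folder it could not classify.
        logger.warning("Skipping %s: %s", path, err)
        return {}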