"""
Utility (backend) functions used by model_install.py
"""
import os
import shutil
import warnings
from dataclasses import dataclass, field
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import List, Dict, Callable, Union, Set

import requests
from diffusers import StableDiffusionPipeline
from huggingface_hub import hf_hub_url, HfFolder, HfApi
from omegaconf import OmegaConf
from tqdm import tqdm

import invokeai.configs as configs
from invokeai.app.services.config import InvokeAIAppConfig
from invokeai.backend.model_management import ModelManager, ModelType, BaseModelType, ModelVariantType
from invokeai.backend.model_management.model_probe import ModelProbe, SchedulerPredictionType, ModelProbeInfo
from invokeai.backend.util import download_with_resume
from ..util.logging import InvokeAILogger

warnings.filterwarnings("ignore")

# --------------------------globals-----------------------
config = InvokeAIAppConfig.get_config()
logger = InvokeAILogger.getLogger(name='InvokeAI')

# the initial "configs" dir is now bundled in the `invokeai.configs` package
Dataset_path = Path(configs.__path__[0]) / "INITIAL_MODELS.yaml"

Config_preamble = """
# This file describes the alternative machine learning models
# available to InvokeAI script.
#
# To add a new model, follow the examples below. Each
# model requires a model config file, a weights file,
# and the width and height of the images it
# was trained on.
"""

LEGACY_CONFIGS = {
    BaseModelType.StableDiffusion1: {
        ModelVariantType.Normal: 'v1-inference.yaml',
        ModelVariantType.Inpaint: 'v1-inpainting-inference.yaml',
    },

    BaseModelType.StableDiffusion2: {
        ModelVariantType.Normal: {
            SchedulerPredictionType.Epsilon: 'v2-inference.yaml',
            SchedulerPredictionType.VPrediction: 'v2-inference-v.yaml',
        },
        ModelVariantType.Inpaint: {
            SchedulerPredictionType.Epsilon: 'v2-inpainting-inference.yaml',
            SchedulerPredictionType.VPrediction: 'v2-inpainting-inference-v.yaml',
        }
    }
}
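
# Illustrative lookups only. SD1 entries resolve directly from variant to a
# legacy config filename; SD2 entries add a scheduler prediction-type level:
#
#   LEGACY_CONFIGS[BaseModelType.StableDiffusion1][ModelVariantType.Normal]
#       => 'v1-inference.yaml'
#   LEGACY_CONFIGS[BaseModelType.StableDiffusion2][ModelVariantType.Normal][SchedulerPredictionType.VPrediction]
#       => 'v2-inference-v.yaml'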


@dataclass
class ModelInstallList:
    '''Class for listing models to be installed/removed'''
    install_models: List[str] = field(default_factory=list)
    remove_models: List[str] = field(default_factory=list)


@dataclass
class InstallSelections:
    install_models: List[str] = field(default_factory=list)
    remove_models: List[str] = field(default_factory=list)
    # scan_directory: Path = None
    # autoscan_on_startup: bool = False


@dataclass
class ModelLoadInfo:
    name: str
    model_type: ModelType
    base_type: BaseModelType
    path: Path = None
    repo_id: str = None
    description: str = ''
    installed: bool = False
    recommended: bool = False
    default: bool = False


class ModelInstall(object):
    def __init__(self,
                 config: InvokeAIAppConfig,
                 prediction_type_helper: Callable[[Path], SchedulerPredictionType] = None,
                 model_manager: ModelManager = None,
                 access_token: str = None):
        self.config = config
        self.mgr = model_manager or ModelManager(config.model_conf_path)
        self.datasets = OmegaConf.load(Dataset_path)
        self.prediction_helper = prediction_type_helper
        self.access_token = access_token or HfFolder.get_token()
        self.reverse_paths = self._reverse_paths(self.datasets)

    def all_models(self) -> Dict[str, ModelLoadInfo]:
        '''
        Return dict of model_key=>ModelLoadInfo objects.
        This method consolidates and simplifies the entries in both
        models.yaml and INITIAL_MODELS.yaml so that they can
        be treated uniformly. It also sorts the models alphabetically
        by their name, to improve the display somewhat.
        '''
        model_dict = dict()

        # first populate with the entries in INITIAL_MODELS.yaml
        for key, value in self.datasets.items():
            name, base, model_type = ModelManager.parse_key(key)
            value['name'] = name
            value['base_type'] = base
            value['model_type'] = model_type
            model_dict[key] = ModelLoadInfo(**value)

        # supplement with entries in models.yaml
        installed_models = self.mgr.list_models()
        for md in installed_models:
            base = md['base_model']
            model_type = md['type']
            name = md['name']
            key = ModelManager.create_key(name, base, model_type)
            if key in model_dict:
                model_dict[key].installed = True
            else:
                model_dict[key] = ModelLoadInfo(
                    name=name,
                    base_type=base,
                    model_type=model_type,
                    path=md.get('path'),
                    installed=True,
                )
        return {x: model_dict[x] for x in sorted(model_dict.keys(), key=lambda y: model_dict[y].name.lower())}
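
    # The returned mapping is keyed the same way as models.yaml entries.
    # A sketch of one entry (key format and field values illustrative only):
    #
    #   {'sd-1/main/stable-diffusion-1.5':
    #        ModelLoadInfo(name='stable-diffusion-1.5',
    #                      model_type=ModelType.Main,
    #                      base_type=BaseModelType.StableDiffusion1,
    #                      installed=True, ...)}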

    def starter_models(self) -> Set[str]:
        models = set()
        for key, value in self.datasets.items():
            name, base, model_type = ModelManager.parse_key(key)
            if model_type == ModelType.Main:
                models.add(key)
        return models

    def recommended_models(self) -> Set[str]:
        starters = self.starter_models()
        return set([x for x in starters if self.datasets[x].get('recommended', False)])

    def default_model(self) -> str:
        starters = self.starter_models()
        defaults = [x for x in starters if self.datasets[x].get('default', False)]
        return defaults[0] if defaults else None

    def install(self, selections: InstallSelections):
        job = 1
        jobs = len(selections.remove_models) + len(selections.install_models)

        # remove requested models
        for key in selections.remove_models:
            name, base, mtype = self.mgr.parse_key(key)
            logger.info(f'Deleting {mtype} model {name} [{job}/{jobs}]')
            self.mgr.del_model(name, base, mtype)
            job += 1

        # add requested models
        for path in selections.install_models:
            logger.info(f'Installing {path} [{job}/{jobs}]')
            self.heuristic_install(path)
            job += 1

        self.mgr.commit()
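
    # Minimal usage sketch (the model identifiers below are hypothetical):
    #
    #   installer = ModelInstall(config)
    #   installer.install(InstallSelections(
    #       install_models=['runwayml/stable-diffusion-v1-5'],
    #       remove_models=['sd-1/main/old-model'],
    #   ))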

    def heuristic_install(self,
                          model_path_id_or_url: Union[str, Path],
                          models_installed: Set[Path] = None) -> Set[Path]:
        if not models_installed:
            models_installed = set()

        # A little hack to allow nested routines to retrieve info on the requested ID
        self.current_id = model_path_id_or_url
        path = Path(model_path_id_or_url)

        try:
            # checkpoint file, or similar
            if path.is_file():
                models_installed.add(self._install_path(path))

            # folders style or similar
            elif path.is_dir() and any([(path / x).exists() for x in {'config.json', 'model_index.json', 'learned_embeds.bin'}]):
                models_installed.add(self._install_path(path))

            # recursive scan
            elif path.is_dir():
                for child in path.iterdir():
                    self.heuristic_install(child, models_installed=models_installed)

            # huggingface repo
            elif len(str(path).split('/')) == 2:
                models_installed.add(self._install_repo(str(path)))

            # a URL
            elif model_path_id_or_url.startswith(("http:", "https:", "ftp:")):
                models_installed.add(self._install_url(model_path_id_or_url))

            else:
                logger.warning(f'{str(model_path_id_or_url)} is not recognized as a local path, repo ID or URL. Skipping')

        except ValueError as e:
            logger.error(str(e))

        return models_installed
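
    # heuristic_install() dispatches on the form of its argument; all of the
    # following are accepted (examples hypothetical):
    #
    #   '/models/foo.safetensors'        -- single checkpoint file
    #   '/models/my-diffusers-folder'    -- folder with model_index.json etc.
    #   'stabilityai/sd-vae-ft-mse'      -- HuggingFace repo_id ('owner/name')
    #   'https://example.com/foo.ckpt'   -- direct download URL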

    # install a model from a local path. The optional info parameter is there to prevent
    # the model from being probed twice in the event that it has already been probed.
    def _install_path(self, path: Path, info: ModelProbeInfo = None) -> Path:
        try:
            # logger.debug(f'Probing {path}')
            info = info or ModelProbe().heuristic_probe(path, self.prediction_helper)
            model_name = path.stem if info.format == 'checkpoint' else path.name
            if self.mgr.model_exists(model_name, info.base_type, info.model_type):
                raise ValueError(f'A model named "{model_name}" is already installed.')
            attributes = self._make_attributes(path, info)
            self.mgr.add_model(model_name=model_name,
                               base_model=info.base_type,
                               model_type=info.model_type,
                               model_attributes=attributes,
                               )
        except Exception as e:
            logger.warning(f'{str(e)} Skipping registration.')
        return path

    def _install_url(self, url: str) -> Path:
        # copy to a staging area, probe, import and delete
        with TemporaryDirectory(dir=self.config.models_path) as staging:
            location = download_with_resume(url, Path(staging))
            if not location:
                logger.error(f'Unable to download {url}. Skipping.')
                return None
            info = ModelProbe().heuristic_probe(location)
            dest = self.config.models_path / info.base_type.value / info.model_type.value / location.name
            models_path = shutil.move(location, dest)

        # staged version will be garbage-collected at this time
        return self._install_path(Path(models_path), info)
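
    # e.g. _install_url('https://example.com/foo.safetensors') (URL
    # hypothetical) stages the download in a temporary directory, probes it,
    # moves it under models/<base>/<type>/, then registers it via _install_path().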

    def _install_repo(self, repo_id: str) -> Path:
        hinfo = HfApi().model_info(repo_id)

        # we try to figure out how to download this most economically
        # list all the files in the repo
        files = [x.rfilename for x in hinfo.siblings]
        location = None

        with TemporaryDirectory(dir=self.config.models_path) as staging:
            staging = Path(staging)
            if 'model_index.json' in files:
                location = self._download_hf_pipeline(repo_id, staging)  # pipeline
            else:
                for suffix in ['safetensors', 'bin']:
                    if f'pytorch_lora_weights.{suffix}' in files:
                        location = self._download_hf_model(repo_id, [f'pytorch_lora_weights.{suffix}'], staging)  # LoRA
                        break
                    elif self.config.precision == 'float16' and f'diffusion_pytorch_model.fp16.{suffix}' in files:  # vae, controlnet or some other standalone
                        files = ['config.json', f'diffusion_pytorch_model.fp16.{suffix}']
                        location = self._download_hf_model(repo_id, files, staging)
                        break
                    elif f'diffusion_pytorch_model.{suffix}' in files:
                        files = ['config.json', f'diffusion_pytorch_model.{suffix}']
                        location = self._download_hf_model(repo_id, files, staging)
                        break
                    elif f'learned_embeds.{suffix}' in files:
                        location = self._download_hf_model(repo_id, [f'learned_embeds.{suffix}'], staging)  # textual inversion
                        break
            if not location:
                logger.warning(f'Could not determine type of repo {repo_id}. Skipping install.')
                return

            info = ModelProbe().heuristic_probe(location, self.prediction_helper)
            if not info:
                logger.warning(f'Could not probe {location}. Skipping install.')
                return
            dest = self.config.models_path / info.base_type.value / info.model_type.value / self._get_model_name(repo_id, location)
            if dest.exists():
                shutil.rmtree(dest)
            shutil.copytree(location, dest)
            return self._install_path(dest, info)
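
    # File-selection behavior, illustrated: a repo listing 'model_index.json'
    # is fetched as a full diffusers pipeline; one listing
    # 'diffusion_pytorch_model.fp16.safetensors' (when precision is float16)
    # fetches just that file plus 'config.json'; 'learned_embeds.*' marks a
    # textual inversion embedding.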

    def _get_model_name(self, path_name: str, location: Path) -> str:
        '''
        Calculate a name for the model - primitive implementation.
        '''
        if key := self.reverse_paths.get(path_name):
            (name, base, mtype) = ModelManager.parse_key(key)
            return name
        else:
            return location.stem

    def _make_attributes(self, path: Path, info: ModelProbeInfo) -> dict:
        model_name = path.name if path.is_dir() else path.stem
        description = f'{info.base_type.value} {info.model_type.value} model {model_name}'
        if key := self.reverse_paths.get(self.current_id):
            if key in self.datasets:
                description = self.datasets[key].get('description') or description

        rel_path = self.relative_to_root(path)

        attributes = dict(
            path=str(rel_path),
            description=str(description),
            model_format=info.format,
        )
        if info.model_type == ModelType.Main:
            attributes.update(dict(variant=info.variant_type,))
            if info.format == "checkpoint":
                try:
                    possible_conf = path.with_suffix('.yaml')
                    if possible_conf.exists():
                        legacy_conf = str(self.relative_to_root(possible_conf))
                    elif info.base_type == BaseModelType.StableDiffusion2:
                        legacy_conf = Path(self.config.legacy_conf_dir, LEGACY_CONFIGS[info.base_type][info.variant_type][info.prediction_type])
                    else:
                        legacy_conf = Path(self.config.legacy_conf_dir, LEGACY_CONFIGS[info.base_type][info.variant_type])
                except KeyError:
                    legacy_conf = Path(self.config.legacy_conf_dir, 'v1-inference.yaml')  # best guess

                attributes.update(
                    dict(
                        config=str(legacy_conf)
                    )
                )
        return attributes
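
    # A typical result for a v1 checkpoint might be (values illustrative):
    #
    #   {'path': 'models/sd-1/main/foo.safetensors',
    #    'description': 'sd-1 main model foo',
    #    'model_format': 'checkpoint',
    #    'variant': ModelVariantType.Normal,
    #    'config': 'configs/stable-diffusion/v1-inference.yaml'}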

    def relative_to_root(self, path: Path) -> Path:
        root = self.config.root_path
        if path.is_relative_to(root):
            return path.relative_to(root)
        else:
            return path

    def _download_hf_pipeline(self, repo_id: str, staging: Path) -> Path:
        '''
        This retrieves a StableDiffusion model from cache or remote and then
        does a save_pretrained() to the indicated staging area.
        '''
        _, name = repo_id.split("/")
        revisions = ['fp16', 'main'] if self.config.precision == 'float16' else ['main']
        model = None
        for revision in revisions:
            try:
                model = StableDiffusionPipeline.from_pretrained(repo_id, revision=revision, safety_checker=None)
            except Exception:  # most errors are due to fp16 not being present. Fix this to catch other errors
                pass
            if model:
                break
        if not model:
            logger.error(f'Diffusers model {repo_id} could not be downloaded. Skipping.')
            return None
        model.save_pretrained(staging / name, safe_serialization=True)
        return staging / name

    def _download_hf_model(self, repo_id: str, files: List[str], staging: Path) -> Path:
        _, name = repo_id.split("/")
        location = staging / name
        paths = list()
        for filename in files:
            p = hf_download_with_resume(repo_id,
                                        model_dir=location,
                                        model_name=filename,
                                        access_token=self.access_token,
                                        )
            if p:
                paths.append(p)
            else:
                logger.warning(f'Could not download {filename} from {repo_id}.')

        return location if len(paths) > 0 else None

    @classmethod
    def _reverse_paths(cls, datasets) -> dict:
        '''
        Reverse mapping from repo_id/path to destination name.
        '''
        return {v.get('path') or v.get('repo_id'): k for k, v in datasets.items()}
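
    # For a hypothetical INITIAL_MODELS.yaml entry such as
    #
    #   sd-1/main/stable-diffusion-1.5:
    #       repo_id: runwayml/stable-diffusion-v1-5
    #
    # _reverse_paths() produces:
    #
    #   {'runwayml/stable-diffusion-v1-5': 'sd-1/main/stable-diffusion-1.5'}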


# -------------------------------------
def yes_or_no(prompt: str, default_yes=True):
    default = "y" if default_yes else "n"
    response = input(f"{prompt} [{default}] ") or default
    if default_yes:
        return response[0] not in ("n", "N")
    else:
        return response[0] in ("y", "Y")


# ---------------------------------------------
def hf_download_from_pretrained(
    model_class: object, model_name: str, destination: Path, **kwargs
):
    logger = InvokeAILogger.getLogger('InvokeAI')
    logger.addFilter(lambda x: 'fp16 is not a valid' not in x.getMessage())

    model = model_class.from_pretrained(
        model_name,
        resume_download=True,
        **kwargs,
    )
    model.save_pretrained(destination, safe_serialization=True)
    return destination
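
# Example call (destination path hypothetical): download a VAE with the
# diffusers AutoencoderKL class and save it locally:
#
#   from diffusers import AutoencoderKL
#   hf_download_from_pretrained(
#       AutoencoderKL, 'stabilityai/sd-vae-ft-mse',
#       Path('/tmp/models/sd-vae-ft-mse'),
#   )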


# ---------------------------------------------
def hf_download_with_resume(
    repo_id: str,
    model_dir: str,
    model_name: str,
    model_dest: Path = None,
    access_token: str = None,
) -> Path:
    model_dest = model_dest or Path(os.path.join(model_dir, model_name))
    os.makedirs(model_dir, exist_ok=True)

    url = hf_hub_url(repo_id, model_name)

    header = {"Authorization": f"Bearer {access_token}"} if access_token else {}
    open_mode = "wb"
    exist_size = 0

    if os.path.exists(model_dest):
        exist_size = os.path.getsize(model_dest)
        header["Range"] = f"bytes={exist_size}-"
        open_mode = "ab"

    resp = requests.get(url, headers=header, stream=True)
    total = int(resp.headers.get("content-length", 0))

    if (
        resp.status_code == 416
    ):  # "range not satisfiable", which means nothing to return
        logger.info(f"{model_name}: complete file found. Skipping.")
        return model_dest
    elif resp.status_code == 404:
        logger.warning("File not found")
        return None
    elif resp.status_code != 200:
        logger.warning(f"{model_name}: {resp.reason}")
    elif exist_size > 0:
        logger.info(f"{model_name}: partial file found. Resuming...")
    else:
        logger.info(f"{model_name}: Downloading...")

    try:
        with open(model_dest, open_mode) as file, tqdm(
            desc=model_name,
            initial=exist_size,
            total=total + exist_size,
            unit="iB",
            unit_scale=True,
            unit_divisor=1000,
        ) as bar:
            for data in resp.iter_content(chunk_size=1024):
                size = file.write(data)
                bar.update(size)
    except Exception as e:
        logger.error(f"An error occurred while downloading {model_name}: {str(e)}")
        return None
    return model_dest
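

# Example call (repo and filename hypothetical); returns the destination Path,
# or None if the download failed. A partially downloaded file at the
# destination is resumed via an HTTP Range request:
#
#   dest = hf_download_with_resume(
#       repo_id='runwayml/stable-diffusion-v1-5',
#       model_dir='/tmp/models',
#       model_name='v1-5-pruned-emaonly.safetensors',
#   )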