[Enhancements] Allow user to specify VAE with !import_model and delete underlying model with !del_model (#2369)
Fix two deficiencies in the CLI's support for model management:
1. `!import_model` did not allow the user to specify a VAE file. This is now fixed.
2. `!del_model` did not offer the user the opportunity to delete the underlying weights file or diffusers directory. This is now fixed.
This commit is contained in: eaf7934d74
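In practice the two fixes show up as new interactive prompts. A hypothetical CLI session (model name and paths invented for illustration; the prompt strings are the ones added in this commit):

    invoke> !import_model models/ldm/stable-diffusion-v1/some-model.ckpt
    Configuration file for this model: configs/stable-diffusion/v1-inference.yaml
    VAE file for this model (leave blank for none): models/ldm/stable-diffusion-v1/vae-ft-mse-840000-ema-pruned.ckpt
    Make this the default model? [n] n

    invoke> !del_model some-model
    Remove some-model from the list of models known to InvokeAI? [y] y
    Completely remove the model file or directory from disk? [n] y
    ** deleting file .../models/ldm/stable-diffusion-v1/some-model.ckpt
    ** some-model deleted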
@@ -93,6 +93,7 @@ voxel_art-1.0:
   format: ckpt
   vae:
+    repo_id: stabilityai/sd-vae-ft-mse
     file: vae-ft-mse-840000-ema-pruned.ckpt
   recommended: False
   width: 512
   height: 512
@@ -102,7 +103,7 @@ ft-mse-improved-autoencoder-840000:
   format: ckpt
   config: VAE/default
   file: vae-ft-mse-840000-ema-pruned.ckpt
-  recommended: False
+  recommended: True
   width: 512
   height: 512
 trinart_vae:
@@ -852,6 +852,7 @@ class Generate:
             model_data = cache.get_model(model_name)
         except Exception as e:
             print(f'** model {model_name} could not be loaded: {str(e)}')
+            print(traceback.format_exc(), file=sys.stderr)
             if previous_model_name is None:
                 raise e
             print(f'** trying to reload previous model')
@@ -578,7 +578,7 @@ def import_model(model_path:str, gen, opt, completer):
     elif re.match('^[\w.+-]+/[\w.+-]+$',model_path):
         model_name = import_diffuser_model(model_path, gen, opt, completer)
     elif os.path.isdir(model_path):
-        model_name = import_diffuser_model(model_path, gen, opt, completer)
+        model_name = import_diffuser_model(Path(model_path), gen, opt, completer)
     else:
         print(f'** {model_path} is neither the path to a .ckpt file nor a diffusers repository id. Can\'t import.')
@@ -590,7 +590,7 @@ def import_model(model_path:str, gen, opt, completer):
         gen.model_manager.del_model(model_name)
         return

-    if input('Make this the default model? [n] ') in ('y','Y'):
+    if input('Make this the default model? [n] ').strip() in ('y','Y'):
         gen.model_manager.set_default_model(model_name)

     gen.model_manager.commit(opt.conf)
@@ -607,10 +607,14 @@ def import_diffuser_model(path_or_repo:str, gen, opt, completer)->str:
         model_name=default_name,
         model_description=default_description
     )
+    vae = None
+    if input('Replace this model\'s VAE with "stabilityai/sd-vae-ft-mse"? [n] ').strip() in ('y','Y'):
+        vae = dict(repo_id='stabilityai/sd-vae-ft-mse')

     if not manager.import_diffuser_model(
         path_or_repo,
         model_name = model_name,
+        vae = vae,
         description = model_description):
         print('** model failed to import')
         return None
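The CLI passes the answer through to ModelManager.import_diffuser_model() as a dict. A minimal programmatic sketch of the same call (the manager instance and repo id are assumptions for illustration):

    manager = gen.model_manager   # as used elsewhere in the CLI code
    manager.import_diffuser_model(
        'some-org/some-diffusers-model',                # hypothetical repo id
        model_name='some-model',
        description='imported diffusers model some-model',
        vae=dict(repo_id='stabilityai/sd-vae-ft-mse'),  # the dict built by the prompt above
    )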
@@ -628,17 +632,28 @@ def import_ckpt_model(path_or_url:str, gen, opt, completer)->str:
     )
     config_file = None
     default = Path(Globals.root,'configs/stable-diffusion/v1-inference.yaml')

     completer.complete_extensions(('.yaml','.yml'))
     completer.set_line(str(default))
     done = False
     while not done:
         config_file = input('Configuration file for this model: ').strip()
         done = os.path.exists(config_file)

+    completer.complete_extensions(('.ckpt','.safetensors'))
+    vae = None
+    default = Path(Globals.root,'models/ldm/stable-diffusion-v1/vae-ft-mse-840000-ema-pruned.ckpt')
+    completer.set_line(str(default))
+    done = False
+    while not done:
+        vae = input('VAE file for this model (leave blank for none): ').strip() or None
+        done = (not vae) or os.path.exists(vae)
+    completer.complete_extensions(None)
+
     if not manager.import_ckpt_model(
         path_or_url,
         config = config_file,
+        vae = vae,
         model_name = model_name,
         model_description = model_description,
         commit_to_conf = opt.conf,
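For a checkpoint import, the VAE is a file path rather than a repo id, and it lands in the model's config stanza via new_config['vae'] (see the import_ckpt_model() hunk further down). A sketch under the same assumptions (paths and names invented):

    manager.import_ckpt_model(
        'models/ldm/stable-diffusion-v1/some-model.ckpt',   # local .ckpt or http(s) URL
        config='configs/stable-diffusion/v1-inference.yaml',
        vae='models/ldm/stable-diffusion-v1/vae-ft-mse-840000-ema-pruned.ckpt',
        model_name='some-model',
        model_description='a ckpt model with a custom VAE',
        commit_to_conf=opt.conf,                            # persist to models.yaml
    )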
@@ -710,7 +725,7 @@ def optimize_model(model_name_or_path:str, gen, opt, completer):
         return

     completer.update_models(gen.model_manager.list_models())
-    if input(f'Load optimized model {model_name}? [y] ') not in ('n','N'):
+    if input(f'Load optimized model {model_name}? [y] ').strip() not in ('n','N'):
         gen.set_model(model_name)

     response = input(f'Delete the original .ckpt file at {ckpt_path}? [n] ')
@@ -726,7 +741,12 @@ def del_config(model_name:str, gen, opt, completer):
     if model_name not in gen.model_manager.config:
         print(f"** Unknown model {model_name}")
         return
-    gen.model_manager.del_model(model_name)
+
+    if input(f'Remove {model_name} from the list of models known to InvokeAI? [y] ').strip().startswith(('n','N')):
+        return
+
+    delete_completely = input('Completely remove the model file or directory from disk? [n] ').startswith(('y','Y'))
+    gen.model_manager.del_model(model_name,delete_files=delete_completely)
     gen.model_manager.commit(opt.conf)
     print(f'** {model_name} deleted')
     completer.update_models(gen.model_manager.list_models())
@@ -18,7 +18,9 @@ import traceback
 import warnings
 import safetensors.torch
 from pathlib import Path
+from shutil import move, rmtree
 from typing import Union, Any
+from huggingface_hub import scan_cache_dir
 from ldm.util import download_with_progress_bar

 import torch
@@ -35,6 +37,9 @@ from ldm.invoke.globals import Globals, global_models_dir, global_autoscan_dir,
 from ldm.util import instantiate_from_config, ask_user

 DEFAULT_MAX_MODELS=2
+VAE_TO_REPO_ID = { # hack, see note in convert_and_import()
+    'vae-ft-mse-840000-ema-pruned': 'stabilityai/sd-vae-ft-mse',
+}

 class ModelManager(object):
     def __init__(self,
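The table is keyed on the stem of the VAE checkpoint's filename, so the lookup in convert_and_import() reduces to roughly this (a sketch; the real code below uses a walrus assignment):

    from pathlib import Path

    vae_ckpt_path = 'vae-ft-mse-840000-ema-pruned.ckpt'     # example value
    vae_basename = Path(vae_ckpt_path).stem                 # 'vae-ft-mse-840000-ema-pruned'
    diffusers_vae = VAE_TO_REPO_ID.get(vae_basename, None)  # 'stabilityai/sd-vae-ft-mse'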
@@ -230,7 +235,7 @@ class ModelManager(object):
             line = f'\033[1m{line}\033[0m'
             print(line)

-    def del_model(self, model_name:str) -> None:
+    def del_model(self, model_name:str, delete_files:bool=False) -> None:
         '''
         Delete the named model.
         '''
@@ -238,9 +243,25 @@ class ModelManager(object):
         if model_name not in omega:
             print(f'** Unknown model {model_name}')
             return
+        # save these for use in deletion later
+        conf = omega[model_name]
+        repo_id = conf.get('repo_id',None)
+        path = self._abs_path(conf.get('path',None))
+        weights = self._abs_path(conf.get('weights',None))
+
         del omega[model_name]
         if model_name in self.stack:
             self.stack.remove(model_name)
+        if delete_files:
+            if weights:
+                print(f'** deleting file {weights}')
+                Path(weights).unlink(missing_ok=True)
+            elif path:
+                print(f'** deleting directory {path}')
+                rmtree(path,ignore_errors=True)
+            elif repo_id:
+                print(f'** deleting the cached model directory for {repo_id}')
+                self._delete_model_from_cache(repo_id)

     def add_model(self, model_name:str, model_attributes:dict, clobber:bool=False) -> None:
         '''
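Note the precedence when delete_files=True: a ckpt weights file is unlinked first; failing that, a diffusers directory is rmtree'd; failing that, the Hugging Face cache entry for repo_id is purged. A usage sketch (model names invented):

    manager = gen.model_manager
    # drop the config entry but leave everything on disk (the old behavior)
    manager.del_model('some-ckpt-model')
    # drop the entry and delete the underlying weights/directory/cache as well
    manager.del_model('some-diffusers-model', delete_files=True)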
@@ -417,7 +438,7 @@ class ModelManager(object):
             safety_checker=None,
             local_files_only=not Globals.internet_available
         )
-        if 'vae' in mconfig:
+        if 'vae' in mconfig and mconfig['vae'] is not None:
             vae = self._load_vae(mconfig['vae'])
             pipeline_args.update(vae=vae)
         if not isinstance(name_or_path,Path):
@@ -523,11 +544,12 @@ class ModelManager(object):
         print('>> Model scanned ok!')

     def import_diffuser_model(self,
-                              repo_or_path:Union[str,Path],
-                              model_name:str=None,
-                              description:str=None,
-                              commit_to_conf:Path=None,
-                              )->bool:
+                              repo_or_path:Union[str,Path],
+                              model_name:str=None,
+                              description:str=None,
+                              vae:dict=None,
+                              commit_to_conf:Path=None,
+                              )->bool:
         '''
         Attempts to install the indicated diffuser model and returns True if successful.
@@ -543,6 +565,7 @@ class ModelManager(object):
         description = description or f'imported diffusers model {model_name}'
         new_config = dict(
             description=description,
+            vae=vae,
             format='diffusers',
         )
         if isinstance(repo_or_path,Path) and repo_or_path.exists():
@@ -556,18 +579,22 @@ class ModelManager(object):
         return True

     def import_ckpt_model(self,
-                          weights:Union[str,Path],
-                          config:Union[str,Path]='configs/stable-diffusion/v1-inference.yaml',
-                          model_name:str=None,
-                          model_description:str=None,
-                          commit_to_conf:Path=None,
-                          )->bool:
+                          weights:Union[str,Path],
+                          config:Union[str,Path]='configs/stable-diffusion/v1-inference.yaml',
+                          vae:Union[str,Path]=None,
+                          model_name:str=None,
+                          model_description:str=None,
+                          commit_to_conf:Path=None,
+                          )->bool:
         '''
         Attempts to install the indicated ckpt file and returns True if successful.

         "weights" can be either a path-like object corresponding to a local .ckpt file
         or a http/https URL pointing to a remote model.

+        "vae" is a Path or str object pointing to a ckpt or safetensors file to be used
+        as the VAE for this model.
+
         "config" is the model config file to use with this ckpt file. It defaults to
         v1-inference.yaml. If a URL is provided, the config will be downloaded.
@@ -594,6 +621,8 @@ class ModelManager(object):
             width=512,
             height=512
         )
+        if vae:
+            new_config['vae'] = vae
         self.add_model(model_name, new_config, True)
         if commit_to_conf:
             self.commit(commit_to_conf)
@@ -633,7 +662,7 @@ class ModelManager(object):

     def convert_and_import(self,
                            ckpt_path:Path,
-                           diffuser_path:Path,
+                           diffusers_path:Path,
                            model_name=None,
                            model_description=None,
                            commit_to_conf:Path=None,
@@ -645,46 +674,56 @@ class ModelManager(object):
         new_config = None
         from ldm.invoke.ckpt_to_diffuser import convert_ckpt_to_diffuser
         import transformers
-        if diffuser_path.exists():
-            print(f'ERROR: The path {str(diffuser_path)} already exists. Please move or remove it and try again.')
+        if diffusers_path.exists():
+            print(f'ERROR: The path {str(diffusers_path)} already exists. Please move or remove it and try again.')
             return

-        model_name = model_name or diffuser_path.name
+        model_name = model_name or diffusers_path.name
         model_description = model_description or f'Optimized version of {model_name}'
-        print(f'>> {model_name}: optimizing (30-60s).')
+        print(f'>> Optimizing {model_name} (30-60s)')
         try:
             verbosity = transformers.logging.get_verbosity()
             transformers.logging.set_verbosity_error()
-            convert_ckpt_to_diffuser(ckpt_path, diffuser_path,extract_ema=True)
+            convert_ckpt_to_diffuser(ckpt_path, diffusers_path,extract_ema=True)
             transformers.logging.set_verbosity(verbosity)
-            print(f'>> Success. Optimized model is now located at {str(diffuser_path)}')
-            print(f'>> Writing new config file entry for {model_name}...',end='')
+            print(f'>> Success. Optimized model is now located at {str(diffusers_path)}')
+            print(f'>> Writing new config file entry for {model_name}')
             new_config = dict(
-                path=str(diffuser_path),
+                path=str(diffusers_path),
                 description=model_description,
                 format='diffusers',
             )
+
+            # HACK (LS): in the event that the original entry is using a custom ckpt VAE, we try to
+            # map that VAE onto a diffuser VAE using a hard-coded dictionary.
+            # I would prefer to do this differently: We load the ckpt model into memory, swap the
+            # VAE in memory, and then pass that to convert_ckpt_to_diffuser() so that the swapped
+            # VAE is built into the model. However, when I tried this I got obscure key errors.
+            if model_name in self.config and (vae_ckpt_path := self.model_info(model_name)['vae']):
+                vae_basename = Path(vae_ckpt_path).stem
+                diffusers_vae = None
+                if (diffusers_vae := VAE_TO_REPO_ID.get(vae_basename,None)):
+                    print(f'>> {vae_basename} VAE corresponds to known {diffusers_vae} diffusers version')
+                    new_config.update(
+                        vae = {'repo_id': diffusers_vae}
+                    )
+                else:
+                    print(f'** Custom VAE "{vae_basename}" found, but corresponding diffusers model unknown')
+                    print(f'** Using "stabilityai/sd-vae-ft-mse"; If this isn\'t right, please edit the model config')
+                    new_config.update(
+                        vae = {'repo_id': 'stabilityai/sd-vae-ft-mse'}
+                    )
+
             self.del_model(model_name)
             self.add_model(model_name, new_config, True)
             if commit_to_conf:
                 self.commit(commit_to_conf)
+            print('>> Conversion succeeded')
         except Exception as e:
             print(f'** Conversion failed: {str(e)}')
             traceback.print_exc()

-        print('done.')
         return new_config

     def del_config(self, model_name:str, gen, opt, completer):
         current_model = gen.model_name
         if model_name == current_model:
             print("** Can't delete active model. !switch to another model first. **")
             return
         gen.model_manager.del_model(model_name)
         gen.model_manager.commit(opt.conf)
         print(f'** {model_name} deleted')
         completer.del_model(model_name)

     def search_models(self, search_folder):
         print(f'>> Finding Models In: {search_folder}')
         models_folder_ckpt = Path(search_folder).glob('**/*.ckpt')
@@ -766,7 +805,6 @@ class ModelManager(object):

         print('** Legacy version <= 2.2.5 model directory layout detected. Reorganizing.')
         print('** This is a quick one-time operation.')
-        from shutil import move, rmtree

         # transformer files get moved into the hub directory
         if cls._is_huggingface_hub_directory_present():
@@ -982,6 +1020,27 @@ class ModelManager(object):

         return vae

+    @staticmethod
+    def _delete_model_from_cache(repo_id):
+        cache_info = scan_cache_dir(global_cache_dir('diffusers'))
+
+        # I'm sure there is a way to do this with comprehensions
+        # but the code quickly became incomprehensible!
+        hashes_to_delete = set()
+        for repo in cache_info.repos:
+            if repo.repo_id==repo_id:
+                for revision in repo.revisions:
+                    hashes_to_delete.add(revision.commit_hash)
+        strategy = cache_info.delete_revisions(*hashes_to_delete)
+        print(f'** deletion of this model is expected to free {strategy.expected_freed_size_str}')
+        strategy.execute()
+
+    @staticmethod
+    def _abs_path(path:Union[str,Path])->Path:
+        if path is None or Path(path).is_absolute():
+            return path
+        return Path(Globals.root,path).resolve()
+
     @staticmethod
     def _is_huggingface_hub_directory_present() -> bool:
         return os.getenv('HF_HOME') is not None or os.getenv('XDG_CACHE_HOME') is not None
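_delete_model_from_cache() is built on huggingface_hub's cache-scanning API: scan_cache_dir() returns repo and revision records, delete_revisions() plans the deletion, and execute() performs it. As an aside to the comment above, the collection step does fit in a comprehension; a self-contained sketch against the default cache (the repo id is an example):

    from huggingface_hub import scan_cache_dir

    repo_id = 'stabilityai/sd-vae-ft-mse'   # example target
    cache_info = scan_cache_dir()           # the commit passes global_cache_dir('diffusers') instead
    hashes_to_delete = {
        revision.commit_hash
        for repo in cache_info.repos if repo.repo_id == repo_id
        for revision in repo.revisions
    }
    strategy = cache_info.delete_revisions(*hashes_to_delete)
    print(f'** deletion of this model is expected to free {strategy.expected_freed_size_str}')
    strategy.execute()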