allow clip-vit-large-patch14 text encoder to coexist with tokenizer in same directory

This commit is contained in:
Lincoln Stein 2023-07-05 13:14:41 -04:00
parent ea81ce9489
commit cf173b522b

View File

@@ -228,6 +228,7 @@ class MigrateTo3(object):
         self._migrate_pretrained(CLIPTextModel,
                                  repo_id = repo_id,
                                  dest = target_dir / 'clip-vit-large-patch14',
+                                 force = True,
                                  **kwargs)
         # sd-2
@@ -291,21 +292,21 @@ class MigrateTo3(object):
     def _model_probe_to_path(self, info: ModelProbeInfo)->Path:
         return Path(self.dest_models, info.base_type.value, info.model_type.value)

-    def _migrate_pretrained(self, model_class, repo_id: str, dest: Path, **kwargs):
-        if dest.exists():
+    def _migrate_pretrained(self, model_class, repo_id: str, dest: Path, force:bool=False, **kwargs):
+        if dest.exists() and not force:
             logger.info(f'Skipping existing {dest}')
             return
         model = model_class.from_pretrained(repo_id, **kwargs)
-        self._save_pretrained(model, dest)
+        self._save_pretrained(model, dest, overwrite=force)

-    def _save_pretrained(self, model, dest: Path):
-        if dest.exists():
-            logger.info(f'Skipping existing {dest}')
-            return
+    def _save_pretrained(self, model, dest: Path, overwrite: bool=False):
         model_name = dest.name
-        download_path = dest.with_name(f'{model_name}.downloading')
-        model.save_pretrained(download_path, safe_serialization=True)
-        download_path.replace(dest)
+        if overwrite:
+            model.save_pretrained(dest, safe_serialization=True)
+        else:
+            download_path = dest.with_name(f'{model_name}.downloading')
+            model.save_pretrained(download_path, safe_serialization=True)
+            download_path.replace(dest)

     def _download_vae(self, repo_id: str, subfolder:str=None)->Path:
         vae = AutoencoderKL.from_pretrained(repo_id, cache_dir=self.root_directory / 'models/hub', subfolder=subfolder)
@@ -573,8 +574,10 @@ script, which will perform a full upgrade in place."""
     dest_directory = args.dest_directory
     assert dest_directory.is_dir(), f"{dest_directory} is not a valid directory"
-    assert (dest_directory / 'models').is_dir(), f"{dest_directory} does not contain a 'models' subdirectory"
-    assert (dest_directory / 'invokeai.yaml').exists(), f"{dest_directory} does not contain an InvokeAI init file."
+    # TODO: revisit
+    # assert (dest_directory / 'models').is_dir(), f"{dest_directory} does not contain a 'models' subdirectory"
+    # assert (dest_directory / 'invokeai.yaml').exists(), f"{dest_directory} does not contain an InvokeAI init file."

     do_migrate(root_directory,dest_directory)