Store & load 🤗 models at XDG_CACHE_HOME if HF_HOME is not set (#2359)

This commit allows InvokeAI to store & load 🤗 models at the location set
by the `XDG_CACHE_HOME` environment variable if `HF_HOME` is not set.

With this commit, a user who sets either the `HF_HOME` or the
`XDG_CACHE_HOME` environment variable can have InvokeAI reuse the existing
cache directory used by the 🤗 libraries by default. I happened to benefit
from this change because I have a Jupyter Notebook that uses a 🤗 diffusers
model stored in the `XDG_CACHE_HOME` directory.

Reference:
https://huggingface.co/docs/huggingface_hub/main/en/package_reference/environment_variables#xdgcachehome
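
A minimal, standalone sketch of the lookup order this commit introduces (for illustration only: `resolve_hf_cache_root` and `DEFAULT_ROOT` are hypothetical names, with `DEFAULT_ROOT` standing in for InvokeAI's `Globals.root`):

import os
from pathlib import Path

# Hypothetical stand-in for InvokeAI's Globals.root; illustration only.
DEFAULT_ROOT = Path.home() / "invokeai"

def resolve_hf_cache_root() -> Path:
    """Mirror the lookup order added by this commit:
    1. HF_HOME, if set
    2. $XDG_CACHE_HOME/huggingface, if XDG_CACHE_HOME is set
    3. otherwise, the application's own models directory
    """
    home = os.getenv("HF_HOME")
    if home is None:
        xdg = os.getenv("XDG_CACHE_HOME")
        if xdg is not None:
            # huggingface_hub defaults its cache to $XDG_CACHE_HOME/huggingface
            home = os.path.join(xdg, "huggingface")
    if home is not None:
        return Path(home)
    return DEFAULT_ROOT / "models"

if __name__ == "__main__":
    print(resolve_hf_cache_root())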
Lincoln Stein 2023-01-18 17:05:06 -05:00 committed by GitHub
commit ce17051b28
2 changed files with 38 additions and 16 deletions


@@ -66,7 +66,17 @@ def global_cache_dir(subdir:Union[str,Path]='')->Path:
     global_cache_dir('diffusers')
     global_cache_dir('transformers')
     '''
-    if (home := os.environ.get('HF_HOME')):
+    home: str = os.getenv('HF_HOME')
+
+    if home is None:
+        home = os.getenv('XDG_CACHE_HOME')
+
+        if home is not None:
+            # Set `home` to $XDG_CACHE_HOME/huggingface, which is the default location mentioned in HuggingFace Hub Client Library.
+            # See: https://huggingface.co/docs/huggingface_hub/main/en/package_reference/environment_variables#xdgcachehome
+            home += os.sep + 'huggingface'
+
+    if home is not None:
         return Path(home,subdir)
     else:
         return Path(Globals.root,'models',subdir)
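
To make the effect of the patch concrete, an illustrative mapping of environment configurations to the value global_cache_dir('diffusers') would return (the paths below are examples, not shipped defaults):

# Illustrative only -- example paths, not defaults.
#
#   Environment                        global_cache_dir('diffusers')
#   ---------------------------------  -------------------------------------
#   HF_HOME=/data/hf                   /data/hf/diffusers
#   XDG_CACHE_HOME=/home/me/.cache     /home/me/.cache/huggingface/diffusers
#   (neither variable set)             <Globals.root>/models/diffusers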


@@ -758,7 +758,11 @@ class ModelManager(object):
         from shutil import move, rmtree
 
         # transformer files get moved into the hub directory
-        hub = models_dir / 'hub'
+        if cls._is_huggingface_hub_directory_present():
+            hub = global_cache_dir('hub')
+        else:
+            hub = models_dir / 'hub'
+
         os.makedirs(hub, exist_ok=True)
         for model in legacy_locations:
             source = models_dir / model
@@ -771,7 +775,11 @@ class ModelManager(object):
                     move(source, dest)
 
         # anything else gets moved into the diffusers directory
-        diffusers = models_dir / 'diffusers'
+        if cls._is_huggingface_hub_directory_present():
+            diffusers = global_cache_dir('diffusers')
+        else:
+            diffusers = models_dir / 'diffusers'
+
         os.makedirs(diffusers, exist_ok=True)
         for root, dirs, _ in os.walk(models_dir, topdown=False):
             for dir in dirs:
@@ -962,3 +970,7 @@ class ModelManager(object):
             print(f'** Could not load VAE {name_or_path}: {str(deferred_error)}')
         return vae
+
+    @staticmethod
+    def _is_huggingface_hub_directory_present() -> bool:
+        return os.getenv('HF_HOME') is not None or os.getenv('XDG_CACHE_HOME') is not None
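
A quick, self-contained way to sanity-check the new helper's behaviour (the function is re-implemented here rather than imported from InvokeAI, so the snippet runs on its own):

import os
import unittest
from unittest import mock


def is_huggingface_hub_directory_present() -> bool:
    # Same check as ModelManager._is_huggingface_hub_directory_present()
    return os.getenv("HF_HOME") is not None or os.getenv("XDG_CACHE_HOME") is not None


class TestHubDirectoryDetection(unittest.TestCase):
    def test_neither_variable_set(self):
        with mock.patch.dict(os.environ, {}, clear=True):
            self.assertFalse(is_huggingface_hub_directory_present())

    def test_hf_home_set(self):
        with mock.patch.dict(os.environ, {"HF_HOME": "/data/hf"}, clear=True):
            self.assertTrue(is_huggingface_hub_directory_present())

    def test_xdg_cache_home_set(self):
        with mock.patch.dict(os.environ, {"XDG_CACHE_HOME": "/home/me/.cache"}, clear=True):
            self.assertTrue(is_huggingface_hub_directory_present())


if __name__ == "__main__":
    unittest.main()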