From 241313c4a651dac5662fff01ba65e2473ca631cb Mon Sep 17 00:00:00 2001
From: gogurtenjoyer <36354352+gogurtenjoyer@users.noreply.github.com>
Date: Thu, 12 Jan 2023 14:09:35 -0800
Subject: [PATCH 1/4] Update automated install doc - link to MS C libs

Updated the link for the MS Visual C libraries. I'm not sure whether MS changed the location of the files, but this new link leads directly to the file downloads.
---
 docs/installation/010_INSTALL_AUTOMATED.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/installation/010_INSTALL_AUTOMATED.md b/docs/installation/010_INSTALL_AUTOMATED.md
index 0bafc5c861..8007c59b6d 100644
--- a/docs/installation/010_INSTALL_AUTOMATED.md
+++ b/docs/installation/010_INSTALL_AUTOMATED.md
@@ -52,7 +52,7 @@ version of InvokeAI with the option to upgrade to experimental versions later.
         find python, then open the Python installer again and choose
         "Modify" existing installation.
 
-        - Installation requires an up to date version of the Microsoft Visual C libraries. Please install the 2015-2022 libraries available here: https://learn.microsoft.com/en-us/cpp/windows/deploying-native-desktop-applications-visual-cpp?view=msvc-170
+        - Installation requires an up to date version of the Microsoft Visual C libraries. Please install the 2015-2022 libraries available here: https://learn.microsoft.com/en-US/cpp/windows/latest-supported-vc-redist?view=msvc-170
 
     === "Mac users"
 

From 2282e681f79a4c3550de04dda93e67c23ed1613a Mon Sep 17 00:00:00 2001
From: Daya Adianto <addianto@users.noreply.github.com>
Date: Wed, 18 Jan 2023 19:02:16 +0700
Subject: [PATCH 2/4] =?UTF-8?q?Store=20&=20load=20=F0=9F=A4=97=20models=20?=
 =?UTF-8?q?at=20XDG=5FCACHE=5FHOME=20if=20HF=5FHOME=20is=20not=20set?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This commit allows InvokeAI to store & load 🤗 models at a location
set by the `XDG_CACHE_HOME` environment variable if `HF_HOME` is not set.

Reference: https://huggingface.co/docs/huggingface_hub/main/en/package_reference/environment_variables#xdgcachehome
---
 ldm/invoke/globals.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/ldm/invoke/globals.py b/ldm/invoke/globals.py
index 897bf5e204..137171aa33 100644
--- a/ldm/invoke/globals.py
+++ b/ldm/invoke/globals.py
@@ -62,11 +62,21 @@ def global_cache_dir(subdir:Union[str,Path]='')->Path:
     '''
     Returns Path to the model cache directory. If a subdirectory
     is provided, it will be appended to the end of the path, allowing
-    for huggingface-style conventions: 
+    for huggingface-style conventions:
          global_cache_dir('diffusers')
          global_cache_dir('transformers')
     '''
-    if (home := os.environ.get('HF_HOME')):
+    home: str = os.getenv('HF_HOME')
+
+    if home is None:
+        home = os.getenv('XDG_CACHE_HOME')
+
+        if home is not None:
+            # Set `home` to $XDG_CACHE_HOME/huggingface, which is the default location mentioned in HuggingFace Hub Client Library.
+            # See: https://huggingface.co/docs/huggingface_hub/main/en/package_reference/environment_variables#xdgcachehome
+            home += os.sep + 'huggingface'
+
+    if home is not None:
         return Path(home,subdir)
     else:
         return Path(Globals.root,'models',subdir)

From aa4e8d8cf3e5930380442311203e96995e7f8471 Mon Sep 17 00:00:00 2001
From: Daya Adianto <addianto@users.noreply.github.com>
Date: Wed, 18 Jan 2023 21:02:31 +0700
Subject: [PATCH 3/4] =?UTF-8?q?Migrate=20legacy=20models=20(pre-2.3.0)=20t?=
 =?UTF-8?q?o=20=F0=9F=A4=97=20cache=20directory=20if=20exists?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 ldm/invoke/model_manager.py | 40 ++++++++++++++++++++++++-------------
 1 file changed, 26 insertions(+), 14 deletions(-)

diff --git a/ldm/invoke/model_manager.py b/ldm/invoke/model_manager.py
index 6dce95b415..399d9bc934 100644
--- a/ldm/invoke/model_manager.py
+++ b/ldm/invoke/model_manager.py
@@ -166,7 +166,7 @@ class ModelManager(object):
             # don't include VAEs in listing (legacy style)
             if 'config' in stanza and '/VAE/' in stanza['config']:
                 continue
-            
+
             models[name] = dict()
             format = stanza.get('format','ckpt') # Determine Format
 
@@ -183,7 +183,7 @@ class ModelManager(object):
                 format = format,
                 status = status,
             )
-            
+
             # Checkpoint Config Parse
             if format == 'ckpt':
                 models[name].update(
@@ -193,7 +193,7 @@ class ModelManager(object):
                     width = str(stanza.get('width', 512)),
                     height = str(stanza.get('height', 512)),
                 )
-                
+
             # Diffusers Config Parse
             if (vae := stanza.get('vae',None)):
                 if isinstance(vae,DictConfig):
@@ -202,14 +202,14 @@ class ModelManager(object):
                         path = str(vae.get('path',None)),
                         subfolder = str(vae.get('subfolder',None))
                     )
-                    
+
             if format == 'diffusers':
                 models[name].update(
                     vae = vae,
                     repo_id = str(stanza.get('repo_id', None)),
                     path = str(stanza.get('path',None)),
                 )
-        
+
         return models
 
     def print_models(self) -> None:
@@ -257,7 +257,7 @@ class ModelManager(object):
         assert (clobber or model_name not in omega), f'attempt to overwrite existing model definition "{model_name}"'
 
         omega[model_name] = model_attributes
-        
+
         if 'weights' in omega[model_name]:
             omega[model_name]['weights'].replace('\\','/')
 
@@ -554,12 +554,12 @@ class ModelManager(object):
         '''
         Attempts to install the indicated ckpt file and returns True if successful.
 
-        "weights" can be either a path-like object corresponding to a local .ckpt file 
+        "weights" can be either a path-like object corresponding to a local .ckpt file
         or a http/https URL pointing to a remote model.
 
         "config" is the model config file to use with this ckpt file. It defaults to
         v1-inference.yaml. If a URL is provided, the config will be downloaded.
-        
+
         You can optionally provide a model name and/or description. If not provided,
         then these will be derived from the weight file name. If you provide a commit_to_conf
         path to the configuration file, then the new entry will be committed to the
@@ -572,7 +572,7 @@ class ModelManager(object):
             return False
         if config_path is None or not config_path.exists():
             return False
-            
+
         model_name = model_name or Path(weights).stem
         model_description = model_description or f'imported stable diffusion weights file {model_name}'
         new_config = dict(
@@ -587,7 +587,7 @@ class ModelManager(object):
         if commit_to_conf:
             self.commit(commit_to_conf)
         return True
-                       
+
     def autoconvert_weights(
             self,
             conf_path:Path,
@@ -660,7 +660,7 @@ class ModelManager(object):
         except Exception as e:
             print(f'** Conversion failed: {str(e)}')
             traceback.print_exc()
-            
+
         print('done.')
         return new_config
 
@@ -756,9 +756,13 @@ class ModelManager(object):
         print('** Legacy version <= 2.2.5 model directory layout detected. Reorganizing.')
         print('** This is a quick one-time operation.')
         from shutil import move, rmtree
-        
+
         # transformer files get moved into the hub directory
-        hub = models_dir / 'hub'
+        if cls._is_huggingface_hub_directory_present():
+            hub = global_cache_dir() / 'hub'
+        else:
+            hub = models_dir / 'hub'
+
         os.makedirs(hub, exist_ok=True)
         for model in legacy_locations:
             source = models_dir / model
@@ -771,7 +775,11 @@ class ModelManager(object):
                     move(source, dest)
 
         # anything else gets moved into the diffusers directory
-        diffusers = models_dir / 'diffusers'
+        if cls._is_huggingface_hub_directory_present():
+            diffusers = global_cache_dir() / 'diffusers'
+        else:
+            diffusers = models_dir / 'diffusers'
+
         os.makedirs(diffusers, exist_ok=True)
         for root, dirs, _ in os.walk(models_dir, topdown=False):
             for dir in dirs:
@@ -962,3 +970,7 @@ class ModelManager(object):
             print(f'** Could not load VAE {name_or_path}: {str(deferred_error)}')
 
         return vae
+
+    @staticmethod
+    def _is_huggingface_hub_directory_present() -> bool:
+        return os.getenv('HF_HOME') is not None or os.getenv('XDG_CACHE_HOME') is not None

From f3e952ecf0dcf26d7b2401b8fb20882952f33d8d Mon Sep 17 00:00:00 2001
From: Daya Adianto <addianto@users.noreply.github.com>
Date: Wed, 18 Jan 2023 21:06:01 +0700
Subject: [PATCH 4/4] Use global_cache_dir calls properly

---
 ldm/invoke/model_manager.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ldm/invoke/model_manager.py b/ldm/invoke/model_manager.py
index 399d9bc934..0193baefba 100644
--- a/ldm/invoke/model_manager.py
+++ b/ldm/invoke/model_manager.py
@@ -759,7 +759,7 @@ class ModelManager(object):
 
         # transformer files get moved into the hub directory
         if cls._is_huggingface_hub_directory_present():
-            hub = global_cache_dir() / 'hub'
+            hub = global_cache_dir('hub')
         else:
             hub = models_dir / 'hub'
 
@@ -776,7 +776,7 @@ class ModelManager(object):
 
         # anything else gets moved into the diffusers directory
         if cls._is_huggingface_hub_directory_present():
-            diffusers = global_cache_dir() / 'diffusers'
+            diffusers = global_cache_dir('diffusers')
         else:
             diffusers = models_dir / 'diffusers'