Mirror of https://github.com/invoke-ai/InvokeAI

Merge branch 'main' into maryhipp/optional-version
Commit 71a07ee5a7
@@ -328,6 +328,25 @@ class ModelCache(object):
             refs = sys.getrefcount(cache_entry.model)
 
+            # manualy clear local variable references of just finished function calls
+            # for some reason python don't want to collect it even by gc.collect() immidiately
+            if refs > 2:
+                while True:
+                    cleared = False
+                    for referrer in gc.get_referrers(cache_entry.model):
+                        if type(referrer).__name__ == "frame":
+                            # RuntimeError: cannot clear an executing frame
+                            with suppress(RuntimeError):
+                                referrer.clear()
+                                cleared = True
+                            #break
+
+                    # repeat if referrers changes(due to frame clear), else exit loop
+                    if cleared:
+                        gc.collect()
+                    else:
+                        break
+
             device = cache_entry.model.device if hasattr(cache_entry.model, "device") else None
             self.logger.debug(f"Model: {model_key}, locks: {cache_entry._locks}, device: {device}, loaded: {cache_entry.loaded}, refs: {refs}")
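The hunk above works around locals of already-finished frames keeping the model object alive: such frames show up in gc.get_referrers(), and frame.clear() drops their local references so a subsequent gc.collect() can actually free the model. A minimal standalone sketch of the same idea (the class and function names below are illustrative, not from the repository):

    import gc
    import sys
    from contextlib import suppress

    class BigModel:
        pass

    def release_lingering_refs(obj) -> None:
        # Clear the locals of any frame that still references obj (for example a
        # frame kept alive by a stored traceback). An actively executing frame
        # raises RuntimeError, which is suppressed, matching the hunk above.
        for referrer in gc.get_referrers(obj):
            if type(referrer).__name__ == "frame":
                with suppress(RuntimeError):
                    referrer.clear()
        gc.collect()

    model = BigModel()
    print(sys.getrefcount(model))  # baseline refcount for the local reference
    release_lingering_refs(model)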
@@ -364,6 +383,9 @@ class ModelCache(object):
             vram_in_use += mem.vram_used # note vram_used is negative
         self.logger.debug(f'{(vram_in_use/GIG):.2f}GB VRAM used for models; max allowed={(reserved/GIG):.2f}GB')
 
+        gc.collect()
+        torch.cuda.empty_cache()
+
     def _local_model_hash(self, model_path: Union[str, Path]) -> str:
         sha = hashlib.sha256()
         path = Path(model_path)
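The two added lines pair Python garbage collection with PyTorch's CUDA caching allocator: gc.collect() frees the Python-side objects, and torch.cuda.empty_cache() returns the now-unused cached blocks to the driver. A small sketch of the effect, assuming a CUDA-capable machine with torch installed (tensor and variable names are illustrative):

    import gc
    import torch

    if torch.cuda.is_available():
        x = torch.zeros(1024, 1024, device="cuda")
        print(torch.cuda.memory_allocated())  # bytes currently allocated by x
        del x
        gc.collect()                 # collect any lingering Python references
        torch.cuda.empty_cache()     # release cached blocks back to the GPU
        print(torch.cuda.memory_reserved())   # reserved pool should shrink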
@@ -140,14 +140,14 @@ Layout of the `models` directory:
   models
   ├── sd-1
   │   ├── controlnet
   │   ├── lora
   │   ├── main
   │   └── embedding
   ├── sd-2
   │   ├── controlnet
   │   ├── lora
   │   ├── main
   │   └── embedding
   └── core
       ├── face_reconstruction
@@ -824,10 +824,14 @@ class ModelManager(object):
         assert config_file_path is not None,'no config file path to write to'
         config_file_path = self.app_config.root_path / config_file_path
         tmpfile = os.path.join(os.path.dirname(config_file_path), "new_config.tmp")
-        with open(tmpfile, "w", encoding="utf-8") as outfile:
-            outfile.write(self.preamble())
-            outfile.write(yaml_str)
-        os.replace(tmpfile, config_file_path)
+        try:
+            with open(tmpfile, "w", encoding="utf-8") as outfile:
+                outfile.write(self.preamble())
+                outfile.write(yaml_str)
+            os.replace(tmpfile, config_file_path)
+        except OSError as err:
+            self.logger.warning(f"Could not modify the config file at {config_file_path}")
+            self.logger.warning(err)
 
     def preamble(self) -> str:
         """
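The new code wraps the existing write-temp-then-replace pattern in an OSError handler: the config is written to a sibling temp file and swapped into place with os.replace(), which is atomic on the same filesystem, so a failed write never leaves a half-written config behind. A self-contained sketch of that pattern, with hypothetical file names and a print standing in for the manager's logger:

    import os

    def write_config_atomically(config_path: str, text: str) -> None:
        # Write to a temp file next to the target, then atomically swap it in.
        tmp_path = os.path.join(os.path.dirname(config_path) or ".", "new_config.tmp")
        try:
            with open(tmp_path, "w", encoding="utf-8") as outfile:
                outfile.write(text)
            os.replace(tmp_path, config_path)  # atomic on POSIX and Windows
        except OSError as err:
            print(f"Could not modify the config file at {config_path}: {err}")

    write_config_atomically("models.yaml", "version: 3.0.0\n")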
@@ -986,4 +990,3 @@ class ModelManager(object):
             successfully_installed.update(installed)
         self.commit()
         return successfully_installed
-