Update invokeai/backend/model_management/model_cache.py

Co-authored-by: StAlKeR7779 <stalkek7779@yandex.ru>
Lincoln Stein 2023-08-16 08:48:44 -04:00 committed by GitHub
parent be8edaf3fd
commit bb1b8ceaa8

@@ -226,7 +226,7 @@ class ModelCache(object):
             self.stats.high_watermark = max(self.stats.high_watermark, self._cache_size())
             self.stats.in_cache = len(self._cached_models)
             self.stats.loaded_model_sizes[key] = max(
-                self.stats.loaded_model_sizes.get("key", 0), model_info.get_size(submodel)
+                self.stats.loaded_model_sizes.get(key, 0), model_info.get_size(submodel)
             )
         with suppress(Exception):
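
The one-line change replaces the string literal "key" with the variable key in the dict lookup. With the literal, every model's size update compared against the same "key" entry of loaded_model_sizes, so the per-model maximum was never tracked against that model's own previous value. A minimal sketch of the difference, using a standalone dict in place of the cache's stats object (the helper names here are illustrative, not part of the project's API):

```python
loaded_model_sizes: dict[str, int] = {}

def record_size_buggy(key: str, size: int) -> None:
    # Bug: the literal "key" always reads the same dict entry, no matter
    # which model was loaded, so earlier sizes for this model are ignored.
    loaded_model_sizes[key] = max(loaded_model_sizes.get("key", 0), size)

def record_size_fixed(key: str, size: int) -> None:
    # Fix: look up this model's own previous size, as in the committed change.
    loaded_model_sizes[key] = max(loaded_model_sizes.get(key, 0), size)

# With the fix, a smaller later measurement does not overwrite the maximum.
record_size_fixed("model-a", 2_000_000_000)
record_size_fixed("model-a", 1_500_000_000)
assert loaded_model_sizes["model-a"] == 2_000_000_000
```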