From aa02ebf8f5c2f8a90737c5a9607fccb5bca00437 Mon Sep 17 00:00:00 2001
From: Ryan Dick
Date: Sat, 4 Nov 2023 08:52:10 -0400
Subject: [PATCH] Fix model cache gc.collect() condition.

---
 invokeai/backend/model_management/model_cache.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/invokeai/backend/model_management/model_cache.py b/invokeai/backend/model_management/model_cache.py
index 0b2a8b8df7..83af789219 100644
--- a/invokeai/backend/model_management/model_cache.py
+++ b/invokeai/backend/model_management/model_cache.py
@@ -493,7 +493,7 @@ class ModelCache(object):
             else:
                 pos += 1

-        if models_cleared < 0:
+        if models_cleared > 0:
             # There would likely be some 'garbage' to be collected regardless of whether a model was cleared or not, but
             # there is a significant time cost to calling `gc.collect()`, so we want to use it sparingly. (The time cost
             # is high even if no garbage gets collected.)