From cad9a41433646244cb4ec400831a07ebe7459d8e Mon Sep 17 00:00:00 2001 From: Ryan Dick Date: Tue, 27 Aug 2024 14:43:41 +0000 Subject: [PATCH] Remove unused ModelCache.exists(...) function. --- .../model_manager/load/model_cache/model_cache_base.py | 9 --------- .../load/model_cache/model_cache_default.py | 9 --------- 2 files changed, 18 deletions(-) diff --git a/invokeai/backend/model_manager/load/model_cache/model_cache_base.py b/invokeai/backend/model_manager/load/model_cache/model_cache_base.py index 012fd42d55..97fd401da0 100644 --- a/invokeai/backend/model_manager/load/model_cache/model_cache_base.py +++ b/invokeai/backend/model_manager/load/model_cache/model_cache_base.py @@ -193,15 +193,6 @@ class ModelCacheBase(ABC, Generic[T]): """ pass - @abstractmethod - def exists( - self, - key: str, - submodel_type: Optional[SubModelType] = None, - ) -> bool: - """Return true if the model identified by key and submodel_type is in the cache.""" - pass - @abstractmethod def cache_size(self) -> int: """Get the total size of the models currently cached.""" diff --git a/invokeai/backend/model_manager/load/model_cache/model_cache_default.py b/invokeai/backend/model_manager/load/model_cache/model_cache_default.py index acc1e27349..5063f27184 100644 --- a/invokeai/backend/model_manager/load/model_cache/model_cache_default.py +++ b/invokeai/backend/model_manager/load/model_cache/model_cache_default.py @@ -130,15 +130,6 @@ class ModelCache(ModelCacheBase[AnyModel]): total += cache_record.size return total - def exists( - self, - key: str, - submodel_type: Optional[SubModelType] = None, - ) -> bool: - """Return true if the model identified by key and submodel_type is in the cache.""" - key = self._make_cache_key(key, submodel_type) - return key in self._cached_models - def put( self, key: str,