From d8b1f29066d1b93c0aaca093a67ac9bbec0ccf05 Mon Sep 17 00:00:00 2001
From: Lincoln Stein
Date: Sat, 13 May 2023 16:29:18 -0400
Subject: [PATCH] proxy SDModelInfo so that it can be used directly as context

---
 invokeai/backend/model_management/model_cache.py   | 2 +-
 invokeai/backend/model_management/model_manager.py | 6 ++++++
 2 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/invokeai/backend/model_management/model_cache.py b/invokeai/backend/model_management/model_cache.py
index 08aa2f3044..558de0f28c 100644
--- a/invokeai/backend/model_management/model_cache.py
+++ b/invokeai/backend/model_management/model_cache.py
@@ -68,7 +68,7 @@ class SDModelType(Enum):
     # distinguish them by class
     lora=LoraType
     textual_inversion=TIType
-    
+
 class ModelStatus(Enum):
     unknown='unknown'
     not_loaded='not loaded'
diff --git a/invokeai/backend/model_management/model_manager.py b/invokeai/backend/model_management/model_manager.py
index f1a6dc18dc..4fbc80703a 100644
--- a/invokeai/backend/model_management/model_manager.py
+++ b/invokeai/backend/model_management/model_manager.py
@@ -176,6 +176,12 @@ class SDModelInfo():
     revision: str = None
     _cache: ModelCache = None
 
+    def __enter__(self):
+        return self.context.__enter__()
+
+    def __exit__(self,*args, **kwargs):
+        self.context.__exit__(*args, **kwargs)
+
     @property
     def status(self)->ModelStatus:
         '''Return load status of this model as a model_cache.ModelStatus enum'''
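
Note: a minimal usage sketch of what this change enables. Only the __enter__/__exit__
proxying comes from the patch; the ModelManager construction, the model name, and the
assumption that get_model() returns an SDModelInfo whose .context wraps the cache's
context manager are illustrative, not confirmed by this diff.

    # Hypothetical usage sketch -- manager setup and model name are assumptions.
    from invokeai.backend.model_management.model_manager import ModelManager

    manager = ModelManager('configs/models.yaml')            # hypothetical config path
    model_info = manager.get_model('stable-diffusion-1.5')   # hypothetical model name

    # Before this patch, callers had to enter the wrapped cache context explicitly:
    with model_info.context as model:
        ...  # run inference with the loaded model

    # After this patch, SDModelInfo forwards __enter__/__exit__ to self.context,
    # so the info object itself can be used as the context manager:
    with model_info as model:
        ...  # same loaded model, one less attribute hop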