mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)

commit d8b1f29066
parent b23c9f1da5

    proxy SDModelInfo so that it can be used directly as context
@@ -68,7 +68,7 @@ class SDModelType(Enum):
     # distinguish them by class
     lora=LoraType
     textual_inversion=TIType
 
 class ModelStatus(Enum):
     unknown='unknown'
     not_loaded='not loaded'
@@ -176,6 +176,12 @@ class SDModelInfo():
     revision: str = None
     _cache: ModelCache = None
 
+    def __enter__(self):
+        return self.context.__enter__()
+
+    def __exit__(self,*args, **kwargs):
+        self.context.__exit__(*args, **kwargs)
+
     @property
     def status(self)->ModelStatus:
         '''Return load status of this model as a model_cache.ModelStatus enum'''
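The added __enter__/__exit__ methods simply delegate to the info object's context attribute, so callers can write "with model_info as model:" instead of "with model_info.context as model:". Below is a minimal, self-contained sketch of that delegation pattern; ModelInfoSketch, _loaded, and the "fake-unet" string are illustrative stand-ins, not InvokeAI code.

    from contextlib import contextmanager
    from dataclasses import dataclass
    from typing import Any

    @contextmanager
    def _loaded(model: Any):
        # Stand-in for the cache's real context manager, which would lock
        # the model into memory on enter and release it on exit.
        print("locking model in memory")
        try:
            yield model
        finally:
            print("releasing model")

    @dataclass
    class ModelInfoSketch:
        """Illustrative stand-in for SDModelInfo: it holds a context
        manager and proxies the context-manager protocol to it."""
        context: Any

        def __enter__(self):
            # Same delegation as the diff above: entering the info object
            # enters its wrapped context and returns the loaded model.
            return self.context.__enter__()

        def __exit__(self, *args, **kwargs):
            self.context.__exit__(*args, **kwargs)

    info = ModelInfoSketch(context=_loaded("fake-unet"))

    # The proxy lets the info object itself be used as the context.
    with info as model:
        print(f"running inference with {model}")

The design choice here is that the richer SDModelInfo object can serve both as the metadata carrier and as the thing callers enter, so the model manager only has to hand back a single object.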
|