mirror of
https://github.com/invoke-ai/InvokeAI
synced 2024-08-30 20:32:17 +00:00
prevent "!switch state gets confused if model switching fails"
- If !switch failed on a particular model, generate became confused and wouldn't try again until you switched to a different working model and back again. - This commit fixes the problem and closes #1547
This commit is contained in:
parent
a341297b0c
commit
9adaf8f8ad
@ -832,6 +832,7 @@ class Generate:
|
||||
model_data = cache.get_model(model_name)
|
||||
if model_data is None: # restore previous
|
||||
model_data = cache.get_model(self.model_name)
|
||||
model_name = self.model_name # addresses Issue #1547
|
||||
|
||||
self.model = model_data['model']
|
||||
self.width = model_data['width']
|
||||
|
@ -78,11 +78,12 @@ class ModelCache(object):
|
||||
else: # we're about to load a new model, so potentially offload the least recently used one
|
||||
try:
|
||||
requested_model, width, height, hash = self._load_model(model_name)
|
||||
self.models[model_name] = {}
|
||||
self.models[model_name]['model'] = requested_model
|
||||
self.models[model_name]['width'] = width
|
||||
self.models[model_name]['height'] = height
|
||||
self.models[model_name]['hash'] = hash
|
||||
self.models[model_name] = {
|
||||
'model': requested_model,
|
||||
'width': width,
|
||||
'height': height,
|
||||
'hash': hash,
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
print(f'** model {model_name} could not be loaded: {str(e)}')
|
||||
|
Loading…
Reference in New Issue
Block a user