Merge branch 'refactor/nodes-on-generator' of github.com:invoke-ai/InvokeAI into refactor/nodes-on-generator

Lincoln Stein
2023-03-10 19:36:40 -05:00
9 changed files with 30 additions and 136 deletions


@@ -1274,7 +1274,7 @@ def load_pipeline_from_original_stable_diffusion_ckpt(
             tokenizer=tokenizer,
             unet=unet.to(precision),
             scheduler=scheduler,
-            safety_checker=safety_checker.to(precision),
+            safety_checker=None if return_generator_pipeline else safety_checker.to(precision),
             feature_extractor=feature_extractor,
         )
     else:
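For context, a minimal sketch of what the changed line does, assuming only what is visible in the hunk (return_generator_pipeline is a flag available in the enclosing function, and safety_checker/precision are the objects passed to the pipeline constructor); the helper name is hypothetical:

# Hypothetical helper illustrating the new behaviour: when a generator
# pipeline is requested, no safety checker is attached at all; otherwise
# the checker is kept and cast to the working precision as before.
def choose_safety_checker(return_generator_pipeline, safety_checker, precision):
    if return_generator_pipeline:
        return None
    return safety_checker.to(precision)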


@@ -108,7 +108,7 @@ class ModelManager(object):
         if model_name in self.models:
             requested_model = self.models[model_name]["model"]
             print(f">> Retrieving model {model_name} from system RAM cache")
-            self.models[model_name]["model"] = self._model_from_cpu(requested_model)
+            requested_model.ready()
             width = self.models[model_name]["width"]
             height = self.models[model_name]["height"]
             hash = self.models[model_name]["hash"]
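The cache-hit path now asks the model to place itself on the execution device instead of going through the manager's _model_from_cpu helper (deleted further down). A minimal sketch of the protocol this relies on, assuming each cached model exposes ready()/offload_all() the way StableDiffusionGeneratorPipeline does in the deleted code; the wrapper class and attribute names here are illustrative only:

import torch

class OffloadableModel:
    """Illustrative stand-in for a cached model that manages its own device moves."""

    def __init__(self, module: torch.nn.Module, execution_device: str = "cuda"):
        self.module = module
        self.execution_device = torch.device(execution_device)

    def ready(self):
        # Move weights back onto the execution device before generation,
        # roughly what _model_from_cpu used to do from the manager side.
        self.module.to(self.execution_device)

    def offload_all(self):
        # Park weights on the CPU to free VRAM, roughly what _model_to_cpu did.
        self.module.to(torch.device("cpu"))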
@@ -503,7 +503,7 @@ class ModelManager(object):
         print(f">> Offloading {model_name} to CPU")
         model = self.models[model_name]["model"]
-        self.models[model_name]["model"] = self._model_to_cpu(model)
+        model.offload_all()
         gc.collect()
         if self._has_cuda():
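A sketch of the resulting offload flow around this hunk, assuming the model exposes offload_all() as above and that the _has_cuda() branch frees the CUDA cache (the body of that branch is not shown in this excerpt); the standalone function is an illustration, not the actual ModelManager method:

import gc
import torch

def offload_model(models: dict, model_name: str) -> None:
    # Illustrative version of the offload path shown above.
    print(f">> Offloading {model_name} to CPU")
    model = models[model_name]["model"]
    model.offload_all()              # the model moves its own weights to CPU now

    gc.collect()                     # release Python-side references promptly
    if torch.cuda.is_available():    # stand-in for self._has_cuda()
        torch.cuda.empty_cache()     # presumably what the elided branch body does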
@@ -1048,43 +1048,6 @@ class ModelManager(object):
         self.stack.remove(model_name)
         self.models.pop(model_name, None)
-    def _model_to_cpu(self, model):
-        if self.device == CPU_DEVICE:
-            return model
-        if isinstance(model, StableDiffusionGeneratorPipeline):
-            model.offload_all()
-            return model
-        model.cond_stage_model.device = CPU_DEVICE
-        model.to(CPU_DEVICE)
-        for submodel in ("first_stage_model", "cond_stage_model", "model"):
-            try:
-                getattr(model, submodel).to(CPU_DEVICE)
-            except AttributeError:
-                pass
-        return model
-
-    def _model_from_cpu(self, model):
-        if self.device == CPU_DEVICE:
-            return model
-        if isinstance(model, StableDiffusionGeneratorPipeline):
-            model.ready()
-            return model
-        model.to(self.device)
-        model.cond_stage_model.device = self.device
-        for submodel in ("first_stage_model", "cond_stage_model", "model"):
-            try:
-                getattr(model, submodel).to(self.device)
-            except AttributeError:
-                pass
-        return model
     def _pop_oldest_model(self):
         """
         Remove the first element of the FIFO, which ought