Mirror of https://github.com/invoke-ai/InvokeAI, synced 2024-08-30 20:32:17 +00:00
feat(nodes): skip on duplicate loras instead of erroring
The `LoRA` and `SDXL LoRA` nodes would raise an error if the same LoRA was applied more than once. To make the nodes more resilient, they now skip duplicate LoRAs and log a warning to the console instead. Also added a warning to the LoRA Collection Loader nodes, which already skipped duplicates but didn't log a warning.
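For reference, the core change swaps raise-on-duplicate for warn-and-skip. A minimal standalone sketch of that pattern (the `LoRAField` shape, logger setup, and sample key below are illustrative stand-ins, not InvokeAI's actual classes):

    import logging
    from dataclasses import dataclass

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger(__name__)

    @dataclass
    class LoRAField:
        key: str
        weight: float

    def apply_lora(loras: list[LoRAField], candidate: LoRAField) -> None:
        # Warn-and-skip instead of raise: the behavior this commit introduces.
        if any(lora.key == candidate.key for lora in loras):
            logger.warning('LoRA "%s" already applied, skipping', candidate.key)
            return
        loras.append(candidate)

    applied: list[LoRAField] = []
    apply_lora(applied, LoRAField("detail-tweaker", 0.75))
    apply_lora(applied, LoRAField("detail-tweaker", 0.75))  # warned and skipped
    assert len(applied) == 1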
This commit is contained in:
parent edcaf8287d
commit c4de9ef910
@@ -188,31 +188,33 @@ class LoRALoaderInvocation(BaseInvocation):
         if not context.models.exists(lora_key):
             raise Exception(f"Unkown lora: {lora_key}!")
 
-        if self.unet is not None and any(lora.lora.key == lora_key for lora in self.unet.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to unet')
-
-        if self.clip is not None and any(lora.lora.key == lora_key for lora in self.clip.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to clip')
-
         output = LoRALoaderOutput()
 
         if self.unet is not None:
             output.unet = self.unet.model_copy(deep=True)
-            output.unet.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
-                )
-            )
+            if any(lora.lora.key == lora_key for lora in self.unet.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to UNet, skipping')
+            else:
+                output.unet.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
+                )
 
         if self.clip is not None:
             output.clip = self.clip.model_copy(deep=True)
-            output.clip.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
-                )
-            )
+            if any(lora.lora.key == lora_key for lora in self.clip.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to CLIP, skipping')
+            else:
+                output.clip.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
+                )
 
         return output
@@ -264,6 +266,7 @@ class LoRACollectionLoader(BaseInvocation):
 
         for lora in loras:
             if lora.lora.key in added_loras:
+                context.logger.warning(f'LoRA "{lora.lora.key}" already applied, skipping')
                 continue
 
             if not context.models.exists(lora.lora.key):
@@ -334,43 +337,46 @@ class SDXLLoRALoaderInvocation(BaseInvocation):
         if not context.models.exists(lora_key):
             raise Exception(f"Unknown lora: {lora_key}!")
 
-        if self.unet is not None and any(lora.lora.key == lora_key for lora in self.unet.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to unet')
-
-        if self.clip is not None and any(lora.lora.key == lora_key for lora in self.clip.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to clip')
-
-        if self.clip2 is not None and any(lora.lora.key == lora_key for lora in self.clip2.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to clip2')
-
         output = SDXLLoRALoaderOutput()
 
         if self.unet is not None:
             output.unet = self.unet.model_copy(deep=True)
-            output.unet.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
-                )
-            )
+            if any(lora.lora.key == lora_key for lora in self.unet.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to UNet, skipping')
+            else:
+                output.unet.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
+                )
 
         if self.clip is not None:
             output.clip = self.clip.model_copy(deep=True)
-            output.clip.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
-                )
-            )
+            if any(lora.lora.key == lora_key for lora in self.clip.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to CLIP, skipping')
+            else:
+                output.clip.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
+                )
 
         if self.clip2 is not None:
             output.clip2 = self.clip2.model_copy(deep=True)
-            output.clip2.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
-                )
-            )
+            if any(lora.lora.key == lora_key for lora in self.clip2.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to CLIP2, skipping')
+            else:
+                output.clip2.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
+                )
 
         return output
@@ -414,6 +420,7 @@ class SDXLLoRACollectionLoader(BaseInvocation):
 
         for lora in loras:
             if lora.lora.key in added_loras:
+                context.logger.warning(f'LoRA "{lora.lora.key}" already applied, skipping')
                 continue
 
             if not context.models.exists(lora.lora.key):
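The collection-loader hunks above dedupe with a set of already-applied keys instead of rescanning each target's LoRA list. A minimal sketch of that set-based approach (the function name and sample keys are illustrative, not InvokeAI code):

    import logging

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger(__name__)

    def dedupe_lora_keys(keys: list[str]) -> list[str]:
        # First occurrence wins; repeats are logged and skipped, mirroring
        # the added_loras set used by the collection loaders.
        added_loras: set[str] = set()
        kept: list[str] = []
        for key in keys:
            if key in added_loras:
                logger.warning('LoRA "%s" already applied, skipping', key)
                continue
            added_loras.add(key)
            kept.append(key)
        return kept

    print(dedupe_lora_keys(["style-a", "style-b", "style-a"]))  # ['style-a', 'style-b']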