feat(nodes): skip on duplicate loras instead of erroring

The `LoRA` and `SDXL LoRA` nodes would error if the same LoRA was applied more than once. To make the nodes more resilient, they now skip duplicates and log a warning to the console instead.
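
In essence, the duplicate check now warns and leaves the existing list of LoRAs unchanged rather than raising. A minimal standalone sketch of the pattern, using plain `logging` and a hypothetical `LoRAField`/`add_lora` in place of the real invocation types (see the diff below for the actual change):

```python
import logging
from dataclasses import dataclass

logger = logging.getLogger(__name__)


@dataclass
class LoRAField:
    # Hypothetical stand-in for the node's real LoRAField (key + weight only).
    key: str
    weight: float


def add_lora(loras: list[LoRAField], key: str, weight: float) -> None:
    """Append a LoRA unless one with the same key is already applied."""
    if any(existing.key == key for existing in loras):
        # Old behaviour: raise an exception on the duplicate key.
        # New behaviour: warn and skip, leaving `loras` untouched.
        logger.warning('LoRA "%s" already applied, skipping', key)
        return
    loras.append(LoRAField(key=key, weight=weight))
```

Calling `add_lora` twice with the same key leaves a single entry and logs one warning instead of raising.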

Also added a warning for the LoRA Collection Loader nodes. These already skipped duplicates but didn't log a warning.
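
For the collection loaders the skip already existed; the change only adds the log line. A rough sketch of that dedupe loop, with a hypothetical `dedupe_loras` helper standing in for the node's real fields:

```python
import logging

logger = logging.getLogger(__name__)


def dedupe_loras(requested: list[str]) -> list[str]:
    """Drop duplicate LoRA keys from `requested`, warning on each skipped dupe."""
    added_loras: set[str] = set()
    unique: list[str] = []
    for key in requested:
        if key in added_loras:
            # The skip itself is old behaviour; the warning is the new part.
            logger.warning('LoRA "%s" already applied, skipping', key)
            continue
        added_loras.add(key)
        unique.append(key)
    return unique
```
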
psychedelicious 2024-07-26 14:10:38 +10:00
parent edcaf8287d
commit c4de9ef910


@@ -188,31 +188,33 @@ class LoRALoaderInvocation(BaseInvocation):
         if not context.models.exists(lora_key):
             raise Exception(f"Unkown lora: {lora_key}!")
-        if self.unet is not None and any(lora.lora.key == lora_key for lora in self.unet.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to unet')
-        if self.clip is not None and any(lora.lora.key == lora_key for lora in self.clip.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to clip')
         output = LoRALoaderOutput()
         if self.unet is not None:
             output.unet = self.unet.model_copy(deep=True)
-            output.unet.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
+            if any(lora.lora.key == lora_key for lora in self.unet.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to UNet, skipping')
+            else:
+                output.unet.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
                 )
-            )
         if self.clip is not None:
             output.clip = self.clip.model_copy(deep=True)
-            output.clip.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
+            if any(lora.lora.key == lora_key for lora in self.clip.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to CLIP, skipping')
+            else:
+                output.clip.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
                 )
-            )
         return output
@@ -264,6 +266,7 @@ class LoRACollectionLoader(BaseInvocation):
         for lora in loras:
             if lora.lora.key in added_loras:
+                context.logger.warning(f'LoRA "{lora.lora.key}" already applied, skipping')
                 continue
             if not context.models.exists(lora.lora.key):
@@ -334,43 +337,46 @@ class SDXLLoRALoaderInvocation(BaseInvocation):
         if not context.models.exists(lora_key):
             raise Exception(f"Unknown lora: {lora_key}!")
-        if self.unet is not None and any(lora.lora.key == lora_key for lora in self.unet.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to unet')
-        if self.clip is not None and any(lora.lora.key == lora_key for lora in self.clip.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to clip')
-        if self.clip2 is not None and any(lora.lora.key == lora_key for lora in self.clip2.loras):
-            raise Exception(f'LoRA "{lora_key}" already applied to clip2')
         output = SDXLLoRALoaderOutput()
         if self.unet is not None:
             output.unet = self.unet.model_copy(deep=True)
-            output.unet.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
+            if any(lora.lora.key == lora_key for lora in self.unet.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to UNet, skipping')
+            else:
+                output.unet.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
                 )
-            )
         if self.clip is not None:
             output.clip = self.clip.model_copy(deep=True)
-            output.clip.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
+            if any(lora.lora.key == lora_key for lora in self.clip.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to CLIP, skipping')
+            else:
+                output.clip.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
                 )
-            )
         if self.clip2 is not None:
             output.clip2 = self.clip2.model_copy(deep=True)
-            output.clip2.loras.append(
-                LoRAField(
-                    lora=self.lora,
-                    weight=self.weight,
+            if any(lora.lora.key == lora_key for lora in self.clip2.loras):
+                context.logger.warning(f'LoRA "{lora_key}" already applied to CLIP2, skipping')
+            else:
+                output.clip2.loras.append(
+                    LoRAField(
+                        lora=self.lora,
+                        weight=self.weight,
+                    )
                 )
-            )
         return output
@@ -414,6 +420,7 @@ class SDXLLoRACollectionLoader(BaseInvocation):
         for lora in loras:
             if lora.lora.key in added_loras:
+                context.logger.warning(f'LoRA "{lora.lora.key}" already applied, skipping')
                 continue
             if not context.models.exists(lora.lora.key):