Fix double LoRA patching of the UNet. This was presumably added by accident due to a previous merge conflict.

commit d756c9b10a
parent 63d3212bec
Author: Ryan Dick, 2023-11-17 14:49:03 -05:00
Committed by: Kent Keirsey


@@ -706,7 +706,6 @@ class DenoiseLatentsInvocation(BaseInvocation):
         )
         with (
             ExitStack() as exit_stack,
-            ModelPatcher.apply_lora_unet(unet_info.context.model, _lora_loader()),
             ModelPatcher.apply_freeu(unet_info.context.model, self.unet.freeu_config),
             set_seamless(unet_info.context.model, self.unet.seamless_axes),
             unet_info as unet,
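
For context on why the duplicate line matters: LoRA patching mutates the model weights, so entering the same patch context twice applies the LoRA delta twice. Below is a minimal sketch of that arithmetic; it is not InvokeAI's actual ModelPatcher, and the names (apply_lora, up, down, scale) are illustrative assumptions. It only assumes that a patch adds scale * (up @ down) to a base weight in place.

import torch

base = torch.zeros(4, 4)   # stand-in for a UNet weight matrix (not InvokeAI code)
down = torch.ones(2, 4)    # hypothetical LoRA down-projection (rank 2)
up = torch.ones(4, 2)      # hypothetical LoRA up-projection
scale = 0.5

def apply_lora(weight: torch.Tensor) -> None:
    # In-place patch, as a patcher-style context manager might do on
    # __enter__: W <- W + scale * (up @ down).
    weight += scale * (up @ down)

apply_lora(base)           # the intended single application
once = base.clone()
apply_lora(base)           # the accidental second application
twice = base

# The duplicated patch doubles the LoRA delta: W + 2 * scale * (up @ down),
# which is why the redundant apply_lora_unet line had to be removed.
assert torch.allclose(twice, 2 * once)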