diff --git a/invokeai/app/invocations/model.py b/invokeai/app/invocations/model.py
index ca76ce7d51..e794423fa1 100644
--- a/invokeai/app/invocations/model.py
+++ b/invokeai/app/invocations/model.py
@@ -404,7 +404,7 @@ class SeamlessModeOutput(BaseInvocationOutput):
 @title("Seamless")
 @tags("seamless", "model")
 class SeamlessModeInvocation(BaseInvocation):
-    """Apply seamless mode to unet."""
+    """Applies the seamless transformation to the Model UNet and VAE."""
 
     type: Literal["seamless"] = "seamless"
 
diff --git a/invokeai/backend/model_management/seamless.py b/invokeai/backend/model_management/seamless.py
index db0274c3f7..b56b64f1de 100644
--- a/invokeai/backend/model_management/seamless.py
+++ b/invokeai/backend/model_management/seamless.py
@@ -1,7 +1,8 @@
 from __future__ import annotations
 
 from contextlib import contextmanager
-from typing import TypeVar, Union
+from typing import TypeVar
 
+import diffusers
 import torch.nn as nn
 from diffusers.models import UNet2DModel, AutoencoderKL
 
@@ -22,10 +23,9 @@ def _conv_forward_asymmetric(self, input, weight, bias):
     )
 
 
-@contextmanager
-
 ModelType = TypeVar('ModelType', UNet2DModel, AutoencoderKL)
 
+@contextmanager
 def set_seamless(model: ModelType, seamless_axes):
     try:
         to_restore = []
@@ -51,6 +51,8 @@ def set_seamless(model: ModelType, seamless_axes):
             to_restore.append((m, m._conv_forward))
             m._conv_forward = _conv_forward_asymmetric.__get__(m, nn.Conv2d)
+            if isinstance(m, diffusers.models.lora.LoRACompatibleConv) and m.lora_layer is None:
+                m.forward = nn.Conv2d.forward.__get__(m, nn.Conv2d)
 
         yield
 
@@ -60,4 +62,6 @@ def set_seamless(model: ModelType, seamless_axes):
             if hasattr(m, "asymmetric_padding_mode"):
                 del m.asymmetric_padding_mode
             if hasattr(m, "asymmetric_padding"):
-                del m.asymmetric_padding
\ No newline at end of file
+                del m.asymmetric_padding
+            if isinstance(m, diffusers.models.lora.LoRACompatibleConv):
+                m.forward = diffusers.models.lora.LoRACompatibleConv.forward.__get__(m, diffusers.models.lora.LoRACompatibleConv)
\ No newline at end of file