diff --git a/ldm/modules/attention.py b/ldm/modules/attention.py
index 1321e9db1e..894c4db839 100644
--- a/ldm/modules/attention.py
+++ b/ldm/modules/attention.py
@@ -297,9 +297,9 @@ class BasicTransformerBlock(nn.Module):
 
     def _forward(self, x, context=None):
         x = x.contiguous() if x.device.type == 'mps' else x
-        x += self.attn1(self.norm1(x))
-        x += self.attn2(self.norm2(x), context=context)
-        x += self.ff(self.norm3(x))
+        x = self.attn1(self.norm1(x)) + x
+        x = self.attn2(self.norm2(x), context=context) + x
+        x = self.ff(self.norm3(x)) + x
         return x
 