From bf1beaa607984e34f3bfab8b264444318b6f47b0 Mon Sep 17 00:00:00 2001
From: Lincoln Stein
Date: Mon, 12 Sep 2022 14:34:36 -0400
Subject: [PATCH] revert 49a96b90 due to conflicts during training

---
 ldm/modules/attention.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/ldm/modules/attention.py b/ldm/modules/attention.py
index 1321e9db1e..894c4db839 100644
--- a/ldm/modules/attention.py
+++ b/ldm/modules/attention.py
@@ -297,9 +297,9 @@ class BasicTransformerBlock(nn.Module):
 
     def _forward(self, x, context=None):
         x = x.contiguous() if x.device.type == 'mps' else x
-        x += self.attn1(self.norm1(x))
-        x += self.attn2(self.norm2(x), context=context)
-        x += self.ff(self.norm3(x))
+        x = self.attn1(self.norm1(x)) + x
+        x = self.attn2(self.norm2(x), context=context) + x
+        x = self.ff(self.norm3(x)) + x
         return x
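
Note (editorial, not part of the patch): the reverted lines used in-place
addition ("x += ..."), which mutates a tensor that autograd may have saved
for the backward pass; during training this can raise a RuntimeError, while
the out-of-place form ("x = ... + x") allocates a fresh tensor and leaves
the saved one intact. Below is a minimal standalone sketch of that failure
mode, assuming stock PyTorch; the tensors and names are illustrative only
and do not appear in the patched module:

    import torch

    # Failing pattern: in-place update of a tensor that autograd saved.
    x = torch.ones(3, requires_grad=True)
    y = x.sigmoid()        # sigmoid's backward saves its output y
    y += 1                 # in-place add bumps y's version counter
    try:
        y.sum().backward()
    except RuntimeError as err:
        print(err)         # "... modified by an inplace operation"

    # Working pattern, mirroring the patched lines: out-of-place
    # addition builds a new tensor instead of mutating the saved one.
    x = torch.ones(3, requires_grad=True)
    y = x.sigmoid()
    y = y + 1              # analogous to x = self.attn1(self.norm1(x)) + x
    y.sum().backward()     # succeeds
    print(x.grad)

The two forms are numerically identical in the forward pass; only the
in-place mutation differs, which is why the revert is safe for inference
while unblocking training.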