From 3033331f65781b18d3fb6e96d90117694bba6252 Mon Sep 17 00:00:00 2001
From: Lincoln Stein
Date: Thu, 27 Oct 2022 22:50:06 -0400
Subject: [PATCH] remove unneeded warnings from attention.py

---
 ldm/modules/attention.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/ldm/modules/attention.py b/ldm/modules/attention.py
index 8d160f004b..4c36fa8a6c 100644
--- a/ldm/modules/attention.py
+++ b/ldm/modules/attention.py
@@ -236,9 +236,7 @@ class CrossAttention(nn.Module):
             return self.einsum_lowest_level(q, k, v, None, None, None)
         div = 1 << int((size_mb - 1) / max_tensor_mb).bit_length()
         if div <= q.shape[0]:
-            print("warning: untested call to einsum_op_slice_dim0")
             return self.einsum_op_slice_dim0(q, k, v, q.shape[0] // div)
-        print("warning: untested call to einsum_op_slice_dim1")
         return self.einsum_op_slice_dim1(q, k, v, max(q.shape[1] // div, 1))
 
     def einsum_op_cuda(self, q, k, v):
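
A note for reviewers, since the deleted warnings were the only hint of what
these branches do: below is a minimal, self-contained sketch of the slicing
strategy that einsum_op_tensor_mem implements. The attention_slice helper,
the loop bodies, and the size_mb estimate are illustrative assumptions, not
code from this patch; only the divisor formula and the dim0/dim1 dispatch
mirror the hunk's context lines.

import torch

def attention_slice(q, k, v):
    # One unsliced attention step: softmax(q @ k^T / sqrt(d)) @ v.
    s = torch.einsum('b i d, b j d -> b i j', q, k) * q.shape[-1] ** -0.5
    return torch.einsum('b i j, b j d -> b i d', s.softmax(dim=-1), v)

def einsum_op_tensor_mem(q, k, v, max_tensor_mb):
    # Assumed size estimate: MB needed by the full (batch, q_len, k_len)
    # attention-score tensor.
    size_mb = q.shape[0] * q.shape[1] * k.shape[1] * q.element_size() // (1 << 20)
    if size_mb <= max_tensor_mb:
        return attention_slice(q, k, v)
    # Smallest power-of-two divisor that brings each slice under the budget;
    # this is the `div = 1 << ...` line visible in the hunk's context.
    div = 1 << int((size_mb - 1) / max_tensor_mb).bit_length()
    r = torch.zeros_like(q)  # assumes v shares q's head dim, as in the module
    if div <= q.shape[0]:
        # Enough batch entries: split along dim 0 (einsum_op_slice_dim0).
        step = q.shape[0] // div
        for i in range(0, q.shape[0], step):
            r[i:i + step] = attention_slice(q[i:i + step], k[i:i + step], v[i:i + step])
    else:
        # Otherwise split the query tokens along dim 1 (einsum_op_slice_dim1).
        step = max(q.shape[1] // div, 1)
        for i in range(0, q.shape[1], step):
            r[:, i:i + step] = attention_slice(q[:, i:i + step], k, v)
    return r

One effect of rounding div up to a power of two: when q.shape[0] is itself a
power of two (batch times heads often is), q.shape[0] // div produces
equal-sized slices, so no remainder slice exceeds the memory budget.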