merge PR #495 - keep using float16 in ldm.modules.attention

commit 70aa674e9e
parent 8748370f44
Author: Lincoln Stein
Date:   2022-09-11 10:34:06 -04:00

2 changed files with 2 additions and 3 deletions

ldm/modules/attention.py

@@ -181,7 +181,7 @@ class CrossAttention(nn.Module):
         q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> (b h) n d', h=h), (q_in, k_in, v_in))
         del q_in, k_in, v_in
 
-        r1 = torch.zeros(q.shape[0], q.shape[1], v.shape[2], device=q.device)
+        r1 = torch.zeros(q.shape[0], q.shape[1], v.shape[2], device=q.device, dtype=q.dtype)
 
         if device_type == 'mps':
             mem_free_total = psutil.virtual_memory().available
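The hunk above matters because torch.zeros allocates in PyTorch's default dtype (float32) unless told otherwise, so under half-precision inference the old accumulator mixed float16 attention results into a float32 buffer at twice the memory cost. A minimal sketch of the difference, with illustrative placeholder shapes rather than the real attention dimensions:

import torch

# q is float16, as it would be under half-precision inference.
q = torch.randn(2, 64, 40).half()

r_old = torch.zeros(q.shape[0], q.shape[1], q.shape[2])                 # float32 by default
r_new = torch.zeros(q.shape[0], q.shape[1], q.shape[2], dtype=q.dtype)  # stays float16

print(r_old.dtype, r_old.element_size())   # torch.float32, 4 bytes per element
print(r_new.dtype, r_new.element_size())   # torch.float16, 2 bytes per element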
@@ -213,7 +213,7 @@ class CrossAttention(nn.Module):
             end = i + slice_size
             s1 = einsum('b i d, b j d -> b i j', q[:, i:end], k) * self.scale
 
-            s2 = s1.softmax(dim=-1)
+            s2 = s1.softmax(dim=-1, dtype=r1.dtype)
             del s1
 
             r1[:, i:end] = einsum('b i j, b j d -> b i d', s2, v)
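Passing dtype= here makes the second half of the fix explicit: Tensor.softmax(dim, dtype=None) casts its input to dtype before the operation and returns a tensor of that dtype, so the slice written into r1 always matches the accumulator. A short sketch, assuming a PyTorch build with half-precision softmax available on the device in use:

import torch

s1 = torch.randn(2, 8, 8).half()
r1 = torch.zeros(2, 8, 8, dtype=torch.float16)

s2 = s1.softmax(dim=-1, dtype=r1.dtype)   # output dtype matches the accumulator
r1[:, 0:4] = s2[:, 0:4]                   # slice assignment stays dtype-consistent
print(s2.dtype)                           # torch.float16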

scripts/dream.py

@@ -185,7 +185,6 @@ def main_loop(t2i, outdir, prompt_as_dir, parser, infile):
                 continue
         if opt.seed is not None and opt.seed < 0:   # retrieve previous value!
             try:
-                print(f'last seeds = {last_seeds}, opt.seed={opt.seed}')
                 opt.seed = last_seeds[opt.seed]
                 print(f'reusing previous seed {opt.seed}')
             except IndexError:
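The deleted line was a leftover debug print; the surrounding logic is unchanged. For context, a negative opt.seed is used as a Python negative index into last_seeds, so -1 reuses the most recent seed. A standalone sketch of that convention, with hypothetical seed values:

# -1 reuses the most recent seed, -2 the one before it, and so on.
last_seeds = [42, 1337, 2718]

seed = -1
try:
    seed = last_seeds[seed]                 # Python negative indexing: -1 -> 2718
    print(f'reusing previous seed {seed}')
except IndexError:
    print(f'no previous seed at position {seed}')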