change tensor length to 768 per #572

Lincoln Stein 2022-09-16 18:25:43 -04:00
parent 37e2418ee0
commit 622db491b2


@@ -82,7 +82,9 @@ class EmbeddingManager(nn.Module):
                 get_embedding_for_clip_token,
                 embedder.transformer.text_model.embeddings,
             )
-            token_dim = 1280
+            # per bug report #572
+            #token_dim = 1280
+            token_dim = 768
         else: # using LDM's BERT encoder
             self.is_clip = False
             get_token_for_string = partial(
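Note on the value: 768 is the hidden size of the CLIP ViT-L/14 text encoder used by Stable Diffusion v1, while 1280 corresponds to a larger CLIP variant. A minimal sketch (not part of this commit, assuming the encoder is a Hugging Face CLIPTextModel as the attribute path above suggests) showing how the dimension can be read from the loaded model rather than hardcoded:

from transformers import CLIPTextModel

# Load the text encoder that Stable Diffusion v1 uses (ViT-L/14).
text_encoder = CLIPTextModel.from_pretrained('openai/clip-vit-large-patch14')

# Both of these report 768 for ViT-L/14; a larger CLIP would report 1280.
token_dim = text_encoder.config.hidden_size
assert token_dim == text_encoder.text_model.embeddings.token_embedding.embedding_dim
print(token_dim)  # 768

Querying the model in this way would avoid the mismatch reported in #572 if a checkpoint with a different text-encoder width were loaded.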