From 3aa1c8d3a88205b133b31688b39648e431178016 Mon Sep 17 00:00:00 2001
From: Ryan Dick
Date: Mon, 27 May 2024 10:35:02 -0400
Subject: [PATCH] Update TextualInversionManager for compatibility with the
 latest transformers release.

See https://github.com/invoke-ai/InvokeAI/issues/6445.
---
 invokeai/backend/textual_inversion.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/invokeai/backend/textual_inversion.py b/invokeai/backend/textual_inversion.py
index 368736617b..005031c95b 100644
--- a/invokeai/backend/textual_inversion.py
+++ b/invokeai/backend/textual_inversion.py
@@ -107,7 +107,7 @@ class TextualInversionManager(BaseTextualInversionManager):
 
         # Do not exceed the max model input size. The -2 here is compensating for
         # compel.embeddings_provider.get_token_ids(), which first removes and then adds back the start and end tokens.
-        max_length = list(self.tokenizer.max_model_input_sizes.values())[0] - 2
+        max_length = self.tokenizer.model_max_length - 2
 
         if len(new_token_ids) > max_length:
             new_token_ids = new_token_ids[0:max_length]