Update TextualInversionManager for compatibility with the latest transformers release. See https://github.com/invoke-ai/InvokeAI/issues/6445.

This commit is contained in:
Ryan Dick 2024-05-27 10:35:02 -04:00 committed by Kent Keirsey
parent 994c61b67a
commit 3aa1c8d3a8

View File

@ -107,7 +107,7 @@ class TextualInversionManager(BaseTextualInversionManager):
        # Do not exceed the max model input size. The -2 here is compensating for
        # compel.embeddings_provider.get_token_ids(), which first removes and then adds back the start and end tokens.
-       max_length = list(self.tokenizer.max_model_input_sizes.values())[0] - 2
+       max_length = self.tokenizer.model_max_length - 2
        if len(new_token_ids) > max_length:
            new_token_ids = new_token_ids[0:max_length]