From a0a259eef1891baceafe06d9c266a10a4acfb183 Mon Sep 17 00:00:00 2001
From: Ryan Dick
Date: Wed, 21 Aug 2024 19:17:39 +0000
Subject: [PATCH] Fix max_seq_len field description.

---
 invokeai/app/invocations/model.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/invokeai/app/invocations/model.py b/invokeai/app/invocations/model.py
index e6beeed42d..6f54281075 100644
--- a/invokeai/app/invocations/model.py
+++ b/invokeai/app/invocations/model.py
@@ -164,7 +164,10 @@ class FluxModelLoaderOutput(BaseInvocationOutput):
     clip: CLIPField = OutputField(description=FieldDescriptions.clip, title="CLIP")
     t5_encoder: T5EncoderField = OutputField(description=FieldDescriptions.t5_encoder, title="T5 Encoder")
     vae: VAEField = OutputField(description=FieldDescriptions.vae, title="VAE")
-    max_seq_len: Literal[256, 512] = OutputField(description=FieldDescriptions.vae, title="Max Seq Length")
+    max_seq_len: Literal[256, 512] = OutputField(
+        description="The max sequence length to use for the T5 encoder. (256 for schnell transformer, 512 for dev transformer)",
+        title="Max Seq Length",
+    )
 
 
 @invocation("flux_model_loader", title="Flux Main Model", tags=["model", "flux"], category="model", version="1.0.3")