Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)
cleanup and fix kwarg

commit 768e969c90 (parent 57db66634d)
@@ -17,7 +17,7 @@ from compel.prompt_parser import (
     Fragment,
     PromptParser,
 )
-from transformers import CLIPTextModel, CLIPTokenizer
+from transformers import CLIPTokenizer
 
 from invokeai.backend.globals import Globals
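The first hunk is pure cleanup: CLIPTextModel is dropped from the transformers import, presumably because the module no longer constructs a text encoder itself but receives one from the already-loaded model (the text_encoder=text_encoder kwarg in the next hunk points the same way). A minimal sketch of the remaining usage, with an illustrative checkpoint name:

    from transformers import CLIPTokenizer

    # Only the tokenizer class is still referenced in this module; the
    # text encoder object arrives from the caller's loaded model.
    tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-large-patch14")
    print(tokenizer("a photo of a cat")["input_ids"])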
@@ -71,7 +71,7 @@ def get_uc_and_c_and_ec(
         text_encoder=text_encoder,
         textual_inversion_manager=model.textual_inversion_manager,
         dtype_for_device_getter=torch_dtype,
-        truncate_too_long_prompts=False
+        truncate_long_prompts=False
     )
 
     # get rid of any newline characters
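The second hunk is the kwarg fix from the commit title: compel's Compel constructor takes truncate_long_prompts, not truncate_too_long_prompts, so the old spelling would be rejected as an unexpected keyword argument. A minimal sketch of the corrected call, assuming a diffusers pipeline supplies the tokenizer and text encoder (the model name is illustrative):

    from compel import Compel
    from diffusers import StableDiffusionPipeline

    pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")

    # truncate_long_prompts=False keeps prompts longer than the 77-token
    # CLIP window instead of silently truncating them.
    compel = Compel(
        tokenizer=pipe.tokenizer,
        text_encoder=pipe.text_encoder,
        truncate_long_prompts=False,
    )
    conditioning = compel.build_conditioning_tensor("a photo of a cat")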
@@ -118,12 +118,12 @@ def get_prompt_structure(
     legacy_blend = try_parse_legacy_blend(
         positive_prompt_string, skip_normalize_legacy_blend
     )
-    positive_prompt: FlattenedPrompt | Blend
+    positive_prompt: Union[FlattenedPrompt, Blend]
     if legacy_blend is not None:
         positive_prompt = legacy_blend
     else:
         positive_prompt = Compel.parse_prompt_string(positive_prompt_string)
-    negative_prompt: FlattenedPrompt | Blend = Compel.parse_prompt_string(
+    negative_prompt: Union[FlattenedPrompt, Blend] = Compel.parse_prompt_string(
         negative_prompt_string
     )
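The third hunk swaps PEP 604 union syntax (FlattenedPrompt | Blend) for typing.Union, presumably for compatibility with interpreters older than Python 3.10, where X | Y between types fails whenever the annotation is actually evaluated (for example, in a function signature). A self-contained illustration:

    from typing import Union

    # On Python < 3.10 this definition fails at import time with
    # "TypeError: unsupported operand type(s) for |", because parameter
    # annotations are evaluated when the def executes:
    #
    #     def parse(prompt: int | str) -> str: ...
    #
    # The typing.Union spelling means the same thing and works on 3.8+:
    def parse(prompt: Union[int, str]) -> str:
        return str(prompt)

    print(parse(3))  # -> "3"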
@@ -38,7 +38,7 @@ dependencies = [
     "albumentations",
     "click",
     "clip_anytorch",  # replacing "clip @ https://github.com/openai/CLIP/archive/eaa22acb90a5876642d0507623e859909230a52d.zip",
-    "compel==0.1.7",
+    "compel @ https://github.com/damian0815/compel/archive/no_max_token_limit.zip",
     "datasets",
     "diffusers[torch]~=0.14",
     "dnspython==2.2.1",
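The final hunk replaces the compel==0.1.7 pin in the project's dependency list with a PEP 508 direct URL reference, so installs pull damian0815's no_max_token_limit archive, presumably carrying the long-prompt support exercised above, instead of the PyPI release. After installation you can confirm which compel distribution is present, for example:

    # pip install "compel @ https://github.com/damian0815/compel/archive/no_max_token_limit.zip"
    from importlib import metadata

    # The version string comes from compel's own packaging metadata,
    # whether it was installed from PyPI or from a URL archive.
    print(metadata.version("compel"))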