cleanup and fix kwarg

damian 2023-03-08 18:00:54 +01:00
parent 57db66634d
commit 768e969c90
2 changed files with 5 additions and 5 deletions


@@ -17,7 +17,7 @@ from compel.prompt_parser import (
     Fragment,
     PromptParser,
 )
-from transformers import CLIPTextModel, CLIPTokenizer
+from transformers import CLIPTokenizer
 from invokeai.backend.globals import Globals
@@ -71,7 +71,7 @@ def get_uc_and_c_and_ec(
         text_encoder=text_encoder,
         textual_inversion_manager=model.textual_inversion_manager,
         dtype_for_device_getter=torch_dtype,
-        truncate_too_long_prompts=False
+        truncate_long_prompts=False
     )
     # get rid of any newline characters
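
Note on the kwarg fix: compel's Compel constructor accepts truncate_long_prompts, so the earlier truncate_too_long_prompts spelling would have raised a TypeError when the constructor was called. A minimal sketch of the corrected call, assuming a CLIP tokenizer and text encoder are already loaded (the tokenizer variable and the example prompt are illustrative; the other argument names come from the hunk above):

    from compel import Compel

    compel = Compel(
        tokenizer=tokenizer,            # a CLIPTokenizer instance (assumed loaded)
        text_encoder=text_encoder,      # the matching CLIP text encoder
        truncate_long_prompts=False,    # keep tokens past the usual 77-token limit
    )
    conditioning = compel.build_conditioning_tensor("a forest at dawn")
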
@@ -118,12 +118,12 @@ def get_prompt_structure(
     legacy_blend = try_parse_legacy_blend(
         positive_prompt_string, skip_normalize_legacy_blend
     )
-    positive_prompt: FlattenedPrompt | Blend
+    positive_prompt: Union[FlattenedPrompt, Blend]
     if legacy_blend is not None:
         positive_prompt = legacy_blend
     else:
         positive_prompt = Compel.parse_prompt_string(positive_prompt_string)
-    negative_prompt: FlattenedPrompt | Blend = Compel.parse_prompt_string(
+    negative_prompt: Union[FlattenedPrompt, Blend] = Compel.parse_prompt_string(
         negative_prompt_string
     )
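
Note on the annotation change: the X | Y union syntax (PEP 604) only works on Python 3.10+; on 3.9 and earlier, evaluating such an annotation raises TypeError: unsupported operand type(s) for |. Switching to typing.Union keeps the module importable on older interpreters, presumably the reason for the change here, and requires Union to be imported from typing. A quick illustration:

    from typing import Union

    prompt: Union[str, list] = "a castle"   # works on any modern Python
    # prompt: str | list = "a castle"       # TypeError before Python 3.10
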


@@ -38,7 +38,7 @@ dependencies = [
     "albumentations",
     "click",
     "clip_anytorch",  # replacing "clip @ https://github.com/openai/CLIP/archive/eaa22acb90a5876642d0507623e859909230a52d.zip",
-    "compel==0.1.7",
+    "compel @ https://github.com/damian0815/compel/archive/no_max_token_limit.zip",
     "datasets",
     "diffusers[torch]~=0.14",
     "dnspython==2.2.1",