diff --git a/invokeai/app/invocations/compel.py b/invokeai/app/invocations/compel.py
index ca6ca644bb..771c811eea 100644
--- a/invokeai/app/invocations/compel.py
+++ b/invokeai/app/invocations/compel.py
@@ -3,9 +3,8 @@ from typing import Iterator, List, Optional, Tuple, Union
 import torch
 from compel import Compel, ReturnedEmbeddingsType
 from compel.prompt_parser import Blend, Conjunction, CrossAttentionControlSubstitute, FlattenedPrompt, Fragment
-from transformers import CLIPTokenizer, CLIPTextModel
+from transformers import CLIPTextModel, CLIPTokenizer
 
-import invokeai.backend.util.logging as logger
 from invokeai.app.invocations.fields import (
     FieldDescriptions,
     Input,
@@ -14,11 +13,9 @@ from invokeai.app.invocations.fields import (
     UIComponent,
 )
 from invokeai.app.invocations.primitives import ConditioningOutput
-from invokeai.app.services.model_records import UnknownModelException
 from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.app.util.ti_utils import generate_ti_list
 from invokeai.backend.lora import LoRAModelRaw
-from invokeai.backend.model_manager.config import ModelType
 from invokeai.backend.model_patcher import ModelPatcher
 from invokeai.backend.stable_diffusion.diffusion.conditioning_data import (
     BasicConditioningInfo,
@@ -26,7 +23,6 @@
     ExtraConditioningInfo,
     SDXLConditioningInfo,
 )
-from invokeai.backend.textual_inversion import TextualInversionModelRaw
 from invokeai.backend.util.devices import torch_dtype
 
 from .baseinvocation import (
diff --git a/invokeai/app/util/ti_utils.py b/invokeai/app/util/ti_utils.py
index c2645e0702..0d803408fd 100644
--- a/invokeai/app/util/ti_utils.py
+++ b/invokeai/app/util/ti_utils.py
@@ -1,11 +1,11 @@
 import re
 from typing import List, Tuple
 
+import invokeai.backend.util.logging as logger
+from invokeai.app.services.model_records import UnknownModelException
+from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.backend.model_manager.config import BaseModelType, ModelType
 from invokeai.backend.textual_inversion import TextualInversionModelRaw
-from invokeai.app.services.shared.invocation_context import InvocationContext
-from invokeai.app.services.model_records import UnknownModelException
-import invokeai.backend.util.logging as logger
 
 
 def extract_ti_triggers_from_prompt(prompt: str) -> List[str]:
@@ -41,4 +41,4 @@ def generate_ti_list(prompt: str, base: BaseModelType, context: InvocationContex
                 logger.warning(f'trigger: "{trigger}" not a valid textual inversion model for this graph')
         except Exception:
             logger.warning(f'Failed to load TI model for trigger: "{trigger}"')
-    return ti_list
\ No newline at end of file
+    return ti_list