Ruff check

Brandon Rising authored 2024-02-27 15:20:55 -05:00, committed by psychedelicious
parent ca9b815c89
commit f475b78734
2 changed files with 5 additions and 9 deletions
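
The changes below look like the output of `ruff check --fix`: imports left unused are deleted, and import blocks are re-sorted under Ruff's isort (I) rules, assuming those rules are enabled in the project's Ruff configuration. A minimal sketch of the two finding classes involved, using a hypothetical module rather than InvokeAI code:

# Hypothetical module, not InvokeAI code: a minimal sketch of the two kinds
# of findings that `ruff check --fix` resolves in this commit.
from typing import List
import re   # I001: unsorted block; under Ruff's default isort settings the
            # fix places plain "import" statements before "from" imports
import os   # F401: imported but never used; the fix deletes this line


def extract_words(prompt: str) -> List[str]:
    """Return the bare words in a prompt string."""
    return re.findall(r"\w+", prompt)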


@@ -3,9 +3,8 @@ from typing import Iterator, List, Optional, Tuple, Union
 import torch
 from compel import Compel, ReturnedEmbeddingsType
 from compel.prompt_parser import Blend, Conjunction, CrossAttentionControlSubstitute, FlattenedPrompt, Fragment
-from transformers import CLIPTokenizer, CLIPTextModel
+from transformers import CLIPTextModel, CLIPTokenizer
-import invokeai.backend.util.logging as logger
 from invokeai.app.invocations.fields import (
     FieldDescriptions,
     Input,
@@ -14,11 +13,9 @@ from invokeai.app.invocations.fields import (
     UIComponent,
 )
 from invokeai.app.invocations.primitives import ConditioningOutput
-from invokeai.app.services.model_records import UnknownModelException
 from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.app.util.ti_utils import generate_ti_list
 from invokeai.backend.lora import LoRAModelRaw
-from invokeai.backend.model_manager.config import ModelType
 from invokeai.backend.model_patcher import ModelPatcher
 from invokeai.backend.stable_diffusion.diffusion.conditioning_data import (
     BasicConditioningInfo,
@@ -26,7 +23,6 @@ from invokeai.backend.stable_diffusion.diffusion.conditioning_data import (
     ExtraConditioningInfo,
     SDXLConditioningInfo,
 )
-from invokeai.backend.textual_inversion import TextualInversionModelRaw
 from invokeai.backend.util.devices import torch_dtype
 from .baseinvocation import (


@@ -1,11 +1,11 @@
 import re
 from typing import List, Tuple
+import invokeai.backend.util.logging as logger
+from invokeai.app.services.model_records import UnknownModelException
+from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.backend.model_manager.config import BaseModelType, ModelType
 from invokeai.backend.textual_inversion import TextualInversionModelRaw
-from invokeai.app.services.shared.invocation_context import InvocationContext
-from invokeai.app.services.model_records import UnknownModelException
-import invokeai.backend.util.logging as logger
 def extract_ti_triggers_from_prompt(prompt: str) -> List[str]:
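
After this reorder, the second file's import header should read as follows (reconstructed from the hunk above; the blank-line placement between the stdlib and first-party groups is an assumption):

import re
from typing import List, Tuple

import invokeai.backend.util.logging as logger
from invokeai.app.services.model_records import UnknownModelException
from invokeai.app.services.shared.invocation_context import InvocationContext
from invokeai.backend.model_manager.config import BaseModelType, ModelType
from invokeai.backend.textual_inversion import TextualInversionModelRaw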