Mirror of https://github.com/invoke-ai/InvokeAI, synced 2024-08-30 20:32:17 +00:00
Ruff check
commit f475b78734 · parent ca9b815c89
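
The diff below is the auto-fix output of a Ruff lint pass over two Python modules: unused imports are removed (rule F401) and the remaining imports are re-sorted (rule I001). A minimal sketch of reproducing such a pass locally, assuming Ruff is installed and configured the way the project expects (the exact version and settings are not part of this commit):

import subprocess

# Run Ruff with auto-fix over the working tree. F401 (unused import) and
# I001 (unsorted imports) are the auto-fixable rules whose output matches
# the import changes in this commit.
subprocess.run(["ruff", "check", "--fix", "."], check=True)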
@@ -3,9 +3,8 @@ from typing import Iterator, List, Optional, Tuple, Union
 import torch
 from compel import Compel, ReturnedEmbeddingsType
 from compel.prompt_parser import Blend, Conjunction, CrossAttentionControlSubstitute, FlattenedPrompt, Fragment
-from transformers import CLIPTokenizer, CLIPTextModel
+from transformers import CLIPTextModel, CLIPTokenizer
 
-import invokeai.backend.util.logging as logger
 from invokeai.app.invocations.fields import (
     FieldDescriptions,
     Input,
@@ -14,11 +13,9 @@ from invokeai.app.invocations.fields import (
     UIComponent,
 )
 from invokeai.app.invocations.primitives import ConditioningOutput
-from invokeai.app.services.model_records import UnknownModelException
 from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.app.util.ti_utils import generate_ti_list
 from invokeai.backend.lora import LoRAModelRaw
-from invokeai.backend.model_manager.config import ModelType
 from invokeai.backend.model_patcher import ModelPatcher
 from invokeai.backend.stable_diffusion.diffusion.conditioning_data import (
     BasicConditioningInfo,
@@ -26,7 +23,6 @@ from invokeai.backend.stable_diffusion.diffusion.conditioning_data import (
     ExtraConditioningInfo,
     SDXLConditioningInfo,
 )
-from invokeai.backend.textual_inversion import TextualInversionModelRaw
 from invokeai.backend.util.devices import torch_dtype
 
 from .baseinvocation import (

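The imports removed above are ones this module no longer references directly; it still pulls generate_ti_list from invokeai.app.util.ti_utils, and all four dropped names (logger, UnknownModelException, ModelType, TextualInversionModelRaw) appear in the import block of the second file below. A narrower sketch that previews only these fixes for a single file, with a hypothetical path since the commit view here does not show file names:

import subprocess

# Show the would-be fixes as a diff instead of applying them, restricted to
# unused-import (F401) and import-sorting (I001) rules. The file path is an
# assumption for illustration only.
subprocess.run(
    ["ruff", "check", "--select", "F401,I001", "--diff", "invokeai/app/invocations/compel.py"],
    check=False,
)
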
@@ -1,11 +1,11 @@
 import re
 from typing import List, Tuple
 
+import invokeai.backend.util.logging as logger
+from invokeai.app.services.model_records import UnknownModelException
+from invokeai.app.services.shared.invocation_context import InvocationContext
 from invokeai.backend.model_manager.config import BaseModelType, ModelType
 from invokeai.backend.textual_inversion import TextualInversionModelRaw
-from invokeai.app.services.shared.invocation_context import InvocationContext
-from invokeai.app.services.model_records import UnknownModelException
-import invokeai.backend.util.logging as logger
 
 
 def extract_ti_triggers_from_prompt(prompt: str) -> List[str]:
@@ -41,4 +41,4 @@ def generate_ti_list(prompt: str, base: BaseModelType, context: InvocationContex
             logger.warning(f'trigger: "{trigger}" not a valid textual inversion model for this graph')
         except Exception:
             logger.warning(f'Failed to load TI model for trigger: "{trigger}"')
-    return ti_list
+    return ti_list
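
The final hunk sits at the tail of generate_ti_list, which resolves textual-inversion trigger tokens found in a prompt into loaded TI models and logs a warning for any trigger it cannot resolve. A minimal sketch of the trigger extraction that feeds it, assuming angle-bracketed tokens are the trigger syntax; the actual pattern inside extract_ti_triggers_from_prompt is not shown in this commit:

import re
from typing import List

def extract_ti_triggers_sketch(prompt: str) -> List[str]:
    # Treat "<name>" style tokens as textual-inversion triggers; the helper
    # in invokeai.app.util.ti_utils may use a stricter pattern.
    return re.findall(r"<[^>]+>", prompt)

print(extract_ti_triggers_sketch("a portrait of <easynegative> style"))
# ['<easynegative>']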