resolve some undefined symbols in model_cache

This commit is contained in:
Lincoln Stein
2023-05-18 14:31:47 -04:00
223 changed files with 3236 additions and 8781 deletions

View File

@@ -5,10 +5,8 @@ from .baseinvocation import BaseInvocation, BaseInvocationOutput, InvocationCont
from .model import ClipField
from ...backend.util.devices import choose_torch_device, torch_dtype
from ...backend.util.devices import torch_dtype
from ...backend.stable_diffusion.diffusion import InvokeAIDiffuserComponent
from ...backend.stable_diffusion.textual_inversion_manager import TextualInversionManager
from ...backend.model_management import SDModelType
from compel import Compel
from compel.prompt_parser import (
@@ -18,8 +16,6 @@ from compel.prompt_parser import (
Fragment,
)
from invokeai.backend.globals import Globals
class ConditioningField(BaseModel):
conditioning_name: Optional[str] = Field(default=None, description="The name of conditioning data")
@@ -91,7 +87,7 @@ class CompelInvocation(BaseInvocation):
prompt: Union[FlattenedPrompt, Blend] = Compel.parse_prompt_string(self.prompt)
if getattr(Globals, "log_tokenization", False):
if context.services.configuration.log_tokenization:
log_tokenization_for_prompt_object(prompt, tokenizer)
c, options = compel.build_conditioning_tensor_for_prompt_object(prompt)