all files migrated; tweaks needed

Lincoln Stein
2023-03-03 00:02:15 -05:00
parent 3f0b0f3250
commit 6a990565ff
496 changed files with 276 additions and 934 deletions
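Most of the 496 files touched here change only import paths, moving code off the legacy ldm.invoke package and onto invokeai.backend. A sweep like that is usually scripted rather than hand-edited; below is a minimal sketch of such a rewriter. The rewrite_imports helper and the mapping table are hypothetical — only the Globals entry is taken from this diff.

import re
from pathlib import Path

# Old-path -> new-path pairs. Only the Globals entry comes from this
# commit; a real table would list every module that moved.
IMPORT_REWRITES = {
    r"\bldm\.invoke\.globals\b": "invokeai.backend.globals",
}

def rewrite_imports(root: str) -> None:
    # Rewrite matching import paths in every Python file under root.
    for path in Path(root).rglob("*.py"):
        text = path.read_text()
        new_text = text
        for old, new in IMPORT_REWRITES.items():
            new_text = re.sub(old, new, new_text)
        if new_text != text:
            path.write_text(new_text)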

@@ -15,7 +15,7 @@ from torch import nn
 from compel.cross_attention_control import Arguments
 from diffusers.models.unet_2d_condition import UNet2DConditionModel
 from diffusers.models.cross_attention import AttnProcessor
-from ...devices import torch_dtype
+from ...util import torch_dtype
 class CrossAttentionType(enum.Enum):
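This hunk moves the torch_dtype helper out of a relative devices module and into util. The helper's body is not part of the diff; a plausible sketch of what such a function does (an assumption, not the actual InvokeAI code):

import torch

def torch_dtype(device: torch.device) -> torch.dtype:
    # Sketch: half precision is the usual default on CUDA,
    # full float32 everywhere else (CPU, MPS).
    return torch.float16 if device.type == "cuda" else torch.float32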

@@ -23,7 +23,7 @@ from omegaconf import ListConfig
 import urllib
 from ..textual_inversion_manager import TextualInversionManager
-from ...util import (
+from ...util.util import (
     log_txt_as_img,
     exists,
     default,
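Here log_txt_as_img, exists, and default move down one level, from the util package to its util.util submodule. When a module is nested like this mid-migration, a common bridge is to re-export the names from the package __init__ so both spellings keep resolving; a hypothetical sketch (this layout is assumed, not confirmed by the diff):

# invokeai/backend/util/__init__.py -- hypothetical re-export shim.
# Lets both `from ...util import exists` and
# `from ...util.util import exists` work during the migration.
from .util import log_txt_as_img, exists, default

__all__ = ["log_txt_as_img", "exists", "default"]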

@@ -4,7 +4,7 @@ import torch
 import numpy as np
 from tqdm import tqdm
 from functools import partial
-from ...devices import choose_torch_device
+from ...util import choose_torch_device
 from .shared_invokeai_diffusion import InvokeAIDiffuserComponent
 from .sampler import Sampler
 from ..diffusionmodules.util import noise_like

@@ -7,7 +7,7 @@ import torch
 import numpy as np
 from tqdm import tqdm
 from functools import partial
-from ...devices import choose_torch_device
+from ...util import choose_torch_device
 from .shared_invokeai_diffusion import InvokeAIDiffuserComponent
 from ..diffusionmodules.util import (
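The two hunks above make the identical change in two sampler modules: choose_torch_device also now lives in util rather than devices. Its implementation is not shown in this diff; a typical device-selection helper looks roughly like this (assumed body):

import torch

def choose_torch_device() -> torch.device:
    # Prefer CUDA, then Apple's Metal backend, then fall back to CPU.
    if torch.cuda.is_available():
        return torch.device("cuda")
    if hasattr(torch.backends, "mps") and torch.backends.mps.is_available():
        return torch.device("mps")
    return torch.device("cpu")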

@@ -8,7 +8,7 @@ import torch
 from diffusers.models.cross_attention import AttnProcessor
 from typing_extensions import TypeAlias
-from ldm.invoke.globals import Globals
+from invokeai.backend.globals import Globals
 from .cross_attention_control import Arguments, \
     restore_default_cross_attention, override_cross_attention, Context, get_cross_attention_modules, \
     CrossAttentionType, SwapCrossAttnContext
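Finally, the Globals configuration object is imported from its new home in invokeai.backend instead of the legacy ldm.invoke package. If anything outside the repo still uses the old path, a one-line shim in the legacy module would keep it alive during the transition — hypothetical, as the diff does not show whether this commit adds one:

# ldm/invoke/globals.py -- hypothetical compatibility shim.
# Keeps the old import path working while callers migrate.
from invokeai.backend.globals import Globals  # noqa: F401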