bump to diffusers 0.15.1, remove dangling module

Lincoln Stein 2023-04-18 19:20:38 -04:00
parent 47b9910b48
commit bd8ffd36bf
2 changed files with 1 addition and 5 deletions


@@ -11,7 +11,6 @@ import psutil
 import torch
 from compel.cross_attention_control import Arguments
 from diffusers.models.attention_processor import AttentionProcessor
-from diffusers.models.unet_2d_condition import UNet2DConditionModel
 from torch import nn
 from ...util import torch_dtype
@@ -408,12 +407,9 @@ def override_cross_attention(model, context: Context, is_running_diffusers=False
 def get_cross_attention_modules(
     model, which: CrossAttentionType
 ) -> list[tuple[str, InvokeAICrossAttentionMixin]]:
-    from ldm.modules.attention import CrossAttention # avoid circular import - TODO: rename as in diffusers?
     cross_attention_class: type = (
         InvokeAIDiffusersCrossAttention
-        if isinstance(model, UNet2DConditionModel)
-        else CrossAttention
     )
     which_attn = "attn1" if which is CrossAttentionType.SELF else "attn2"
     attention_module_tuples = [
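
For orientation, here is a minimal sketch of the selection logic after this hunk; the enum values, the stub class, and the module-matching comprehension are illustrative stand-ins, not the verbatim InvokeAI source:

    # Illustrative sketch only: with the legacy ldm.modules.attention fallback
    # removed, the diffusers-based attention class is selected unconditionally.
    from enum import Enum

    from torch import nn


    class CrossAttentionType(Enum):
        SELF = 1
        TOKENS = 2


    class InvokeAIDiffusersCrossAttention(nn.Module):  # stand-in for the real mixin subclass
        pass


    def get_cross_attention_modules(model: nn.Module, which: CrossAttentionType):
        cross_attention_class: type = InvokeAIDiffusersCrossAttention
        which_attn = "attn1" if which is CrossAttentionType.SELF else "attn2"
        # Pair each matching submodule with its dotted attribute path,
        # mirroring the attention_module_tuples list the hunk truncates above.
        return [
            (name, module)
            for name, module in model.named_modules()
            if name.endswith(which_attn) and isinstance(module, cross_attention_class)
        ]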


@@ -40,7 +40,7 @@ dependencies = [
     "clip_anytorch", # replacing "clip @ https://github.com/openai/CLIP/archive/eaa22acb90a5876642d0507623e859909230a52d.zip",
     "compel==1.0.5",
     "datasets",
-    "diffusers[torch]==0.15.*",
+    "diffusers[torch]==0.15.1",
     "dnspython==2.2.1",
     "einops",
     "eventlet",