Add dpmpp_sde and dpmpp_2m_sde schedulers (with karras)

Sergey Borisov
2023-06-18 23:38:15 +03:00
parent f312e1448f
commit f3d9797ebe
8 changed files with 35 additions and 6 deletions
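
For context (the scheduler registration lives in one of the changed files not shown below), a minimal sketch of how names like dpmpp_2m_sde and its Karras variant can be wired to diffusers scheduler classes. The SCHEDULER_MAP dict, the "_k" suffix, and make_scheduler are illustrative assumptions, not this commit's code; algorithm_type="sde-dpmsolver++" and use_karras_sigmas are real DPMSolverMultistepScheduler config options, and dpmpp_sde may be backed by a different scheduler class entirely.

    # Hypothetical sketch: mapping scheduler names to diffusers classes plus
    # the config overrides that turn on the SDE algorithm and Karras sigmas.
    from diffusers import DPMSolverMultistepScheduler

    SCHEDULER_MAP = {
        "dpmpp_2m_sde": (DPMSolverMultistepScheduler,
                         dict(algorithm_type="sde-dpmsolver++")),
        "dpmpp_2m_sde_k": (DPMSolverMultistepScheduler,
                           dict(algorithm_type="sde-dpmsolver++",
                                use_karras_sigmas=True)),
    }

    def make_scheduler(name: str, base_config: dict):
        """Build a scheduler from an existing pipeline's scheduler config."""
        scheduler_class, overrides = SCHEDULER_MAP[name]
        # from_config keeps the model's trained betas etc. and applies overrides
        return scheduler_class.from_config(base_config, **overrides)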


@@ -7,7 +7,7 @@ import einops
 from pydantic import BaseModel, Field, validator
 import torch
-from diffusers import ControlNetModel
+from diffusers import ControlNetModel, DPMSolverMultistepScheduler
 from diffusers.image_processor import VaeImageProcessor
 from diffusers.schedulers import SchedulerMixin as Scheduler
@@ -222,6 +222,15 @@ class TextToLatentsInvocation(BaseInvocation):
         c, extra_conditioning_info = context.services.latents.get(self.positive_conditioning.conditioning_name)
         uc, _ = context.services.latents.get(self.negative_conditioning.conditioning_name)
+        custom_args = dict(
+            eta=0.0, #ddim_eta
+        )
+        if type(scheduler) is DPMSolverMultistepScheduler and scheduler.config.algorithm_type in ["sde-dpmsolver", "sde-dpmsolver++"]:
+            custom_args.update(
+                generator=torch.Generator(device=uc.device).manual_seed(0),
+            )
         conditioning_data = ConditioningData(
             unconditioned_embeddings=uc,
             text_embeddings=c,
@@ -233,7 +242,7 @@ class TextToLatentsInvocation(BaseInvocation):
                 h_symmetry_time_pct=None,#h_symmetry_time_pct,
                 v_symmetry_time_pct=None#v_symmetry_time_pct,
             ),
-        ).add_scheduler_args_if_applicable(scheduler, eta=0.0)#ddim_eta)
+        ).add_scheduler_args_if_applicable(scheduler, **custom_args)
         return conditioning_data

     def create_pipeline(self, unet, scheduler) -> StableDiffusionGeneratorPipeline:
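
Why the seeded generator: diffusers' DPMSolverMultistepScheduler draws fresh noise on every step when algorithm_type is "sde-dpmsolver" or "sde-dpmsolver++", and its step() accepts a generator for that noise, so seeding it makes SDE sampling reproducible. A minimal sketch of the logic added above, pulled out as a standalone helper; the helper name, the device argument, and the fixed seed of 0 are illustrative, and the caller is expected to drop kwargs the scheduler's step() does not accept, as add_scheduler_args_if_applicable does in the diff.

    import torch
    from diffusers import DPMSolverMultistepScheduler

    def extra_scheduler_kwargs(scheduler, device="cpu", seed=0):
        # eta only applies to DDIM-style schedulers; the caller is expected to
        # filter out kwargs that the scheduler's step() signature does not take.
        kwargs = dict(eta=0.0)
        if (
            isinstance(scheduler, DPMSolverMultistepScheduler)
            and scheduler.config.algorithm_type in ("sde-dpmsolver", "sde-dpmsolver++")
        ):
            # SDE solver variants inject noise at each step; a seeded generator
            # makes that extra randomness reproducible for a given seed.
            kwargs["generator"] = torch.Generator(device=device).manual_seed(seed)
        return kwargs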