From f3d9797ebeb924069827ad5b607c672d551f36b5 Mon Sep 17 00:00:00 2001
From: Sergey Borisov
Date: Sun, 18 Jun 2023 23:38:15 +0300
Subject: [PATCH 1/2] Add dpmpp_sde and dpmpp_2m_sde schedulers(with karras)

---
 invokeai/app/invocations/latent.py | 13 +++++++++++--
 invokeai/backend/install/legacy_arg_parsing.py | 4 ++++
 .../stable_diffusion/schedulers/schedulers.py | 6 +++++-
 invokeai/backend/web/modules/parameters.py | 4 ++++
 invokeai/frontend/web/src/app/constants.ts | 8 ++++++++
 .../src/services/api/models/InpaintInvocation.ts | 2 +-
 .../api/models/LatentsToLatentsInvocation.ts | 2 +-
 .../services/api/models/TextToLatentsInvocation.ts | 2 +-
 8 files changed, 35 insertions(+), 6 deletions(-)

diff --git a/invokeai/app/invocations/latent.py b/invokeai/app/invocations/latent.py
index cf216e6c54..1a448edca8 100644
--- a/invokeai/app/invocations/latent.py
+++ b/invokeai/app/invocations/latent.py
@@ -7,7 +7,7 @@ import einops
 from pydantic import BaseModel, Field, validator
 import torch

-from diffusers import ControlNetModel
+from diffusers import ControlNetModel, DPMSolverMultistepScheduler
 from diffusers.image_processor import VaeImageProcessor
 from diffusers.schedulers import SchedulerMixin as Scheduler

@@ -222,6 +222,15 @@ class TextToLatentsInvocation(BaseInvocation):
         c, extra_conditioning_info = context.services.latents.get(self.positive_conditioning.conditioning_name)
         uc, _ = context.services.latents.get(self.negative_conditioning.conditioning_name)

+        custom_args = dict(
+            eta=0.0, #ddim_eta
+        )
+
+        if type(scheduler) is DPMSolverMultistepScheduler and scheduler.config.algorithm_type in ["sde-dpmsolver", "sde-dpmsolver++"]:
+            custom_args.update(
+                generator=torch.Generator(device=uc.device).manual_seed(0),
+            )
+
         conditioning_data = ConditioningData(
             unconditioned_embeddings=uc,
             text_embeddings=c,
@@ -233,7 +242,7 @@ class TextToLatentsInvocation(BaseInvocation):
                 h_symmetry_time_pct=None,#h_symmetry_time_pct,
                 v_symmetry_time_pct=None#v_symmetry_time_pct,
             ),
-        ).add_scheduler_args_if_applicable(scheduler, eta=0.0)#ddim_eta)
+        ).add_scheduler_args_if_applicable(scheduler, **custom_args)
         return conditioning_data

     def create_pipeline(self, unet, scheduler) -> StableDiffusionGeneratorPipeline:
diff --git a/invokeai/backend/install/legacy_arg_parsing.py b/invokeai/backend/install/legacy_arg_parsing.py
index b4f3ab1186..4a58ff8336 100644
--- a/invokeai/backend/install/legacy_arg_parsing.py
+++ b/invokeai/backend/install/legacy_arg_parsing.py
@@ -22,6 +22,10 @@ SAMPLER_CHOICES = [
     "dpmpp_2s_k",
     "dpmpp_2m",
     "dpmpp_2m_k",
+    "dpmpp_2m_sde",
+    "dpmpp_2m_sde_k",
+    "dpmpp_sde",
+    "dpmpp_sde_k",
     "unipc",
 ]

diff --git a/invokeai/backend/stable_diffusion/schedulers/schedulers.py b/invokeai/backend/stable_diffusion/schedulers/schedulers.py
index d8da143962..77c45d5eb8 100644
--- a/invokeai/backend/stable_diffusion/schedulers/schedulers.py
+++ b/invokeai/backend/stable_diffusion/schedulers/schedulers.py
@@ -1,7 +1,7 @@
 from diffusers import DDIMScheduler, DPMSolverMultistepScheduler, KDPM2DiscreteScheduler, \
     KDPM2AncestralDiscreteScheduler, EulerDiscreteScheduler, EulerAncestralDiscreteScheduler, \
     HeunDiscreteScheduler, LMSDiscreteScheduler, PNDMScheduler, UniPCMultistepScheduler, \
-    DPMSolverSinglestepScheduler, DEISMultistepScheduler, DDPMScheduler
+    DPMSolverSinglestepScheduler, DEISMultistepScheduler, DDPMScheduler, DPMSolverSDEScheduler

 SCHEDULER_MAP = dict(
     ddim=(DDIMScheduler, dict()),
@@ -21,5 +21,9 @@ SCHEDULER_MAP = dict(
     dpmpp_2s_k=(DPMSolverSinglestepScheduler, dict(use_karras_sigmas=True)),
     dpmpp_2m=(DPMSolverMultistepScheduler, dict(use_karras_sigmas=False)),
     dpmpp_2m_k=(DPMSolverMultistepScheduler, dict(use_karras_sigmas=True)),
+    dpmpp_2m_sde=(DPMSolverMultistepScheduler, dict(use_karras_sigmas=False, algorithm_type='sde-dpmsolver++')),
+    dpmpp_2m_sde_k=(DPMSolverMultistepScheduler, dict(use_karras_sigmas=True, algorithm_type='sde-dpmsolver++')),
+    dpmpp_sde=(DPMSolverSDEScheduler, dict(use_karras_sigmas=False, noise_sampler_seed=0)),
+    dpmpp_sde_k=(DPMSolverSDEScheduler, dict(use_karras_sigmas=True, noise_sampler_seed=0)),
     unipc=(UniPCMultistepScheduler, dict(cpu_only=True))
 )
diff --git a/invokeai/backend/web/modules/parameters.py b/invokeai/backend/web/modules/parameters.py
index 9b00093a44..440f21a947 100644
--- a/invokeai/backend/web/modules/parameters.py
+++ b/invokeai/backend/web/modules/parameters.py
@@ -20,6 +20,10 @@ SAMPLER_CHOICES = [
     "dpmpp_2s_k",
     "dpmpp_2m",
     "dpmpp_2m_k",
+    "dpmpp_2m_sde",
+    "dpmpp_2m_sde_k",
+    "dpmpp_sde",
+    "dpmpp_sde_k",
     "unipc",
 ]

diff --git a/invokeai/frontend/web/src/app/constants.ts b/invokeai/frontend/web/src/app/constants.ts
index db5fea4a66..5fd413d915 100644
--- a/invokeai/frontend/web/src/app/constants.ts
+++ b/invokeai/frontend/web/src/app/constants.ts
@@ -9,6 +9,8 @@ export const SCHEDULER_NAMES_AS_CONST = [
   'ddpm',
   'dpmpp_2s',
   'dpmpp_2m',
+  'dpmpp_2m_sde',
+  'dpmpp_sde',
   'heun',
   'kdpm_2',
   'lms',
@@ -17,6 +19,8 @@
   'euler_k',
   'dpmpp_2s_k',
   'dpmpp_2m_k',
+  'dpmpp_2m_sde_k',
+  'dpmpp_sde_k',
   'heun_k',
   'lms_k',
   'euler_a',
@@ -32,16 +36,20 @@ export const SCHEDULER_LABEL_MAP: Record = {
   deis: 'DEIS',
   ddim: 'DDIM',
   ddpm: 'DDPM',
+  dpmpp_sde: 'DPM++ SDE',
   dpmpp_2s: 'DPM++ 2S',
   dpmpp_2m: 'DPM++ 2M',
+  dpmpp_2m_sde: 'DPM++ 2M SDE',
   heun: 'Heun',
   kdpm_2: 'KDPM 2',
   lms: 'LMS',
   pndm: 'PNDM',
   unipc: 'UniPC',
   euler_k: 'Euler Karras',
+  dpmpp_sde_k: 'DPM++ SDE Karras',
   dpmpp_2s_k: 'DPM++ 2S Karras',
   dpmpp_2m_k: 'DPM++ 2M Karras',
+  dpmpp_2m_sde_k: 'DPM++ 2M SDE Karras',
   heun_k: 'Heun Karras',
   lms_k: 'LMS Karras',
   euler_a: 'Euler Ancestral',
diff --git a/invokeai/frontend/web/src/services/api/models/InpaintInvocation.ts b/invokeai/frontend/web/src/services/api/models/InpaintInvocation.ts
index 7eb0039c87..6527508237 100644
--- a/invokeai/frontend/web/src/services/api/models/InpaintInvocation.ts
+++ b/invokeai/frontend/web/src/services/api/models/InpaintInvocation.ts
@@ -45,7 +45,7 @@ export type InpaintInvocation = {
   /**
    * The scheduler to use
    */
-  scheduler?: 'ddim' | 'ddpm' | 'deis' | 'lms' | 'lms_k' | 'pndm' | 'heun' | 'heun_k' | 'euler' | 'euler_k' | 'euler_a' | 'kdpm_2' | 'kdpm_2_a' | 'dpmpp_2s' | 'dpmpp_2s_k' | 'dpmpp_2m' | 'dpmpp_2m_k' | 'unipc';
+  scheduler?: 'ddim' | 'ddpm' | 'deis' | 'lms' | 'lms_k' | 'pndm' | 'heun' | 'heun_k' | 'euler' | 'euler_k' | 'euler_a' | 'kdpm_2' | 'kdpm_2_a' | 'dpmpp_2s' | 'dpmpp_2s_k' | 'dpmpp_2m' | 'dpmpp_2m_k' | 'dpmpp_2m_sde' | 'dpmpp_2m_sde_k' | 'dpmpp_sde' | 'dpmpp_sde_k' | 'unipc';
   /**
    * The model to use (currently ignored)
    */
diff --git a/invokeai/frontend/web/src/services/api/models/LatentsToLatentsInvocation.ts b/invokeai/frontend/web/src/services/api/models/LatentsToLatentsInvocation.ts
index 174d368178..d5f2915bf2 100644
--- a/invokeai/frontend/web/src/services/api/models/LatentsToLatentsInvocation.ts
+++ b/invokeai/frontend/web/src/services/api/models/LatentsToLatentsInvocation.ts
@@ -42,7 +42,7 @@ export type LatentsToLatentsInvocation = {
   /**
    * The scheduler to use
    */
-  scheduler?: 'ddim' | 'ddpm' | 'deis' | 'lms' | 'lms_k' | 'pndm' | 'heun' | 'heun_k' | 'euler' | 'euler_k' | 'euler_a' | 'kdpm_2' | 'kdpm_2_a' | 'dpmpp_2s' | 'dpmpp_2s_k' | 'dpmpp_2m' | 'dpmpp_2m_k' | 'unipc';
+  scheduler?: 'ddim' | 'ddpm' | 'deis' | 'lms' | 'lms_k' | 'pndm' | 'heun' | 'heun_k' | 'euler' | 'euler_k' | 'euler_a' | 'kdpm_2' | 'kdpm_2_a' | 'dpmpp_2s' | 'dpmpp_2s_k' | 'dpmpp_2m' | 'dpmpp_2m_k' | 'dpmpp_2m_sde' | 'dpmpp_2m_sde_k' | 'dpmpp_sde' | 'dpmpp_sde_k' | 'unipc';
   /**
    * The model to use (currently ignored)
    */
diff --git a/invokeai/frontend/web/src/services/api/models/TextToLatentsInvocation.ts b/invokeai/frontend/web/src/services/api/models/TextToLatentsInvocation.ts
index 117533f106..513acff08d 100644
--- a/invokeai/frontend/web/src/services/api/models/TextToLatentsInvocation.ts
+++ b/invokeai/frontend/web/src/services/api/models/TextToLatentsInvocation.ts
@@ -42,7 +42,7 @@ export type TextToLatentsInvocation = {
   /**
    * The scheduler to use
    */
-  scheduler?: 'ddim' | 'ddpm' | 'deis' | 'lms' | 'lms_k' | 'pndm' | 'heun' | 'heun_k' | 'euler' | 'euler_k' | 'euler_a' | 'kdpm_2' | 'kdpm_2_a' | 'dpmpp_2s' | 'dpmpp_2s_k' | 'dpmpp_2m' | 'dpmpp_2m_k' | 'unipc';
+  scheduler?: 'ddim' | 'ddpm' | 'deis' | 'lms' | 'lms_k' | 'pndm' | 'heun' | 'heun_k' | 'euler' | 'euler_k' | 'euler_a' | 'kdpm_2' | 'kdpm_2_a' | 'dpmpp_2s' | 'dpmpp_2s_k' | 'dpmpp_2m' | 'dpmpp_2m_k' | 'dpmpp_2m_sde' | 'dpmpp_2m_sde_k' | 'dpmpp_sde' | 'dpmpp_sde_k' | 'unipc';
   /**
    * The model to use (currently ignored)
    */

From 9b324077442f53f3ff646865cd6e657576bcac67 Mon Sep 17 00:00:00 2001
From: Sergey Borisov
Date: Mon, 19 Jun 2023 00:34:01 +0300
Subject: [PATCH 2/2] Provide generator to all schedulers step function to
 make both ancestral and sde schedulers reproducible

---
 invokeai/app/invocations/latent.py | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/invokeai/app/invocations/latent.py b/invokeai/app/invocations/latent.py
index 1a448edca8..63db3d925c 100644
--- a/invokeai/app/invocations/latent.py
+++ b/invokeai/app/invocations/latent.py
@@ -222,15 +222,6 @@ class TextToLatentsInvocation(BaseInvocation):
         c, extra_conditioning_info = context.services.latents.get(self.positive_conditioning.conditioning_name)
         uc, _ = context.services.latents.get(self.negative_conditioning.conditioning_name)

-        custom_args = dict(
-            eta=0.0, #ddim_eta
-        )
-
-        if type(scheduler) is DPMSolverMultistepScheduler and scheduler.config.algorithm_type in ["sde-dpmsolver", "sde-dpmsolver++"]:
-            custom_args.update(
-                generator=torch.Generator(device=uc.device).manual_seed(0),
-            )
-
         conditioning_data = ConditioningData(
             unconditioned_embeddings=uc,
             text_embeddings=c,
@@ -242,7 +233,17 @@ class TextToLatentsInvocation(BaseInvocation):
                 h_symmetry_time_pct=None,#h_symmetry_time_pct,
                 v_symmetry_time_pct=None#v_symmetry_time_pct,
             ),
-        ).add_scheduler_args_if_applicable(scheduler, **custom_args)
+        )
+
+        conditioning_data = conditioning_data.add_scheduler_args_if_applicable(
+            scheduler,
+
+            # for ddim scheduler
+            eta=0.0, #ddim_eta
+
+            # for ancestral and sde schedulers
+            generator=torch.Generator(device=uc.device).manual_seed(0),
+        )
         return conditioning_data

     def create_pipeline(self, unet, scheduler) -> StableDiffusionGeneratorPipeline:
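
Both commits lean on ConditioningData.add_scheduler_args_if_applicable, whose implementation is not part of this diff: eta and generator are passed unconditionally, and the helper is expected to forward only the keywords that the active scheduler's step() actually accepts. The snippet below is a minimal sketch of that filtering idea using public torch/diffusers APIs; filter_step_kwargs is a hypothetical stand-in, not the InvokeAI helper.

# Hedged sketch -- not the actual add_scheduler_args_if_applicable implementation.
# Keep only the keyword arguments named in a scheduler's step() signature, so
# eta reaches DDIM, generator reaches ancestral/SDE schedulers, and every other
# scheduler silently ignores both.
import inspect

import torch
from diffusers import DDIMScheduler, EulerAncestralDiscreteScheduler


def filter_step_kwargs(scheduler, **kwargs):
    """Return the subset of kwargs that scheduler.step() accepts."""
    accepted = set(inspect.signature(scheduler.step).parameters)
    return {k: v for k, v in kwargs.items() if k in accepted}


step_kwargs = dict(
    eta=0.0,  # only DDIM-style schedulers accept eta
    generator=torch.Generator("cpu").manual_seed(0),  # fixed seed -> reproducible stochastic steps
)

# DDIM takes eta (and a generator); Euler ancestral takes only the generator.
assert "eta" in filter_step_kwargs(DDIMScheduler(), **step_kwargs)
euler_a_kwargs = filter_step_kwargs(EulerAncestralDiscreteScheduler(), **step_kwargs)
assert "generator" in euler_a_kwargs and "eta" not in euler_a_kwargs

Note that the diff seeds this generator (and the DPMSolverSDEScheduler's noise_sampler_seed) with a constant 0, so the noise injected by ancestral and SDE schedulers is the same on every run, independent of the image seed.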