Add UniPC Scheduler

This commit is contained in:
blessedcoolant 2023-05-11 20:52:37 +12:00
parent b0c41b4828
commit 06b5800d28
8 changed files with 67 additions and 57 deletions

View File

@@ -54,16 +54,17 @@ class NoiseOutput(BaseInvocationOutput):
# TODO: this seems like a hack
scheduler_map = dict(
ddim=diffusers.DDIMScheduler,
dpmpp_2=diffusers.DPMSolverMultistepScheduler,
k_dpm_2=diffusers.KDPM2DiscreteScheduler,
k_dpm_2_a=diffusers.KDPM2AncestralDiscreteScheduler,
k_dpmpp_2=diffusers.DPMSolverMultistepScheduler,
k_euler=diffusers.EulerDiscreteScheduler,
k_euler_a=diffusers.EulerAncestralDiscreteScheduler,
k_heun=diffusers.HeunDiscreteScheduler,
k_lms=diffusers.LMSDiscreteScheduler,
plms=diffusers.PNDMScheduler,
ddim=(diffusers.DDIMScheduler, dict()),
dpmpp_2=(diffusers.DPMSolverMultistepScheduler, dict()),
k_dpm_2=(diffusers.KDPM2DiscreteScheduler, dict()),
k_dpm_2_a=(diffusers.KDPM2AncestralDiscreteScheduler, dict()),
k_dpmpp_2=(diffusers.DPMSolverMultistepScheduler, dict()),
k_euler=(diffusers.EulerDiscreteScheduler, dict()),
k_euler_a=(diffusers.EulerAncestralDiscreteScheduler, dict()),
k_heun=(diffusers.HeunDiscreteScheduler, dict()),
k_lms=(diffusers.LMSDiscreteScheduler, dict()),
plms=(diffusers.PNDMScheduler, dict()),
unipc=(diffusers.UniPCMultistepScheduler, dict(cpu_only=True))
)
@@ -73,8 +74,9 @@ SAMPLER_NAME_VALUES = Literal[
def get_scheduler(scheduler_name:str, model: StableDiffusionGeneratorPipeline)->Scheduler:
scheduler_class = scheduler_map.get(scheduler_name,'ddim')
scheduler = scheduler_class.from_config(model.scheduler.config)
scheduler_class, scheduler_extra_config = scheduler_map.get(scheduler_name,'ddim')
scheduler_config = {**model.scheduler.config, **scheduler_extra_config}
scheduler = scheduler_class.from_config(scheduler_config)
# hack copied over from generate.py
if not hasattr(scheduler, 'uses_inpainting_model'):
scheduler.uses_inpainting_model = lambda: False
@@ -293,11 +295,7 @@ class LatentsToLatentsInvocation(TextToLatentsInvocation):
latent, device=model.device, dtype=latent.dtype
)
timesteps, _ = model.get_img2img_timesteps(
self.steps,
self.strength,
device=model.device,
)
timesteps, _ = model.get_img2img_timesteps(self.steps, self.strength)
result_latents, result_attention_map_saver = model.latents_from_embeddings(
latents=initial_latents,

View File

@@ -119,6 +119,7 @@ SAMPLER_CHOICES = [
"plms",
# diffusers:
"pndm",
"unipc"
]
PRECISION_CHOICES = [

View File

@@ -1049,27 +1049,28 @@ class Generate:
# See https://github.com/huggingface/diffusers/issues/277#issuecomment-1371428672
scheduler_map = dict(
ddim=diffusers.DDIMScheduler,
dpmpp_2=diffusers.DPMSolverMultistepScheduler,
k_dpm_2=diffusers.KDPM2DiscreteScheduler,
k_dpm_2_a=diffusers.KDPM2AncestralDiscreteScheduler,
ddim=(diffusers.DDIMScheduler, dict()),
dpmpp_2=(diffusers.DPMSolverMultistepScheduler, dict()),
k_dpm_2=(diffusers.KDPM2DiscreteScheduler, dict()),
k_dpm_2_a=(diffusers.KDPM2AncestralDiscreteScheduler, dict()),
# DPMSolverMultistepScheduler is technically not `k_` anything, as it is neither
# the k-diffusers implementation nor included in EDM (Karras 2022), but we can
# provide an alias for compatibility.
k_dpmpp_2=diffusers.DPMSolverMultistepScheduler,
k_euler=diffusers.EulerDiscreteScheduler,
k_euler_a=diffusers.EulerAncestralDiscreteScheduler,
k_heun=diffusers.HeunDiscreteScheduler,
k_lms=diffusers.LMSDiscreteScheduler,
plms=diffusers.PNDMScheduler,
k_dpmpp_2=(diffusers.DPMSolverMultistepScheduler, dict()),
k_euler=(diffusers.EulerDiscreteScheduler, dict()),
k_euler_a=(diffusers.EulerAncestralDiscreteScheduler, dict()),
k_heun=(diffusers.HeunDiscreteScheduler, dict()),
k_lms=(diffusers.LMSDiscreteScheduler, dict()),
plms=(diffusers.PNDMScheduler, dict()),
unipc=(diffusers.UniPCMultistepScheduler, dict(cpu_only=True))
)
if self.sampler_name in scheduler_map:
sampler_class = scheduler_map[self.sampler_name]
sampler_class, sampler_extra_config = scheduler_map[self.sampler_name]
msg = (
f"Setting Sampler to {self.sampler_name} ({sampler_class.__name__})"
)
self.sampler = sampler_class.from_config(self.model.scheduler.config)
self.sampler = sampler_class.from_config({**self.model.scheduler.config, **sampler_extra_config})
else:
msg = (
f" Unsupported Sampler: {self.sampler_name} "+

View File

@@ -72,17 +72,18 @@ class InvokeAIGeneratorOutput:
# old code that calls Generate will continue to work.
class InvokeAIGenerator(metaclass=ABCMeta):
scheduler_map = dict(
ddim=diffusers.DDIMScheduler,
dpmpp_2=diffusers.DPMSolverMultistepScheduler,
k_dpm_2=diffusers.KDPM2DiscreteScheduler,
k_dpm_2_a=diffusers.KDPM2AncestralDiscreteScheduler,
k_dpmpp_2=diffusers.DPMSolverMultistepScheduler,
k_euler=diffusers.EulerDiscreteScheduler,
k_euler_a=diffusers.EulerAncestralDiscreteScheduler,
k_heun=diffusers.HeunDiscreteScheduler,
k_lms=diffusers.LMSDiscreteScheduler,
plms=diffusers.PNDMScheduler,
)
ddim=(diffusers.DDIMScheduler, dict()),
dpmpp_2=(diffusers.DPMSolverMultistepScheduler, dict()),
k_dpm_2=(diffusers.KDPM2DiscreteScheduler, dict()),
k_dpm_2_a=(diffusers.KDPM2AncestralDiscreteScheduler, dict()),
k_dpmpp_2=(diffusers.DPMSolverMultistepScheduler, dict()),
k_euler=(diffusers.EulerDiscreteScheduler, dict()),
k_euler_a=(diffusers.EulerAncestralDiscreteScheduler, dict()),
k_heun=(diffusers.HeunDiscreteScheduler, dict()),
k_lms=(diffusers.LMSDiscreteScheduler, dict()),
plms=(diffusers.PNDMScheduler, dict()),
unipc=(diffusers.UniPCMultistepScheduler, dict(cpu_only=True))
)
def __init__(self,
model_info: dict,
@@ -181,8 +182,9 @@ class InvokeAIGenerator(metaclass=ABCMeta):
return generator_class(model, self.params.precision)
def get_scheduler(self, scheduler_name:str, model: StableDiffusionGeneratorPipeline)->Scheduler:
scheduler_class = self.scheduler_map.get(scheduler_name,'ddim')
scheduler = scheduler_class.from_config(model.scheduler.config)
scheduler_class, scheduler_extra_config = self.scheduler_map.get(scheduler_name,'ddim')
scheduler_config = {**model.scheduler.config, **scheduler_extra_config}
scheduler = scheduler_class.from_config(scheduler_config)
# hack copied over from generate.py
if not hasattr(scheduler, 'uses_inpainting_model'):
scheduler.uses_inpainting_model = lambda: False

View File

@@ -47,6 +47,7 @@ from diffusers import (
LDMTextToImagePipeline,
LMSDiscreteScheduler,
PNDMScheduler,
UniPCMultistepScheduler,
StableDiffusionPipeline,
UNet2DConditionModel,
)
@@ -1209,6 +1210,8 @@ def load_pipeline_from_original_stable_diffusion_ckpt(
scheduler = EulerAncestralDiscreteScheduler.from_config(scheduler.config)
elif scheduler_type == "dpm":
scheduler = DPMSolverMultistepScheduler.from_config(scheduler.config)
elif scheduler_type == 'unipc':
scheduler = UniPCMultistepScheduler.from_config(scheduler.config)
elif scheduler_type == "ddim":
scheduler = scheduler
else:

View File

@@ -509,10 +509,13 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
run_id=None,
callback: Callable[[PipelineIntermediateState], None] = None,
) -> tuple[torch.Tensor, Optional[AttentionMapSaver]]:
if self.scheduler.config.get("cpu_only", False):
scheduler_device = torch.device('cpu')
else:
scheduler_device = self._model_group.device_for(self.unet)
if timesteps is None:
self.scheduler.set_timesteps(
num_inference_steps, device=self._model_group.device_for(self.unet)
)
self.scheduler.set_timesteps(num_inference_steps, device=scheduler_device)
timesteps = self.scheduler.timesteps
infer_latents_from_embeddings = GeneratorToCallbackinator(
self.generate_latents_from_embeddings, PipelineIntermediateState
@@ -725,12 +728,8 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
noise: torch.Tensor,
run_id=None,
callback=None,
) -> InvokeAIStableDiffusionPipelineOutput:
timesteps, _ = self.get_img2img_timesteps(
num_inference_steps,
strength,
device=self._model_group.device_for(self.unet),
)
) -> InvokeAIStableDiffusionPipelineOutput:
timesteps, _ = self.get_img2img_timesteps(num_inference_steps, strength)
result_latents, result_attention_maps = self.latents_from_embeddings(
latents=initial_latents if strength < 1.0 else torch.zeros_like(
initial_latents, device=initial_latents.device, dtype=initial_latents.dtype
@@ -756,13 +755,19 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
return self.check_for_safety(output, dtype=conditioning_data.dtype)
def get_img2img_timesteps(
self, num_inference_steps: int, strength: float, device
self, num_inference_steps: int, strength: float, device=None
) -> (torch.Tensor, int):
img2img_pipeline = StableDiffusionImg2ImgPipeline(**self.components)
assert img2img_pipeline.scheduler is self.scheduler
img2img_pipeline.scheduler.set_timesteps(num_inference_steps, device=device)
if self.scheduler.config.get("cpu_only", False):
scheduler_device = torch.device('cpu')
else:
scheduler_device = self._model_group.device_for(self.unet)
img2img_pipeline.scheduler.set_timesteps(num_inference_steps, device=scheduler_device)
timesteps, adjusted_steps = img2img_pipeline.get_timesteps(
num_inference_steps, strength, device=device
num_inference_steps, strength, device=scheduler_device
)
# Workaround for low strength resulting in zero timesteps.
# TODO: submit upstream fix for zero-step img2img
@@ -796,9 +801,7 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
if init_image.dim() == 3:
init_image = init_image.unsqueeze(0)
timesteps, _ = self.get_img2img_timesteps(
num_inference_steps, strength, device=device
)
timesteps, _ = self.get_img2img_timesteps(num_inference_steps, strength)
# 6. Prepare latent variables
# can't quite use upstream StableDiffusionImg2ImgPipeline.prepare_latents

View File

@@ -15,6 +15,7 @@ SAMPLER_CHOICES = [
"plms",
# diffusers:
"pndm",
"unipc"
]

View File

@@ -11,6 +11,7 @@ export const DIFFUSERS_SCHEDULERS: Array<string> = [
'k_euler',
'k_euler_a',
'k_heun',
'unipc',
];
// Valid image widths