From 8562dbaaa8fa1e595cd3e83c05ff07c9b76393f4 Mon Sep 17 00:00:00 2001
From: Sergey Borisov
Date: Wed, 30 Aug 2023 02:18:08 +0300
Subject: [PATCH 1/2] Hotfix to make second order schedulers work with mask

---
 .../backend/stable_diffusion/diffusers_pipeline.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/invokeai/backend/stable_diffusion/diffusers_pipeline.py b/invokeai/backend/stable_diffusion/diffusers_pipeline.py
index 2d1894c896..d88313f455 100644
--- a/invokeai/backend/stable_diffusion/diffusers_pipeline.py
+++ b/invokeai/backend/stable_diffusion/diffusers_pipeline.py
@@ -558,12 +558,22 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
         # compute the previous noisy sample x_t -> x_t-1
         step_output = self.scheduler.step(noise_pred, timestep, latents, **conditioning_data.scheduler_args)

+        # TODO: issue to diffusers?
+        # undo the internal counter increment done by scheduler.step, so the timestep resolves as it did before the call
+        # this is needed to be able to call scheduler.add_noise with the current timestep
+        if self.scheduler.order == 2:
+            self.scheduler._index_counter[timestep.item()] -= 1
+
         # TODO: this additional_guidance extension point feels redundant with InvokeAIDiffusionComponent.
         # But the way things are now, scheduler runs _after_ that, so there was
         # no way to use it to apply an operation that happens after the last scheduler.step.
         for guidance in additional_guidance:
             step_output = guidance(step_output, timestep, conditioning_data)

+        # restore the internal counter
+        if self.scheduler.order == 2:
+            self.scheduler._index_counter[timestep.item()] += 1
+
         return step_output

     def _unet_forward(
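
For context on the hotfix above: diffusers' second-order schedulers visit each timestep value twice and keep a private per-timestep counter, `_index_counter`, which `scheduler.step()` increments to tell the two passes apart. The mask guidance runs after `step()` (via `additional_guidance`) but still needs `scheduler.add_noise()` to resolve the current timestep, so the counter is decremented before the guidance and restored afterwards. Below is a minimal toy sketch of why that matters; it is illustrative only, not the diffusers implementation, and `ToySecondOrderScheduler` with its timestep values is made up:

from collections import defaultdict


class ToySecondOrderScheduler:
    """Illustrative stand-in for a second-order scheduler's timestep bookkeeping."""

    order = 2

    def __init__(self, timesteps):
        self.timesteps = timesteps              # each value appears twice, e.g. [999, 999, 749, 749]
        self._index_counter = defaultdict(int)  # how many times each timestep value has been stepped

    def index_for_timestep(self, t):
        # Repeated timestep values are disambiguated by how often t has already been seen.
        occurrences = [i for i, ts in enumerate(self.timesteps) if ts == t]
        return occurrences[self._index_counter[t]]

    def step(self, t):
        idx = self.index_for_timestep(t)
        self._index_counter[t] += 1             # side effect of step(): counter moves to the next pass
        return idx


sched = ToySecondOrderScheduler([999, 999, 749, 749])
idx = sched.step(999)                           # resolves to index 0, counter for 999 becomes 1

# Without the hotfix, anything that resolves timestep 999 after step() (e.g. add_noise
# inside the mask guidance) would now land on index 1 instead of the current index 0.
sched._index_counter[999] -= 1                  # undo the increment, as the patch does
assert sched.index_for_timestep(999) == idx     # resolves as it did before the step() call
sched._index_counter[999] += 1                  # restore the counter before the next step()
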
From ca15b8b33e8b5f64416a79a2e5ac0b459dbf1c0e Mon Sep 17 00:00:00 2001
From: Sergey Borisov
Date: Wed, 30 Aug 2023 03:40:59 +0300
Subject: [PATCH 2/2] Fix wrong timestep selection in some cases (dpmpp_sde)

---
 invokeai/app/invocations/latent.py | 37 +++++++++++++-----------------
 1 file changed, 16 insertions(+), 21 deletions(-)

diff --git a/invokeai/app/invocations/latent.py b/invokeai/app/invocations/latent.py
index 80988f3c71..9cca0bd744 100644
--- a/invokeai/app/invocations/latent.py
+++ b/invokeai/app/invocations/latent.py
@@ -367,36 +367,31 @@ class DenoiseLatentsInvocation(BaseInvocation):
     # original idea by https://github.com/AmericanPresidentJimmyCarter
     # TODO: research more for second order schedulers timesteps
     def init_scheduler(self, scheduler, device, steps, denoising_start, denoising_end):
-        num_inference_steps = steps
         if scheduler.config.get("cpu_only", False):
-            scheduler.set_timesteps(num_inference_steps, device="cpu")
+            scheduler.set_timesteps(steps, device="cpu")
             timesteps = scheduler.timesteps.to(device=device)
         else:
-            scheduler.set_timesteps(num_inference_steps, device=device)
+            scheduler.set_timesteps(steps, device=device)
             timesteps = scheduler.timesteps

-        # apply denoising_start
+        # skip greater order timesteps
+        _timesteps = timesteps[:: scheduler.order]
+
+        # get start timestep index
         t_start_val = int(round(scheduler.config.num_train_timesteps * (1 - denoising_start)))
-        t_start_idx = len(list(filter(lambda ts: ts >= t_start_val, timesteps)))
-        timesteps = timesteps[t_start_idx:]
-        if scheduler.order == 2 and t_start_idx > 0:
-            timesteps = timesteps[1:]
+        t_start_idx = len(list(filter(lambda ts: ts >= t_start_val, _timesteps)))

-        # save start timestep to apply noise
-        init_timestep = timesteps[:1]
-
-        # apply denoising_end
+        # get end timestep index
         t_end_val = int(round(scheduler.config.num_train_timesteps * (1 - denoising_end)))
-        t_end_idx = len(list(filter(lambda ts: ts >= t_end_val, timesteps)))
-        if scheduler.order == 2 and t_end_idx > 0:
-            t_end_idx += 1
-        timesteps = timesteps[:t_end_idx]
+        t_end_idx = len(list(filter(lambda ts: ts >= t_end_val, _timesteps[t_start_idx:])))

-        # calculate step count based on scheduler order
-        num_inference_steps = len(timesteps)
-        if scheduler.order == 2:
-            num_inference_steps += num_inference_steps % 2
-            num_inference_steps = num_inference_steps // 2
+        # apply order to indexes
+        t_start_idx *= scheduler.order
+        t_end_idx *= scheduler.order
+
+        init_timestep = timesteps[t_start_idx : t_start_idx + 1]
+        timesteps = timesteps[t_start_idx : t_start_idx + t_end_idx]
+        num_inference_steps = len(timesteps) // scheduler.order

         return num_inference_steps, timesteps, init_timestep
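
To see what the new init_scheduler() selection does, here is a standalone sketch of the same logic applied to a plain Python list, with made-up timestep values (real schedulers return tensors and the exact values depend on the scheduler). The key point is that the start and end indices are counted on one entry per step (timesteps[::order]) and then scaled back by scheduler.order, so the slice can never start or end between the two halves of a second-order step, which is what previously produced wrong timestep selection for schedulers like dpmpp_sde:

def select_timesteps(timesteps, order, num_train_timesteps, denoising_start, denoising_end):
    """Standalone sketch of the selection logic added by this patch (list-based, not tensor-based)."""
    # Count start/end in whole steps: keep one entry per step, skipping higher-order repeats.
    _timesteps = timesteps[::order]

    # index of the first step whose timestep is still >= the requested start point
    t_start_val = int(round(num_train_timesteps * (1 - denoising_start)))
    t_start_idx = len([ts for ts in _timesteps if ts >= t_start_val])

    # number of remaining steps whose timestep is still >= the requested end point
    t_end_val = int(round(num_train_timesteps * (1 - denoising_end)))
    t_end_idx = len([ts for ts in _timesteps[t_start_idx:] if ts >= t_end_val])

    # scale step indices back to indices into the full (order-repeated) timestep list
    t_start_idx *= order
    t_end_idx *= order

    init_timestep = timesteps[t_start_idx : t_start_idx + 1]
    timesteps = timesteps[t_start_idx : t_start_idx + t_end_idx]
    return len(timesteps) // order, timesteps, init_timestep


# Hypothetical second-order timesteps for 4 steps (each value repeated order=2 times).
ts = [999, 999, 749, 749, 499, 499, 249, 249]
print(select_timesteps(ts, order=2, num_train_timesteps=1000, denoising_start=0.25, denoising_end=1.0))
# -> (3, [749, 749, 499, 499, 249, 249], [749])
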