Remove the redundant init_timestep parameter that was being passed around. It is simply the first element of the timesteps array.

Commit: fa40061eca
Parent: 7cafd78d6e
Author: Ryan Dick
Date: 2024-06-19 11:56:13 -04:00
Committed by: Kent Keirsey
4 changed files with 11 additions and 20 deletions
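
Why the parameter was redundant, as a minimal standalone sketch (the tensor values and indices are illustrative, not taken from the repo): both values are slices of the same scheduler timestep array, so the initial timestep is always just the first element of the trimmed timesteps.

import torch

# Illustrative values; the real array comes from the scheduler's timesteps.
timesteps_full = torch.tensor([999, 749, 499, 249])
t_start_idx, t_end_idx = 1, 3

init_timestep = timesteps_full[t_start_idx : t_start_idx + 1]      # tensor([749])
timesteps = timesteps_full[t_start_idx : t_start_idx + t_end_idx]  # tensor([749, 499, 249])

# Whenever timesteps is non-empty, init_timestep equals its first element,
# so callees can derive it as timesteps[0:1] instead of taking a parameter.
assert torch.equal(init_timestep, timesteps[0:1])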


@@ -625,7 +625,6 @@ class DenoiseLatentsInvocation(BaseInvocation):
         t_start_idx *= scheduler.order
         t_end_idx *= scheduler.order
-        init_timestep = timesteps[t_start_idx : t_start_idx + 1]
         timesteps = timesteps[t_start_idx : t_start_idx + t_end_idx]

         scheduler_step_kwargs: Dict[str, Any] = {}
@@ -648,7 +647,7 @@ class DenoiseLatentsInvocation(BaseInvocation):
         if isinstance(scheduler, TCDScheduler):
             scheduler_step_kwargs.update({"eta": 1.0})
-        return timesteps, init_timestep, scheduler_step_kwargs
+        return timesteps, scheduler_step_kwargs

     def prep_inpaint_mask(
         self, context: InvocationContext, latents: torch.Tensor
@@ -814,7 +813,7 @@ class DenoiseLatentsInvocation(BaseInvocation):
             dtype=unet.dtype,
         )
-        timesteps, init_timestep, scheduler_step_kwargs = self.init_scheduler(
+        timesteps, scheduler_step_kwargs = self.init_scheduler(
             scheduler,
             device=unet.device,
             steps=self.steps,
@@ -826,7 +825,6 @@ class DenoiseLatentsInvocation(BaseInvocation):
         result_latents = pipeline.latents_from_embeddings(
             latents=latents,
             timesteps=timesteps,
-            init_timestep=init_timestep,
             noise=noise,
             seed=seed,
             mask=mask,
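
After this change, any code inside latents_from_embeddings that still needs the initial timestep can presumably derive it from the timesteps argument alone, along the lines of (hypothetical one-liner, not part of this diff):

init_timestep = timesteps[0:1]  # empty tensor when timesteps is empty, matching the old slicing behavior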