fix step count on ddim

This commit is contained in:
Damian at mba 2022-10-24 01:23:43 +02:00
parent 2619a0b286
commit ee4273d760
2 changed files with 3 additions and 2 deletions

View File

@@ -359,6 +359,7 @@ class Sampler(object):
unconditional_guidance_scale=unconditional_guidance_scale,
unconditional_conditioning=unconditional_conditioning,
t_next = ts_next,
step_count=total_steps
)
x_dec, pred_x0, e_t = outs

View File

@@ -86,14 +86,14 @@ class InvokeAIDiffuserComponent:
cross_attention_control_types_to_do = CrossAttentionControl.get_active_cross_attention_control_types_for_step(self.cross_attention_control_context, percent_through)
if len(cross_attention_control_types_to_do)==0:
#print('step', step_index, ': not doing cross attention control')
print('pct', percent_through, ': not doing cross attention control')
# faster batched path
x_twice = torch.cat([x]*2)
sigma_twice = torch.cat([sigma]*2)
both_conditionings = torch.cat([unconditioning, conditioning])
unconditioned_next_x, conditioned_next_x = self.model_forward_callback(x_twice, sigma_twice, both_conditionings).chunk(2)
else:
#print('step', step_index, ': doing cross attention control on', cross_attention_control_types_to_do)
print('pct', percent_through, ': doing cross attention control on', cross_attention_control_types_to_do)
# slower non-batched path (20% slower on mac MPS)
# We are only interested in using attention maps for conditioned_next_x, but batching them with generation of
# unconditioned_next_x causes attention maps to *also* be saved for the unconditioned_next_x.