Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)

fix step count on ddim

Commit ee4273d760 (parent 2619a0b286)
@@ -359,6 +359,7 @@ class Sampler(object):
                 unconditional_guidance_scale=unconditional_guidance_scale,
                 unconditional_conditioning=unconditional_conditioning,
                 t_next = ts_next,
+                step_count=total_steps
             )

             x_dec, pred_x0, e_t = outs
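The added step_count argument is the substance of the fix: the DDIM decode loop now tells the per-step sampler how many steps there are in total, so downstream code can express progress as a fraction rather than a raw index. A minimal sketch of that conversion, under the assumption that it is what feeds the percent_through value used in the next hunk; the function name and guard are illustrative, not InvokeAI's exact helpers:

    # Hedged sketch: turning (step_index, step_count) into a progress fraction.
    # Names are illustrative assumptions, not the repository's exact API.
    def percent_through_of_step(step_index: int, step_count: int) -> float:
        if step_count <= 0:
            # Avoid division by zero when no step budget has been set yet.
            return 0.0
        return step_index / step_count

    percent_through_of_step(15, 50)  # -> 0.3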
@@ -86,14 +86,14 @@ class InvokeAIDiffuserComponent:
             cross_attention_control_types_to_do = CrossAttentionControl.get_active_cross_attention_control_types_for_step(self.cross_attention_control_context, percent_through)

             if len(cross_attention_control_types_to_do)==0:
-                #print('step', step_index, ': not doing cross attention control')
+                print('pct', percent_through, ': not doing cross attention control')
                 # faster batched path
                 x_twice = torch.cat([x]*2)
                 sigma_twice = torch.cat([sigma]*2)
                 both_conditionings = torch.cat([unconditioning, conditioning])
                 unconditioned_next_x, conditioned_next_x = self.model_forward_callback(x_twice, sigma_twice, both_conditionings).chunk(2)
             else:
-                #print('step', step_index, ': doing cross attention control on', cross_attention_control_types_to_do)
+                print('pct', percent_through, ': doing cross attention control on', cross_attention_control_types_to_do)
                 # slower non-batched path (20% slower on mac MPS)
                 # We are only interested in using attention maps for conditioned_next_x, but batching them with generation of
                 # unconditioned_next_x causes attention maps to *also* be saved for the unconditioned_next_x.
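The if branch above is the standard batched classifier-free guidance path: the unconditioned and conditioned predictions come out of a single model call, which is why the latents, sigmas, and conditionings are each duplicated before the forward pass. The else branch trades that speed for clean attention maps, running the passes separately so maps are captured only for conditioned_next_x (about 20% slower on Mac MPS, per the comment). A self-contained sketch of the batched path follows, with the final guidance combination added for context; that combine step is not part of this hunk, so treat it as an assumption about how the two halves are merged:

    import torch

    def batched_cfg_step(model_forward_callback, x, sigma,
                         unconditioning, conditioning, guidance_scale):
        # Duplicate inputs so one forward call covers both passes.
        x_twice = torch.cat([x] * 2)
        sigma_twice = torch.cat([sigma] * 2)
        both_conditionings = torch.cat([unconditioning, conditioning])

        # Split the single result back into unconditioned / conditioned halves.
        unconditioned_next_x, conditioned_next_x = model_forward_callback(
            x_twice, sigma_twice, both_conditionings
        ).chunk(2)

        # Assumed classifier-free guidance combination (outside this hunk).
        return unconditioned_next_x + guidance_scale * (conditioned_next_x - unconditioned_next_x)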