Merge branch 'diffusers_cross_attention_control_reimplementation' of github.com:damian0815/InvokeAI into diffusers_cross_attention_control_reimplementation

This commit is contained in:
Damian Stewart
2023-01-30 14:51:06 +01:00
48 changed files with 1683 additions and 1296 deletions


@@ -641,7 +641,7 @@ class SlicedSwapCrossAttnProcesser(SlicedAttnProcessor):
             del remapped_original_attn_slice, modified_attn_slice
-            attn_slice = torch.bmm(attn_slice, original_value[start_idx:end_idx])
+            attn_slice = torch.bmm(attn_slice, modified_value[start_idx:end_idx])
             hidden_states[start_idx:end_idx] = attn_slice
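
The one-line change above switches which value tensor the attention probabilities are multiplied against: the slice of the modified prompt's values rather than the original prompt's, which is what propagates the edited conditioning into the output. A minimal sketch of that batched matrix product, assuming illustrative shapes (the tensor names follow the diff; this is not the actual processor code):

import torch

# attn_slice holds softmaxed attention probabilities for one slice of heads;
# bmm with the *modified* prompt's value tensor applies the edited
# conditioning instead of the original one.
attn_slice = torch.softmax(torch.randn(2, 4, 3), dim=-1)  # (heads, queries, keys)
modified_value = torch.randn(2, 3, 8)                     # (heads, keys, dim)
start_idx, end_idx = 0, 2
out = torch.bmm(attn_slice, modified_value[start_idx:end_idx])
print(out.shape)  # torch.Size([2, 4, 8])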


@@ -60,17 +60,17 @@ class InvokeAIDiffuserComponent:
     @contextmanager
     def custom_attention_context(self,
                                  extra_conditioning_info: Optional[ExtraConditioningInfo],
-                                 step_count: int,
-                                 do_attention_map_saving: bool):
+                                 step_count: int):
         do_swap = extra_conditioning_info is not None and extra_conditioning_info.wants_cross_attention_control
         old_attn_processor = None
         if do_swap:
             old_attn_processor = self.setup_cross_attention_control(extra_conditioning_info,
-                                                                    step_count=step_count)
+                                                                     step_count=step_count)
         try:
             yield None
         finally:
-            self.remove_cross_attention_control(old_attn_processor)
+            if old_attn_processor is not None:
+                self.remove_cross_attention_control(old_attn_processor)
             # TODO resuscitate attention map saving
             #self.remove_attention_map_saving()
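
This hunk drops the unused do_attention_map_saving parameter and guards teardown so remove_cross_attention_control only runs when a swap processor was actually installed. A hedged usage sketch of the resulting context manager (only custom_attention_context and step_count come from the diff; component, num_steps, and run_one_denoising_step are hypothetical names for illustration):

# component is an InvokeAIDiffuserComponent; the loop body is illustrative.
# The context manager installs the swap attention processors on entry when
# cross attention control is requested, and restores the old processor on
# exit even if a step raises.
with component.custom_attention_context(extra_conditioning_info,
                                        step_count=num_steps):
    for step in range(num_steps):
        run_one_denoising_step(step)  # hypothetical helper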