Mirror of https://github.com/invoke-ai/InvokeAI
Added a check to skip the IP-Adapter's begin/end step percent handling when the IP-Adapter has already been turned off due to a potential clash with other cross-attention control.
parent ced297ed21
commit 7ee13879e3
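In short, the pipeline now records whether the IP-Adapter attention processors were actually installed, and the per-step begin/end step-percent handling runs only when they were. A condensed sketch of the idea (PipelineSketch, _attn_ctx, and _step are hypothetical stand-ins, not InvokeAI's API; only the use_ip_adapter bookkeeping mirrors the diff below):

    from contextlib import nullcontext

    # Condensed, hypothetical sketch of the guard this commit introduces. Names other
    # than use_ip_adapter are illustrative stand-ins, not InvokeAI's actual API.
    class PipelineSketch:
        def __init__(self):
            self.use_ip_adapter = False  # new attribute, initialised alongside control_model

        def _attn_ctx(self, wants_cross_attention_control: bool, ip_adapter_data):
            # Mirrors the branch structure in the second hunk below: cross-attention
            # control wins and the IP-Adapter is silently skipped, so the flag records
            # what actually happened.
            if wants_cross_attention_control:
                self.use_ip_adapter = False
                return nullcontext()  # stands in for the custom attention context
            elif ip_adapter_data is not None:
                self.use_ip_adapter = True
                return nullcontext()  # stands in for apply_ip_adapter_attention(...)
            return nullcontext()

        def _step(self, ip_adapter_data):
            # Per denoising step, the begin/end step-percent window is only evaluated
            # when the adapter's attention processors were actually installed.
            if self.use_ip_adapter and ip_adapter_data is not None:
                pass  # apply or disable the IP-Adapter for this step based on the window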
@@ -253,6 +253,7 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
         )
         self.invokeai_diffuser = InvokeAIDiffuserComponent(self.unet, self._unet_forward)
         self.control_model = control_model
+        self.use_ip_adapter = False
 
     def _adjust_memory_efficient_attention(self, latents: torch.Tensor):
         """
@@ -416,12 +417,14 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
                 extra_conditioning_info=conditioning_data.extra,
                 step_count=len(self.scheduler.timesteps),
             )
+            self.use_ip_adapter = False
         elif ip_adapter_data is not None:
             # TODO(ryand): Should we raise an exception if both custom attention and IP-Adapter attention are active?
             # As it is now, the IP-Adapter will silently be skipped.
             attn_ctx = ip_adapter_data.ip_adapter_model.apply_ip_adapter_attention(
                 unet=self.invokeai_diffuser.model, scale=ip_adapter_data.weight
             )
+            self.use_ip_adapter = True
         else:
             attn_ctx = nullcontext()
 
@@ -506,7 +509,7 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
         latent_model_input = self.scheduler.scale_model_input(latents, timestep)
 
         # handle IP-Adapter
-        if ip_adapter_data is not None:
+        if self.use_ip_adapter and ip_adapter_data is not None:  # somewhat redundant but logic is clearer
             first_adapter_step = math.floor(ip_adapter_data.begin_step_percent * total_step_count)
             last_adapter_step = math.ceil(ip_adapter_data.end_step_percent * total_step_count)
             if step_index >= first_adapter_step and step_index <= last_adapter_step:
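The step window itself converts the begin/end percents into concrete step indices with floor/ceil. A small self-contained illustration with made-up values (30 steps, 0.1, and 0.5 are arbitrary; only the floor/ceil formula comes from the hunk above):

    import math

    # Illustrative values; in the pipeline they come from ip_adapter_data and the scheduler.
    total_step_count = 30
    begin_step_percent = 0.1
    end_step_percent = 0.5

    first_adapter_step = math.floor(begin_step_percent * total_step_count)  # 3
    last_adapter_step = math.ceil(end_step_percent * total_step_count)      # 15

    for step_index in range(total_step_count):
        # With these values the adapter is active for steps 3..15 inclusive.
        adapter_active = first_adapter_step <= step_index <= last_adapter_step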