fix(config): drop usage of deprecated config.xformers, just use the existing utility function

psychedelicious 2024-03-11 22:56:52 +11:00
parent b4182b190f
commit a72cea014c


@@ -275,7 +275,7 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
         # the remainder if this code is called when attention_type=='auto'
         if self.unet.device.type == "cuda":
-            if is_xformers_available() and not config.disable_xformers:
+            if is_xformers_available():
                 self.enable_xformers_memory_efficient_attention()
                 return
             elif hasattr(torch.nn.functional, "scaled_dot_product_attention"):