Disable xformers if CUDA is not available

This commit is contained in:
Lincoln Stein 2023-02-16 22:20:30 -05:00
parent d69156c616
commit 65a7432b5a
2 changed files with 2 additions and 2 deletions

View File

@@ -205,7 +205,7 @@ class Generate:
Globals.full_precision = self.precision=='float32'
if is_xformers_available():
- if not Globals.disable_xformers:
+ if torch.cuda.is_available() and not Globals.disable_xformers:
print('>> xformers memory-efficient attention is available and enabled')
else:
print('>> xformers memory-efficient attention is available but disabled')

View File

@@ -308,7 +308,7 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
"""
if xformers is available, use it, otherwise use sliced attention.
"""
- if is_xformers_available() and not Globals.disable_xformers:
+ if torch.cuda.is_available() and is_xformers_available() and not Globals.disable_xformers:
self.enable_xformers_memory_efficient_attention()
else:
if torch.backends.mps.is_available():