Skip to content

Commit e29dc97

Browse files
make style
1 parent 8e4733b commit e29dc97

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

src/diffusers/models/attention.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -297,8 +297,8 @@ def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_atten
297297
)
298298
elif not torch.cuda.is_available():
299299
raise ValueError(
300-
"torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is only"
301-
" available for GPU "
300+
"torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is"
301+
" only available for GPU "
302302
)
303303
else:
304304
try:
@@ -461,8 +461,8 @@ def set_use_memory_efficient_attention_xformers(self, use_memory_efficient_atten
461461
)
462462
elif not torch.cuda.is_available():
463463
raise ValueError(
464-
"torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is only"
465-
" available for GPU "
464+
"torch.cuda.is_available() should be True but is False. xformers' memory efficient attention is"
465+
" only available for GPU "
466466
)
467467
else:
468468
try:

0 commit comments

Comments (0)