--opt-split-attention is now on by default for torch.cuda, and off for other devices (CPU and MPS), because the option reportedly does not work on them

This commit is contained in:
AUTOMATIC
2022-09-21 09:49:02 +03:00
parent 6785fabefb
commit 254da5d127
2 changed files with 3 additions and 6 deletions

View File

@@ -245,7 +245,7 @@ class StableDiffusionModelHijack:
if cmd_opts.opt_split_attention_v1:
ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward_v1
-elif not cmd_opts.disable_opt_split_attention:
+elif not cmd_opts.disable_opt_split_attention and (cmd_opts.opt_split_attention or torch.cuda.is_available()):
ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward
ldm.modules.diffusionmodules.model.nonlinearity = nonlinearity_hijack
ldm.modules.diffusionmodules.model.AttnBlock.forward = cross_attention_attnblock_forward