revert default cross attention optimization to Doggettx

make --disable-opt-split-attention command line option work again
AUTOMATIC
2023-06-01 08:12:06 +03:00
parent f1533de982
commit 36888092af
3 changed files with 6 additions and 4 deletions
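
For context on why these numbers matter: the webui registers a list of SdOptimization subclasses and, unless the user picks one explicitly (via settings or a command line flag), applies the available optimization with the highest priority. The snippet below is a minimal sketch of that selection rule, not the actual webui code; Opt and pick_default are hypothetical stand-ins for SdOptimization and the selection logic, and the boolean field stands in for is_available().

# Minimal sketch of priority-based selection. Opt and pick_default are
# hypothetical stand-ins, not the webui's real classes or functions.
from dataclasses import dataclass

@dataclass
class Opt:
    name: str
    priority: int
    available: bool = True  # stands in for SdOptimization.is_available()

def pick_default(optimizations):
    # The default is simply the highest-priority available optimization.
    candidates = [o for o in optimizations if o.available]
    return max(candidates, key=lambda o: o.priority, default=None)

# Priorities as set by this commit: Doggettx wins by default again.
opts = [Opt("Doggettx", 90), Opt("sdp-no-mem", 80), Opt("sdp", 70)]
print(pick_default(opts).name)  # -> Doggettx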

modules/sd_hijack_optimizations.py

@@ -57,7 +57,7 @@ class SdOptimizationSdpNoMem(SdOptimization):
     name = "sdp-no-mem"
     label = "scaled dot product without memory efficient attention"
     cmd_opt = "opt_sdp_no_mem_attention"
-    priority = 90
+    priority = 80
 
     def is_available(self):
         return hasattr(torch.nn.functional, "scaled_dot_product_attention") and callable(torch.nn.functional.scaled_dot_product_attention)
@@ -71,7 +71,7 @@ class SdOptimizationSdp(SdOptimizationSdpNoMem):
     name = "sdp"
     label = "scaled dot product"
     cmd_opt = "opt_sdp_attention"
-    priority = 80
+    priority = 70
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = scaled_dot_product_attention_forward
@@ -114,7 +114,7 @@ class SdOptimizationInvokeAI(SdOptimization):
 class SdOptimizationDoggettx(SdOptimization):
     name = "Doggettx"
     cmd_opt = "opt_split_attention"
-    priority = 20
+    priority = 90
 
     def apply(self):
         ldm.modules.attention.CrossAttention.forward = split_cross_attention_forward
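
A usage note on the result: after this commit the ordering is Doggettx (90) > sdp-no-mem (80) > sdp (70), so Doggettx is the automatic choice again. The restored --disable-opt-split-attention flag historically force-disables cross-attention layer optimization altogether rather than promoting the runner-up; in the hypothetical sketch above that corresponds to the empty-candidates case:

# Continuing the hypothetical example: force-disabling the optimization
# is roughly equivalent to leaving no candidates to pick from.
print(pick_default([]))  # -> None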