get attention optimizations to work

AUTOMATIC1111
2023-07-13 09:30:33 +03:00
parent b717eb7e56
commit ac4ccfa136
4 changed files with 12 additions and 8 deletions

modules/hypernetworks/hypernetwork.py

@@ -378,7 +378,7 @@ def apply_hypernetworks(hypernetworks, context, layer=None):
     return context_k, context_v
 
-def attention_CrossAttention_forward(self, x, context=None, mask=None):
+def attention_CrossAttention_forward(self, x, context=None, mask=None, **kwargs):
     h = self.heads
     q = self.to_q(x)
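
The change adds **kwargs to the hypernetwork's replacement CrossAttention forward, so that call sites for the attention optimizations can pass extra keyword arguments without breaking this monkey-patched method. A minimal sketch of the failure mode and the fix, with illustrative names only (CrossAttention here is a stand-in, and additional_tokens is a hypothetical extra keyword, not taken from the repo):

    # Stand-in for a module whose forward gets monkey-patched.
    class CrossAttention:
        def forward(self, x, context=None, mask=None):
            return x  # placeholder for the real attention computation

    # Pre-fix patched forward: fixed signature.
    def patched_forward_old(self, x, context=None, mask=None):
        return x

    # Post-fix patched forward: absorbs unknown keyword arguments.
    def patched_forward_new(self, x, context=None, mask=None, **kwargs):
        return x

    attn = CrossAttention()

    CrossAttention.forward = patched_forward_old
    try:
        # A newer call site passing an extra keyword argument.
        attn.forward([1.0], additional_tokens=None)
    except TypeError as e:
        print("old signature fails:", e)

    CrossAttention.forward = patched_forward_new
    print("new signature works:", attn.forward([1.0], additional_tokens=None))

Swallowing unexpected keywords via **kwargs keeps the override signature-compatible with any caller of the original forward, at the cost of silently ignoring arguments the hypernetwork path does not use.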