delete xformers attnblock

C43H66N12O12S2 authored 2022-10-17 19:49:11 +03:00, committed by AUTOMATIC1111
parent 786ed49922
commit 2043c4a231


@@ -292,15 +292,3 @@ def cross_attention_attnblock_forward(self, x):
         return h3
 
-def xformers_attnblock_forward(self, x):
-    try:
-        h_ = x
-        h_ = self.norm(h_)
-        q1 = self.q(h_).contiguous()
-        k1 = self.k(h_).contiguous()
-        v = self.v(h_).contiguous()
-        out = xformers.ops.memory_efficient_attention(q1, k1, v)
-        out = self.proj_out(out)
-        return x + out
-    except NotImplementedError:
-        return cross_attention_attnblock_forward(self, x)
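
For context (not part of the diff above): the q, k and v of an AttnBlock come from 1x1 convolutions and are laid out as (B, C, H, W) feature maps, while xformers.ops.memory_efficient_attention works on sequence-shaped tensors whose last dimension is the embedding size. The sketch below is a hypothetical illustration, under that assumption, of flattening the spatial grid into tokens before the call and restoring the layout afterwards; the function and variable names are illustrative and not part of the repository.

    # A minimal sketch, not part of this commit: reshaping conv-style
    # q/k/v feature maps for xformers memory-efficient attention.
    # The (B, C, H, W) layout and all names here are assumptions.
    import torch
    import xformers.ops

    def attnblock_memory_efficient(q: torch.Tensor, k: torch.Tensor, v: torch.Tensor) -> torch.Tensor:
        b, c, h, w = q.shape
        # Flatten the H*W spatial grid into a token sequence: (B, H*W, C).
        q, k, v = (t.reshape(b, c, h * w).permute(0, 2, 1).contiguous() for t in (q, k, v))
        out = xformers.ops.memory_efficient_attention(q, k, v)  # (B, H*W, C)
        # Restore the (B, C, H, W) layout expected by a following 1x1 projection.
        return out.permute(0, 2, 1).reshape(b, c, h, w)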