use_checkpoint = False

Author: huchenlei
Date: 2024-05-15 15:20:40 -04:00
parent 1c0a0c4c26
commit 022d835565
8 changed files with 13 additions and 10 deletions


@@ -4,16 +4,19 @@ import ldm.modules.attention
 import ldm.modules.diffusionmodules.openaimodel

+# Setting flag=False so that torch skips checking parameters.
+# parameters checking is expensive in frequent operations.
 def BasicTransformerBlock_forward(self, x, context=None):
-    return checkpoint(self._forward, x, context)
+    return checkpoint(self._forward, x, context, flag=False)

 def AttentionBlock_forward(self, x):
-    return checkpoint(self._forward, x)
+    return checkpoint(self._forward, x, flag=False)

 def ResBlock_forward(self, x, emb):
-    return checkpoint(self._forward, x, emb)
+    return checkpoint(self._forward, x, emb, flag=False)

 stored = []
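What changes here: each hijacked forward now passes flag=False to the checkpoint wrapper, so activation checkpointing, and the per-call walk over inputs and parameters that torch performs to set it up, is skipped. Note that torch.utils.checkpoint.checkpoint itself has no flag parameter, so this calling convention implies a local wrapper; a minimal sketch of such a shim, with the name and default purely illustrative:

```python
import torch.utils.checkpoint


def checkpoint(func, *args, flag=True):
    # Hypothetical shim matching the call sites in this diff.
    # flag=True: recompute activations in the backward pass to save
    # memory, at the cost of torch inspecting inputs/parameters on
    # every call.
    # flag=False: call the function directly and skip that
    # bookkeeping -- the fast path this commit selects.
    if flag:
        return torch.utils.checkpoint.checkpoint(func, *args, use_reentrant=False)
    return func(*args)
```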
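The trailing stored = [] is the list this hijack module uses to stash the original forward methods so the monkey-patch can be reverted. A sketch of the usual install/restore pair, assuming add()/remove() helpers (names illustrative, not necessarily the module's real API):

```python
def add():
    # Install the hijacked forwards, keeping the originals for remove().
    if stored:
        return
    stored.extend([
        ldm.modules.attention.BasicTransformerBlock.forward,
        ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward,
        ldm.modules.diffusionmodules.openaimodel.ResBlock.forward,
    ])
    ldm.modules.attention.BasicTransformerBlock.forward = BasicTransformerBlock_forward
    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = AttentionBlock_forward
    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = ResBlock_forward


def remove():
    # Restore the original forwards saved by add().
    if not stored:
        return
    ldm.modules.attention.BasicTransformerBlock.forward = stored[0]
    ldm.modules.diffusionmodules.openaimodel.AttentionBlock.forward = stored[1]
    ldm.modules.diffusionmodules.openaimodel.ResBlock.forward = stored[2]
    stored.clear()
```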