From 603509ec905a9c9ac1011e9531a9da180828fcc0 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Sat, 8 Jun 2024 10:54:41 +0300
Subject: [PATCH] as per wfjsw's suggestion, revert changes for
 sd_hijack_checkpoint.py

---
 modules/sd_hijack_checkpoint.py | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/modules/sd_hijack_checkpoint.py b/modules/sd_hijack_checkpoint.py
index b2f05bbdc..2604d969f 100644
--- a/modules/sd_hijack_checkpoint.py
+++ b/modules/sd_hijack_checkpoint.py
@@ -4,19 +4,16 @@ import ldm.modules.attention
 import ldm.modules.diffusionmodules.openaimodel
 
-# Setting flag=False so that torch skips checking parameters.
-# parameters checking is expensive in frequent operations.
-
 
 def BasicTransformerBlock_forward(self, x, context=None):
-    return checkpoint(self._forward, x, context, flag=False)
+    return checkpoint(self._forward, x, context)
 
 
 def AttentionBlock_forward(self, x):
-    return checkpoint(self._forward, x, flag=False)
+    return checkpoint(self._forward, x)
 
 
 def ResBlock_forward(self, x, emb):
-    return checkpoint(self._forward, x, emb, flag=False)
+    return checkpoint(self._forward, x, emb)
 
 
 stored = []
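
For reference, a minimal runnable sketch (not part of the patch) of what the
restored call sites do, assuming checkpoint in this module is bound to
torch.utils.checkpoint.checkpoint as in the file's import header. The
use_reentrant=False keyword below is an illustrative choice, not taken from
the patch, whose calls pass positional arguments only:

import torch
from torch.utils.checkpoint import checkpoint


class Block(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(16, 16)

    def _forward(self, x):
        return torch.relu(self.linear(x))

    def forward(self, x):
        # Activation checkpointing: _forward's intermediate activations are
        # dropped after the forward pass and recomputed during backward,
        # trading extra compute for lower peak memory.
        return checkpoint(self._forward, x, use_reentrant=False)


x = torch.randn(4, 16, requires_grad=True)
Block()(x).sum().backward()  # gradients flow through the recomputed subgraph

In the surrounding module, wrappers like these are monkey-patched over the
corresponding ldm block forward methods; the stored list visible at the end of
the hunk appears to hold the original forwards so the hijack can be undone.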