Add extra norm module into built-in lora ext

Refer to LyCORIS 1.9.0.dev6.
Add a new option and module for norm-layer training
(which is reported to be good for style).
Kohaku-Blueleaf
2023-08-13 02:27:39 +08:00
parent b2080756fc
commit bd4da4474b
4 changed files with 105 additions and 11 deletions
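The commit summary says the norm support itself lives in a new module file elsewhere in this changeset. As a rough illustration of how such a module could plug into the NetworkModule API changed in the diff below, here is a minimal sketch; the class name NetworkModuleNorm, the w_norm/b_norm weight keys, and the weights.w dictionary are assumptions for illustration, not necessarily the exact code added by this commit.

# Illustrative sketch of a norm-layer module; names such as NetworkModuleNorm,
# w_norm and b_norm are assumptions, not the commit's exact code.
import network


class NetworkModuleNorm(network.NetworkModule):
    def __init__(self, net, weights):
        super().__init__(net, weights)
        # A LyCORIS-style norm delta is just a learned scale and shift for the
        # normalization layer's weight and bias.
        self.w_norm = weights.w.get("w_norm")
        self.b_norm = weights.w.get("b_norm")

    def calc_updown(self, orig_weight):
        output_shape = self.w_norm.shape
        updown = self.w_norm.to(orig_weight.device, dtype=orig_weight.dtype)

        ex_bias = None
        if self.b_norm is not None:
            ex_bias = self.b_norm.to(orig_weight.device, dtype=orig_weight.dtype)

        # finalize_updown now takes the extra bias and returns a pair,
        # as introduced in the diff below.
        return self.finalize_updown(updown, orig_weight, output_shape, ex_bias)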

@@ -133,7 +133,7 @@ class NetworkModule:
         return 1.0
 
-    def finalize_updown(self, updown, orig_weight, output_shape):
+    def finalize_updown(self, updown, orig_weight, output_shape, ex_bias=None):
         if self.bias is not None:
             updown = updown.reshape(self.bias.shape)
             updown += self.bias.to(orig_weight.device, dtype=orig_weight.dtype)
@@ -145,7 +145,10 @@ class NetworkModule:
         if orig_weight.size().numel() == updown.size().numel():
             updown = updown.reshape(orig_weight.shape)
 
-        return updown * self.calc_scale() * self.multiplier()
+        if ex_bias is None:
+            ex_bias = 0
+
+        return updown * self.calc_scale() * self.multiplier(), ex_bias * self.multiplier()
 
     def calc_updown(self, target):
         raise NotImplementedError()
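With finalize_updown now returning a pair, callers that previously applied only a weight delta also receive a bias delta (a plain 0 when the module carries none). Below is a minimal sketch of how a caller might apply both values to a torch layer; the function name and structure are illustrative assumptions, not the extension's actual apply code.

# Illustrative caller; not the extension's actual code.
import torch


def apply_module_deltas(layer: torch.nn.Module, net_module, orig_weight: torch.Tensor):
    with torch.no_grad():
        # calc_updown -> finalize_updown now returns (weight delta, extra bias delta).
        updown, ex_bias = net_module.calc_updown(orig_weight)

        layer.weight += updown.to(layer.weight.dtype)

        # ex_bias is the scalar 0 when the module has no bias delta.
        if torch.is_tensor(ex_bias) and layer.bias is not None:
            layer.bias += ex_bias.to(layer.bias.dtype)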