Added smoothstep interpolation to checkpoint merging

William Moorehouse
2022-09-26 10:50:21 -04:00
parent 91643f651d
commit dc11748dea
2 changed files with 21 additions and 4 deletions

@@ -139,16 +139,31 @@ def run_pnginfo(image):
     return '', geninfo, info


-def run_modelmerger(modelname_0, modelname_1, alpha):
+def run_modelmerger(modelname_0, modelname_1, interp_method, interp_amount):
+    # Linear interpolation (https://en.wikipedia.org/wiki/Linear_interpolation)
     def weighted_sum(theta0, theta1, alpha):
         return ((1 - alpha) * theta0) + (alpha * theta1)

+    # Smoothstep (https://en.wikipedia.org/wiki/Smoothstep)
+    def sigmoid(theta0, theta1, alpha):
+        alpha = alpha * alpha * (3 - (2 * alpha))
+        return theta0 + ((theta1 - theta0) * alpha)
+
     model_0 = torch.load('models/' + modelname_0 + '.ckpt')
     model_1 = torch.load('models/' + modelname_1 + '.ckpt')

     theta_0 = model_0['state_dict']
     theta_1 = model_1['state_dict']

+    theta_func = weighted_sum
+    if interp_method == "Weighted Sum":
+        theta_func = weighted_sum
+    if interp_method == "Sigmoid":
+        theta_func = sigmoid
+
     for key in theta_0.keys():
         if 'model' in key and key in theta_1:
-            theta_0[key] = (1 - alpha) * theta_0[key] + alpha * theta_1[key]
+            theta_0[key] = theta_func(theta_0[key], theta_1[key], interp_amount)

     for key in theta_1.keys():
         if 'model' in key and key not in theta_0:
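
For reference, a minimal standalone sketch (not part of the commit) of how the two blend curves compare at the same mix amount. It reuses the helper names from the diff on made-up tensors; values and shapes are illustrative only.

import torch

def weighted_sum(theta0, theta1, alpha):
    # Plain linear interpolation: a straight line from theta0 (alpha=0) to theta1 (alpha=1).
    return ((1 - alpha) * theta0) + (alpha * theta1)

def sigmoid(theta0, theta1, alpha):
    # Smoothstep: remap alpha with 3a^2 - 2a^3 before blending, easing in/out near 0 and 1.
    alpha = alpha * alpha * (3 - (2 * alpha))
    return theta0 + ((theta1 - theta0) * alpha)

theta0 = torch.zeros(3)   # stand-in for one checkpoint's weights
theta1 = torch.ones(3)    # stand-in for the other checkpoint's weights

for a in (0.0, 0.25, 0.5, 0.75, 1.0):
    print(a, weighted_sum(theta0, theta1, a)[0].item(), sigmoid(theta0, theta1, a)[0].item())

# At interp_amount 0.25 the smoothstep blend (~0.156) stays closer to theta0 than the
# linear blend (0.25); the two curves agree at 0, 0.5, and 1.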