Cache checkpoints by a combination key of checkpoint and VAE

This commit is contained in:
Muhammad Rizqi Nur
2022-10-31 15:19:34 +07:00
parent b96d0c4e9e
commit 726769da35
2 changed files with 23 additions and 12 deletions

View File

@@ -43,7 +43,7 @@ def refresh_vae_list(vae_path=vae_path, model_path=model_path):
vae_dict.update(res)
return vae_list
def load_vae(model, checkpoint_file, vae_file="auto"):
def resolve_vae(checkpoint_file, vae_file="auto"):
global first_load, vae_dict, vae_list
# save_settings = False
@@ -94,6 +94,12 @@ def load_vae(model, checkpoint_file, vae_file="auto"):
if vae_file and not os.path.exists(vae_file):
vae_file = None
return vae_file
def load_vae(model, vae_file):
global first_load, vae_dict, vae_list
# save_settings = False
if vae_file:
print(f"Loading VAE weights from: {vae_file}")
vae_ckpt = torch.load(vae_file, map_location=shared.weight_load_location)