option to pad prompt/neg prompt to be same length

AUTOMATIC
2023-05-22 00:13:53 +03:00
parent 8faac8b963
commit 3366e494a1
3 changed files with 16 additions and 0 deletions

modules/sd_models.py

@@ -508,6 +508,11 @@ def load_model(checkpoint_info=None, already_loaded_state_dict=None):
 
     timer.record("scripts callbacks")
 
+    with devices.autocast(), torch.no_grad():
+        sd_model.cond_stage_model_empty_prompt = sd_model.cond_stage_model([""])
+
+    timer.record("calculate empty prompt")
+
     print(f"Model loaded in {timer.summary()}.")
 
     return sd_model
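
The hunk above caches the conditioning produced by an empty prompt ("") once at model-load time, under torch.no_grad and autocast. The other two changed files (not shown in this extract) use that cached value when the new padding option is enabled. The sketch below is a hypothetical illustration of how a cached empty-prompt conditioning can pad the prompt and negative-prompt conditionings to the same token length; it is not the repository's actual code, and the helper names pad_cond and pad_to_same_length as well as the [batch, tokens, dim] shapes are assumptions for the example.

import torch

def pad_cond(tensor, repeats, empty):
    # Hypothetical helper: extend a conditioning tensor along the token axis
    # by appending the cached empty-prompt conditioning `repeats` times.
    # `empty` is assumed to have shape [1, chunk, dim] (one 77-token chunk).
    filler = empty.to(device=tensor.device, dtype=tensor.dtype).repeat(tensor.shape[0], repeats, 1)
    return torch.cat([tensor, filler], dim=1)

def pad_to_same_length(cond, uncond, empty):
    # Pad whichever of the two conditionings is shorter so both end up with
    # the same number of token chunks; shapes here are [batch, tokens, dim].
    diff = (cond.shape[1] - uncond.shape[1]) // empty.shape[1]
    if diff > 0:
        uncond = pad_cond(uncond, diff, empty)
    elif diff < 0:
        cond = pad_cond(cond, -diff, empty)
    return cond, uncond

Caching the empty-prompt conditioning at load time means the filler does not have to be re-encoded on every sampling step when padding is applied.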