add an option to unload models during hypernetwork training to save VRAM

Author: AUTOMATIC
Date: 2022-10-11 19:03:08 +03:00
parent 6d09b8d1df
commit d4ea5f4d86
5 changed files with 46 additions and 18 deletions


@@ -5,7 +5,7 @@ import gradio as gr
 import modules.textual_inversion.textual_inversion
 import modules.textual_inversion.preprocess
-from modules import sd_hijack, shared
+from modules import sd_hijack, shared, devices
 from modules.hypernetworks import hypernetwork
@@ -41,5 +41,7 @@ Hypernetwork saved to {html.escape(filename)}
         raise
     finally:
         shared.loaded_hypernetwork = initial_hypernetwork
+        shared.sd_model.cond_stage_model.to(devices.device)
+        shared.sd_model.first_stage_model.to(devices.device)
         sd_hijack.apply_optimizations()
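
The diff only shows the restore side: after training, the text encoder (cond_stage_model) and VAE (first_stage_model) are moved back to the GPU in the finally block. A minimal sketch of the full unload/restore pattern is shown below. The option name opts.unload_models_when_training and the run_training callable are assumptions for illustration, not necessarily the exact names used in this commit; devices.device comes from the import added in the first hunk.

```python
from modules import devices, sd_hijack, shared


def train_with_optional_unload(run_training):
    # Assumed option name; the commit adds some such setting to shared.opts.
    unload = shared.opts.unload_models_when_training

    if unload:
        # The text encoder and VAE are not needed while the hypernetwork
        # itself trains, so move them to CPU to free VRAM for training.
        shared.sd_model.cond_stage_model.to("cpu")
        shared.sd_model.first_stage_model.to("cpu")

    try:
        run_training()
    finally:
        # Always move the models back to the GPU, even if training raised,
        # so later image generation is unaffected (mirrors the diff above).
        shared.sd_model.cond_stage_model.to(devices.device)
        shared.sd_model.first_stage_model.to(devices.device)
        sd_hijack.apply_optimizations()
```

Doing the restore in finally rather than after the training loop means an interrupted or failed run still leaves the model fully on the GPU.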