textual inversion support for SDXL

AUTOMATIC1111
2023-07-29 15:15:06 +03:00
parent 4ca9f70b59
commit 6f0abbb71a
4 changed files with 29 additions and 9 deletions


@@ -56,6 +56,14 @@ def encode_embedding_init_text(self: sgm.modules.GeneralConditioner, init_text,
    return torch.cat(res, dim=1)


def tokenize(self: sgm.modules.GeneralConditioner, texts):
    for embedder in [embedder for embedder in self.embedders if hasattr(embedder, 'tokenize')]:
        return embedder.tokenize(texts)

    raise AssertionError('no tokenizer available')


def process_texts(self, texts):
    for embedder in [embedder for embedder in self.embedders if hasattr(embedder, 'process_texts')]:
        return embedder.process_texts(texts)
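
The tokenize and process_texts helpers use the same delegation pattern: scan self.embedders for the first embedder that exposes the requested method and forward the call to it, so code written against model.cond_stage_model keeps working with SDXL's GeneralConditioner. A minimal, self-contained sketch of that pattern follows; DummyEmbedder, DummyConditioner and the sample prompt are hypothetical stand-ins, not part of this commit.

# Hypothetical stand-ins illustrating the delegation pattern from the hunk above.
class DummyEmbedder:
    def tokenize(self, texts):
        # a real embedder would run its CLIP tokenizer here
        return [[101, 102] for _ in texts]


class DummyConditioner:
    def __init__(self):
        # only the second embedder knows how to tokenize
        self.embedders = [object(), DummyEmbedder()]

    def tokenize(self, texts):
        for embedder in [e for e in self.embedders if hasattr(e, 'tokenize')]:
            return embedder.tokenize(texts)

        raise AssertionError('no tokenizer available')


print(DummyConditioner().tokenize(['a photo of a cat']))  # -> [[101, 102]]
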
@@ -68,6 +76,7 @@ def get_target_prompt_token_count(self, token_count):
# those additions to GeneralConditioner make it possible to use it as model.cond_stage_model from SD1.5 in existing code
sgm.modules.GeneralConditioner.encode_embedding_init_text = encode_embedding_init_text
sgm.modules.GeneralConditioner.tokenize = tokenize
sgm.modules.GeneralConditioner.process_texts = process_texts
sgm.modules.GeneralConditioner.get_target_prompt_token_count = get_target_prompt_token_count
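
With those attributes assigned, SDXL's GeneralConditioner answers the calls that the webui's textual inversion code already makes on model.cond_stage_model for SD1.5 checkpoints, e.g. encode_embedding_init_text when a new embedding is created. A hedged usage sketch, assuming an SDXL checkpoint is loaded so that shared.sd_model.cond_stage_model points at the patched conditioner; the prompt text and the single-vector count are illustrative:

# Illustrative only: assumes the webui has an SDXL checkpoint loaded.
from modules import shared

conditioner = shared.sd_model.cond_stage_model           # patched GeneralConditioner
tokens = conditioner.tokenize(['a photo of a cat'])      # delegated to the CLIP embedder
init = conditioner.encode_embedding_init_text('cat', 1)  # per-embedder init vectors, concatenated along dim=1
print(init.shape)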