cross-attention working with placeholder {} syntax

Damian at mba
2022-10-17 21:15:03 +02:00
parent 8ff507b03b
commit 1fc1f8bf05
8 changed files with 534 additions and 237 deletions


@@ -400,7 +400,7 @@ class Generate:
                 mask_image = None
         try:
-            uc, c, ec = get_uc_and_c_and_ec(
+            uc, c, ec, ec_index_map = get_uc_and_c_and_ec(
                 prompt, model =self.model,
                 skip_normalize=skip_normalize,
                 log_tokens    =self.log_tokenization
@@ -438,7 +438,7 @@ class Generate:
                 sampler=self.sampler,
                 steps=steps,
                 cfg_scale=cfg_scale,
-                conditioning=(uc, c, ec),
+                conditioning=(uc, c, ec, ec_index_map),
                 ddim_eta=ddim_eta,
                 image_callback=image_callback,  # called after the final image is generated
                 step_callback=step_callback,    # called after each intermediate image is generated
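
Taken together, the two hunks thread a fourth conditioning element, ec_index_map, from prompt parsing down to the sampler call. Below is a minimal, hypothetical sketch of the idea behind such an index map: parsing a prompt whose {} placeholders mark the tokens targeted for cross-attention editing. The function name and return shape here are assumptions for illustration only, not the actual get_uc_and_c_and_ec implementation.

# Hypothetical sketch only: not the actual get_uc_and_c_and_ec parser.
# Treats '{...}' spans as the regions marked for cross-attention editing
# and returns the cleaned prompt plus the word indices inside the braces,
# a stand-in for the kind of map the diff threads through as ec_index_map.
import re

def split_placeholder_prompt(prompt: str):
    words, edited_indices = [], []
    inside_braces = False
    for token in re.findall(r'\{|\}|[^{}\s]+', prompt):
        if token == '{':
            inside_braces = True
        elif token == '}':
            inside_braces = False
        else:
            if inside_braces:
                # Record the position of a word that sits inside a {} span.
                edited_indices.append(len(words))
            words.append(token)
    return ' '.join(words), edited_indices

# Example: split_placeholder_prompt("a cat sitting on a {car}")
# -> ("a cat sitting on a car", [5])

During sampling, indices of this kind would let a cross-attention controller know which attention positions to swap or reweight between the original and edited prompts.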