Revert "comment out customer_attention_context"

This reverts commit 8f8cd90787.

Due to NameError: name 'options' is not defined
This commit is contained in:
Eugene 2023-05-14 00:37:55 -04:00
parent 2623941d91
commit b72c9787a9

View File

@@ -59,15 +59,12 @@ def get_uc_and_c_and_ec(prompt_string,
     if log_tokens or getattr(Globals, "log_tokenization", False):
         log_tokenization(positive_prompt, negative_prompt, tokenizer=model.tokenizer)
-    # The below has been commented out as it is an instance method used for cleanly loading LoRA models, but is not currently needed.
-    # TODO: Reimplement custom_attention for 3.0 support of LoRA.
-
-    # with InvokeAIDiffuserComponent.custom_attention_context(model.unet,
-    #                                                         extra_conditioning_info=None,
-    #                                                         step_count=-1):
-    #     c, options = compel.build_conditioning_tensor_for_prompt_object(positive_prompt)
-    #     uc, _ = compel.build_conditioning_tensor_for_prompt_object(negative_prompt)
-    #     [c, uc] = compel.pad_conditioning_tensors_to_same_length([c, uc])
+    with InvokeAIDiffuserComponent.custom_attention_context(model.unet,
+                                                            extra_conditioning_info=None,
+                                                            step_count=-1):
+        c, options = compel.build_conditioning_tensor_for_prompt_object(positive_prompt)
+        uc, _ = compel.build_conditioning_tensor_for_prompt_object(negative_prompt)
+        [c, uc] = compel.pad_conditioning_tensors_to_same_length([c, uc])
     # now build the "real" ec
     ec = InvokeAIDiffuserComponent.ExtraConditioningInfo(tokens_count_including_eos_bos=tokens_count,