From e0581a2c37c2ea353c8696aa884f0565d34642ce Mon Sep 17 00:00:00 2001
From: Damian Stewart
Date: Mon, 21 Nov 2022 19:27:40 +0100
Subject: [PATCH] when doing --log_tokenization/-t also log parsed prompt

---
 ldm/invoke/conditioning.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/ldm/invoke/conditioning.py b/ldm/invoke/conditioning.py
index ff345f0989..54092578a1 100644
--- a/ldm/invoke/conditioning.py
+++ b/ldm/invoke/conditioning.py
@@ -47,6 +47,9 @@ def get_uc_and_c_and_ec(prompt_string_uncleaned, model, log_tokens=False, skip_n
     parsed_prompt = pp.parse_conjunction(prompt_string_cleaned).prompts[0]
     parsed_negative_prompt: FlattenedPrompt = pp.parse_conjunction(unconditioned_words).prompts[0]
+    if log_tokens:
+        print(f">> Parsed prompt to {parsed_prompt}")
+        print(f">> Parsed negative prompt to {parsed_negative_prompt}")

     conditioning = None
     cac_args:cross_attention_control.Arguments = None
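
For context, the pattern the patch adds is simple opt-in diagnostics: the parsed positive and negative prompts are printed only when the caller has requested tokenization logging. The sketch below illustrates that pattern in isolation; `parse_prompt`, `ParsedPrompt`, and `get_conditioning` are hypothetical stand-ins for this example, not InvokeAI's actual parser or API.

```python
# Minimal sketch of the gated-logging pattern from the patch (assumed names).
from dataclasses import dataclass


@dataclass
class ParsedPrompt:
    """Simplified stand-in for a parsed prompt structure."""
    fragments: list


def parse_prompt(text: str) -> ParsedPrompt:
    # Hypothetical parser: splits on whitespace rather than real prompt syntax.
    return ParsedPrompt(fragments=text.split())


def get_conditioning(prompt: str, negative_prompt: str, log_tokens: bool = False):
    parsed_prompt = parse_prompt(prompt)
    parsed_negative_prompt = parse_prompt(negative_prompt)

    # Same idea as the patch: only print when the user opted in
    # (e.g. via a --log_tokenization/-t style flag mapped to log_tokens).
    if log_tokens:
        print(f">> Parsed prompt to {parsed_prompt}")
        print(f">> Parsed negative prompt to {parsed_negative_prompt}")

    return parsed_prompt, parsed_negative_prompt


if __name__ == "__main__":
    get_conditioning("a photo of a cat", "blurry, low quality", log_tokens=True)
```

Keeping the prints behind `log_tokens` means the extra output only appears alongside the existing tokenization logging, so normal runs stay quiet.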