From 28b74523d036c9b7bd9c600733654c33f644da8e Mon Sep 17 00:00:00 2001
From: psychedelicious <4822129+psychedelicious@users.noreply.github.com>
Date: Mon, 1 Jan 2024 20:56:43 +1100
Subject: [PATCH] fix(ui): fix dynamic prompts with single prompt

Closes #5292

The special handling for a single prompt is totally extraneous and caused a bug.
---
 .../util/graph/buildLinearBatchConfig.ts | 193 +++++++-----------
 1 file changed, 77 insertions(+), 116 deletions(-)

diff --git a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearBatchConfig.ts b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearBatchConfig.ts
index 02b5fbf6b4..19d23ee372 100644
--- a/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearBatchConfig.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/graph/buildLinearBatchConfig.ts
@@ -23,27 +23,28 @@ export const prepareLinearUIBatch = (
   const { prompts, seedBehaviour } = state.dynamicPrompts;
 
   const data: Batch['data'] = [];
+  const firstBatchDatumList: components['schemas']['BatchDatum'][] = [];
+  const secondBatchDatumList: components['schemas']['BatchDatum'][] = [];
 
-  if (prompts.length === 1) {
+  // add seeds first to ensure the output order groups the prompts
+  if (seedBehaviour === 'PER_PROMPT') {
     const seeds = generateSeeds({
-      count: iterations,
+      count: prompts.length * iterations,
       start: shouldRandomizeSeed ? undefined : seed,
     });
 
-    const zipped: components['schemas']['BatchDatum'][] = [];
-
     if (graph.nodes[NOISE]) {
-      zipped.push({
+      firstBatchDatumList.push({
         node_path: NOISE,
         field_name: 'seed',
         items: seeds,
       });
     }
 
+    // add to metadata
     if (getHasMetadata(graph)) {
-      // add to metadata
       removeMetadata(graph, 'seed');
-      zipped.push({
+      firstBatchDatumList.push({
         node_path: METADATA,
         field_name: 'seed',
         items: seeds,
@@ -51,137 +52,97 @@ export const prepareLinearUIBatch = (
     }
 
     if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
-      zipped.push({
+      firstBatchDatumList.push({
         node_path: CANVAS_COHERENCE_NOISE,
         field_name: 'seed',
         items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
       });
     }
-
-    data.push(zipped);
   } else {
-    // prompts.length > 1 aka dynamic prompts
-    const firstBatchDatumList: components['schemas']['BatchDatum'][] = [];
-    const secondBatchDatumList: components['schemas']['BatchDatum'][] = [];
+    // seedBehaviour = SeedBehaviour.PerRun
+    const seeds = generateSeeds({
+      count: iterations,
+      start: shouldRandomizeSeed ? undefined : seed,
+    });
 
-    // add seeds first to ensure the output order groups the prompts
-    if (seedBehaviour === 'PER_PROMPT') {
-      const seeds = generateSeeds({
-        count: prompts.length * iterations,
-        start: shouldRandomizeSeed ? undefined : seed,
-      });
-
-      if (graph.nodes[NOISE]) {
-        firstBatchDatumList.push({
-          node_path: NOISE,
-          field_name: 'seed',
-          items: seeds,
-        });
-      }
-
-      // add to metadata
-      if (getHasMetadata(graph)) {
-        removeMetadata(graph, 'seed');
-        firstBatchDatumList.push({
-          node_path: METADATA,
-          field_name: 'seed',
-          items: seeds,
-        });
-      }
-
-      if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
-        firstBatchDatumList.push({
-          node_path: CANVAS_COHERENCE_NOISE,
-          field_name: 'seed',
-          items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
-        });
-      }
-    } else {
-      // seedBehaviour = SeedBehaviour.PerRun
-      const seeds = generateSeeds({
-        count: iterations,
-        start: shouldRandomizeSeed ? undefined : seed,
-      });
-
-      if (graph.nodes[NOISE]) {
-        secondBatchDatumList.push({
-          node_path: NOISE,
-          field_name: 'seed',
-          items: seeds,
-        });
-      }
-
-      // add to metadata
-      if (getHasMetadata(graph)) {
-        removeMetadata(graph, 'seed');
-        secondBatchDatumList.push({
-          node_path: METADATA,
-          field_name: 'seed',
-          items: seeds,
-        });
-      }
-
-      if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
-        secondBatchDatumList.push({
-          node_path: CANVAS_COHERENCE_NOISE,
-          field_name: 'seed',
-          items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
-        });
-      }
-      data.push(secondBatchDatumList);
-    }
-
-    const extendedPrompts =
-      seedBehaviour === 'PER_PROMPT'
-        ? range(iterations).flatMap(() => prompts)
-        : prompts;
-
-    // zipped batch of prompts
-    if (graph.nodes[POSITIVE_CONDITIONING]) {
-      firstBatchDatumList.push({
-        node_path: POSITIVE_CONDITIONING,
-        field_name: 'prompt',
-        items: extendedPrompts,
+    if (graph.nodes[NOISE]) {
+      secondBatchDatumList.push({
+        node_path: NOISE,
+        field_name: 'seed',
+        items: seeds,
       });
     }
 
     // add to metadata
     if (getHasMetadata(graph)) {
-      removeMetadata(graph, 'positive_prompt');
-      firstBatchDatumList.push({
+      removeMetadata(graph, 'seed');
+      secondBatchDatumList.push({
         node_path: METADATA,
-        field_name: 'positive_prompt',
-        items: extendedPrompts,
+        field_name: 'seed',
+        items: seeds,
       });
     }
 
-    if (shouldConcatSDXLStylePrompt && model?.base_model === 'sdxl') {
-      const stylePrompts = extendedPrompts.map((p) =>
-        [p, positiveStylePrompt].join(' ')
-      );
+    if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
+      secondBatchDatumList.push({
+        node_path: CANVAS_COHERENCE_NOISE,
+        field_name: 'seed',
+        items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
+      });
+    }
+    data.push(secondBatchDatumList);
+  }
 
-      if (graph.nodes[POSITIVE_CONDITIONING]) {
-        firstBatchDatumList.push({
-          node_path: POSITIVE_CONDITIONING,
-          field_name: 'style',
-          items: stylePrompts,
-        });
-      }
+  const extendedPrompts =
+    seedBehaviour === 'PER_PROMPT'
+      ? range(iterations).flatMap(() => prompts)
+      : prompts;
 
-      // add to metadata
-      if (getHasMetadata(graph)) {
-        removeMetadata(graph, 'positive_style_prompt');
-        firstBatchDatumList.push({
-          node_path: METADATA,
-          field_name: 'positive_style_prompt',
-          items: extendedPrompts,
-        });
-      }
+  // zipped batch of prompts
+  if (graph.nodes[POSITIVE_CONDITIONING]) {
+    firstBatchDatumList.push({
+      node_path: POSITIVE_CONDITIONING,
+      field_name: 'prompt',
+      items: extendedPrompts,
+    });
+  }
+
+  // add to metadata
+  if (getHasMetadata(graph)) {
+    removeMetadata(graph, 'positive_prompt');
+    firstBatchDatumList.push({
+      node_path: METADATA,
+      field_name: 'positive_prompt',
+      items: extendedPrompts,
+    });
+  }
+
+  if (shouldConcatSDXLStylePrompt && model?.base_model === 'sdxl') {
+    const stylePrompts = extendedPrompts.map((p) =>
+      [p, positiveStylePrompt].join(' ')
+    );
+
+    if (graph.nodes[POSITIVE_CONDITIONING]) {
+      firstBatchDatumList.push({
+        node_path: POSITIVE_CONDITIONING,
+        field_name: 'style',
+        items: stylePrompts,
+      });
     }
 
-    data.push(firstBatchDatumList);
+    // add to metadata
+    if (getHasMetadata(graph)) {
+      removeMetadata(graph, 'positive_style_prompt');
+      firstBatchDatumList.push({
+        node_path: METADATA,
+        field_name: 'positive_style_prompt',
+        items: extendedPrompts,
+      });
+    }
   }
 
+  data.push(firstBatchDatumList);
+
   const enqueueBatchArg: BatchConfig = {
     prepend,
     batch: {