fix(ui): fix dynamic prompts with single prompt

Closes #5292

The special handling for a single prompt was totally extraneous and caused a bug.
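
For intuition, here is a minimal sketch of why the general path already covers a single prompt under seedBehaviour === 'PER_PROMPT'. It is not the actual prepareLinearUIBatch; makeSeeds and repeatPrompts below are simplified stand-ins for generateSeeds and the range(iterations).flatMap(...) expression visible in the diff:

// Simplified stand-ins, used only for illustration (hypothetical helpers).
const makeSeeds = (count: number, start = 0): number[] =>
  Array.from({ length: count }, (_, i) => start + i);

const repeatPrompts = (prompts: string[], iterations: number): string[] =>
  Array.from({ length: iterations }).flatMap(() => prompts);

// A single prompt is just the prompts.length === 1 case of the general path.
const prompts = ['a cat'];
const iterations = 3;

const seeds = makeSeeds(prompts.length * iterations); // [0, 1, 2]
const extendedPrompts = repeatPrompts(prompts, iterations); // ['a cat', 'a cat', 'a cat']

// The seed list and the extended prompt list always have the same length,
// so the zipped batch data stays aligned and no separate single-prompt branch is needed.
console.log(seeds.length === extendedPrompts.length); // true
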
Authored by psychedelicious on 2024-01-01 20:56:43 +11:00, committed by Kent Keirsey
parent 9359c03c3c
commit 28b74523d0


@@ -23,27 +23,28 @@ export const prepareLinearUIBatch = (
const { prompts, seedBehaviour } = state.dynamicPrompts;
const data: Batch['data'] = [];
const firstBatchDatumList: components['schemas']['BatchDatum'][] = [];
const secondBatchDatumList: components['schemas']['BatchDatum'][] = [];
if (prompts.length === 1) {
// add seeds first to ensure the output order groups the prompts
if (seedBehaviour === 'PER_PROMPT') {
const seeds = generateSeeds({
count: iterations,
count: prompts.length * iterations,
start: shouldRandomizeSeed ? undefined : seed,
});
const zipped: components['schemas']['BatchDatum'][] = [];
if (graph.nodes[NOISE]) {
zipped.push({
firstBatchDatumList.push({
node_path: NOISE,
field_name: 'seed',
items: seeds,
});
}
// add to metadata
if (getHasMetadata(graph)) {
// add to metadata
removeMetadata(graph, 'seed');
zipped.push({
firstBatchDatumList.push({
node_path: METADATA,
field_name: 'seed',
items: seeds,
@@ -51,137 +52,97 @@ export const prepareLinearUIBatch = (
}
if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
zipped.push({
firstBatchDatumList.push({
node_path: CANVAS_COHERENCE_NOISE,
field_name: 'seed',
items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
});
}
data.push(zipped);
} else {
// prompts.length > 1 aka dynamic prompts
const firstBatchDatumList: components['schemas']['BatchDatum'][] = [];
const secondBatchDatumList: components['schemas']['BatchDatum'][] = [];
// seedBehaviour = SeedBehaviour.PerRun
const seeds = generateSeeds({
count: iterations,
start: shouldRandomizeSeed ? undefined : seed,
});
// add seeds first to ensure the output order groups the prompts
if (seedBehaviour === 'PER_PROMPT') {
const seeds = generateSeeds({
count: prompts.length * iterations,
start: shouldRandomizeSeed ? undefined : seed,
});
if (graph.nodes[NOISE]) {
firstBatchDatumList.push({
node_path: NOISE,
field_name: 'seed',
items: seeds,
});
}
// add to metadata
if (getHasMetadata(graph)) {
removeMetadata(graph, 'seed');
firstBatchDatumList.push({
node_path: METADATA,
field_name: 'seed',
items: seeds,
});
}
if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
firstBatchDatumList.push({
node_path: CANVAS_COHERENCE_NOISE,
field_name: 'seed',
items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
});
}
} else {
// seedBehaviour = SeedBehaviour.PerRun
const seeds = generateSeeds({
count: iterations,
start: shouldRandomizeSeed ? undefined : seed,
});
if (graph.nodes[NOISE]) {
secondBatchDatumList.push({
node_path: NOISE,
field_name: 'seed',
items: seeds,
});
}
// add to metadata
if (getHasMetadata(graph)) {
removeMetadata(graph, 'seed');
secondBatchDatumList.push({
node_path: METADATA,
field_name: 'seed',
items: seeds,
});
}
if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
secondBatchDatumList.push({
node_path: CANVAS_COHERENCE_NOISE,
field_name: 'seed',
items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
});
}
data.push(secondBatchDatumList);
}
const extendedPrompts =
seedBehaviour === 'PER_PROMPT'
? range(iterations).flatMap(() => prompts)
: prompts;
// zipped batch of prompts
if (graph.nodes[POSITIVE_CONDITIONING]) {
firstBatchDatumList.push({
node_path: POSITIVE_CONDITIONING,
field_name: 'prompt',
items: extendedPrompts,
if (graph.nodes[NOISE]) {
secondBatchDatumList.push({
node_path: NOISE,
field_name: 'seed',
items: seeds,
});
}
// add to metadata
if (getHasMetadata(graph)) {
removeMetadata(graph, 'positive_prompt');
firstBatchDatumList.push({
removeMetadata(graph, 'seed');
secondBatchDatumList.push({
node_path: METADATA,
field_name: 'positive_prompt',
items: extendedPrompts,
field_name: 'seed',
items: seeds,
});
}
if (shouldConcatSDXLStylePrompt && model?.base_model === 'sdxl') {
const stylePrompts = extendedPrompts.map((p) =>
[p, positiveStylePrompt].join(' ')
);
if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
secondBatchDatumList.push({
node_path: CANVAS_COHERENCE_NOISE,
field_name: 'seed',
items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
});
}
data.push(secondBatchDatumList);
}
if (graph.nodes[POSITIVE_CONDITIONING]) {
firstBatchDatumList.push({
node_path: POSITIVE_CONDITIONING,
field_name: 'style',
items: stylePrompts,
});
}
const extendedPrompts =
seedBehaviour === 'PER_PROMPT'
? range(iterations).flatMap(() => prompts)
: prompts;
// add to metadata
if (getHasMetadata(graph)) {
removeMetadata(graph, 'positive_style_prompt');
firstBatchDatumList.push({
node_path: METADATA,
field_name: 'positive_style_prompt',
items: extendedPrompts,
});
}
// zipped batch of prompts
if (graph.nodes[POSITIVE_CONDITIONING]) {
firstBatchDatumList.push({
node_path: POSITIVE_CONDITIONING,
field_name: 'prompt',
items: extendedPrompts,
});
}
// add to metadata
if (getHasMetadata(graph)) {
removeMetadata(graph, 'positive_prompt');
firstBatchDatumList.push({
node_path: METADATA,
field_name: 'positive_prompt',
items: extendedPrompts,
});
}
if (shouldConcatSDXLStylePrompt && model?.base_model === 'sdxl') {
const stylePrompts = extendedPrompts.map((p) =>
[p, positiveStylePrompt].join(' ')
);
if (graph.nodes[POSITIVE_CONDITIONING]) {
firstBatchDatumList.push({
node_path: POSITIVE_CONDITIONING,
field_name: 'style',
items: stylePrompts,
});
}
data.push(firstBatchDatumList);
// add to metadata
if (getHasMetadata(graph)) {
removeMetadata(graph, 'positive_style_prompt');
firstBatchDatumList.push({
node_path: METADATA,
field_name: 'positive_style_prompt',
items: extendedPrompts,
});
}
}
data.push(firstBatchDatumList);
const enqueueBatchArg: BatchConfig = {
prepend,
batch: {