Mirror of https://github.com/invoke-ai/InvokeAI, synced 2024-08-30 20:32:17 +00:00
fix(ui): fix dynamic prompts with single prompt
Closes #5292. The special handling for a single prompt was entirely extraneous and caused a bug.
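For context, a minimal sketch of why the special case is unnecessary (hypothetical helper and variable names, not the actual `prepareLinearUIBatch` code): with a single prompt, the general dynamic-prompts path already degenerates correctly, since `prompts.length * iterations` seeds and the repeated prompt list collapse to exactly one prompt per iteration.

```ts
// Hypothetical, simplified sketch -- not the real prepareLinearUIBatch.
// Shows that the general path handles prompts.length === 1 without a special branch.
const range = (n: number): number[] => Array.from({ length: n }, (_, i) => i);

const planBatch = (prompts: string[], iterations: number, seedPerPrompt: boolean) => {
  // PER_PROMPT: one seed per (prompt, iteration) pair; otherwise one seed per iteration.
  const seedCount = seedPerPrompt ? prompts.length * iterations : iterations;
  // Repeat the prompt list so it zips 1:1 with the per-prompt seeds.
  const extendedPrompts = seedPerPrompt ? range(iterations).flatMap(() => prompts) : prompts;
  return { seedCount, extendedPrompts };
};

// Single prompt, 3 iterations, seeds per prompt:
// -> { seedCount: 3, extendedPrompts: ['a cat', 'a cat', 'a cat'] }
console.log(planBatch(['a cat'], 3, true));
```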
parent 9359c03c3c
commit 28b74523d0
@@ -23,27 +23,28 @@ export const prepareLinearUIBatch = (
   const { prompts, seedBehaviour } = state.dynamicPrompts;
 
   const data: Batch['data'] = [];
+  const firstBatchDatumList: components['schemas']['BatchDatum'][] = [];
+  const secondBatchDatumList: components['schemas']['BatchDatum'][] = [];
 
-  if (prompts.length === 1) {
+  // add seeds first to ensure the output order groups the prompts
+  if (seedBehaviour === 'PER_PROMPT') {
     const seeds = generateSeeds({
-      count: iterations,
+      count: prompts.length * iterations,
       start: shouldRandomizeSeed ? undefined : seed,
     });
 
-    const zipped: components['schemas']['BatchDatum'][] = [];
-
     if (graph.nodes[NOISE]) {
-      zipped.push({
+      firstBatchDatumList.push({
         node_path: NOISE,
         field_name: 'seed',
         items: seeds,
       });
     }
 
+    // add to metadata
     if (getHasMetadata(graph)) {
-      // add to metadata
       removeMetadata(graph, 'seed');
-      zipped.push({
+      firstBatchDatumList.push({
         node_path: METADATA,
         field_name: 'seed',
         items: seeds,
@@ -51,137 +52,97 @@ export const prepareLinearUIBatch = (
     }
 
     if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
-      zipped.push({
+      firstBatchDatumList.push({
         node_path: CANVAS_COHERENCE_NOISE,
         field_name: 'seed',
         items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
       });
     }
 
-    data.push(zipped);
   } else {
-    // prompts.length > 1 aka dynamic prompts
-    const firstBatchDatumList: components['schemas']['BatchDatum'][] = [];
-    const secondBatchDatumList: components['schemas']['BatchDatum'][] = [];
-
-    // add seeds first to ensure the output order groups the prompts
-    if (seedBehaviour === 'PER_PROMPT') {
-      const seeds = generateSeeds({
-        count: prompts.length * iterations,
-        start: shouldRandomizeSeed ? undefined : seed,
-      });
-
-      if (graph.nodes[NOISE]) {
-        firstBatchDatumList.push({
-          node_path: NOISE,
-          field_name: 'seed',
-          items: seeds,
-        });
-      }
-
-      // add to metadata
-      if (getHasMetadata(graph)) {
-        removeMetadata(graph, 'seed');
-        firstBatchDatumList.push({
-          node_path: METADATA,
-          field_name: 'seed',
-          items: seeds,
-        });
-      }
-
-      if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
-        firstBatchDatumList.push({
-          node_path: CANVAS_COHERENCE_NOISE,
-          field_name: 'seed',
-          items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
-        });
-      }
-    } else {
-      // seedBehaviour = SeedBehaviour.PerRun
-      const seeds = generateSeeds({
-        count: iterations,
-        start: shouldRandomizeSeed ? undefined : seed,
-      });
-
-      if (graph.nodes[NOISE]) {
-        secondBatchDatumList.push({
-          node_path: NOISE,
-          field_name: 'seed',
-          items: seeds,
-        });
-      }
-
-      // add to metadata
-      if (getHasMetadata(graph)) {
-        removeMetadata(graph, 'seed');
-        secondBatchDatumList.push({
-          node_path: METADATA,
-          field_name: 'seed',
-          items: seeds,
-        });
-      }
-
-      if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
-        secondBatchDatumList.push({
-          node_path: CANVAS_COHERENCE_NOISE,
-          field_name: 'seed',
-          items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
-        });
-      }
-      data.push(secondBatchDatumList);
-    }
-
-    const extendedPrompts =
-      seedBehaviour === 'PER_PROMPT'
-        ? range(iterations).flatMap(() => prompts)
-        : prompts;
-
-    // zipped batch of prompts
-    if (graph.nodes[POSITIVE_CONDITIONING]) {
-      firstBatchDatumList.push({
-        node_path: POSITIVE_CONDITIONING,
-        field_name: 'prompt',
-        items: extendedPrompts,
-      });
-    }
-
-    // add to metadata
-    if (getHasMetadata(graph)) {
-      removeMetadata(graph, 'positive_prompt');
-      firstBatchDatumList.push({
-        node_path: METADATA,
-        field_name: 'positive_prompt',
-        items: extendedPrompts,
-      });
-    }
-
-    if (shouldConcatSDXLStylePrompt && model?.base_model === 'sdxl') {
-      const stylePrompts = extendedPrompts.map((p) =>
-        [p, positiveStylePrompt].join(' ')
-      );
-
-      if (graph.nodes[POSITIVE_CONDITIONING]) {
-        firstBatchDatumList.push({
-          node_path: POSITIVE_CONDITIONING,
-          field_name: 'style',
-          items: stylePrompts,
-        });
-      }
-
-      // add to metadata
-      if (getHasMetadata(graph)) {
-        removeMetadata(graph, 'positive_style_prompt');
-        firstBatchDatumList.push({
-          node_path: METADATA,
-          field_name: 'positive_style_prompt',
-          items: extendedPrompts,
-        });
-      }
-    }
-
-    data.push(firstBatchDatumList);
+    // seedBehaviour = SeedBehaviour.PerRun
+    const seeds = generateSeeds({
+      count: iterations,
+      start: shouldRandomizeSeed ? undefined : seed,
+    });
+
+    if (graph.nodes[NOISE]) {
+      secondBatchDatumList.push({
+        node_path: NOISE,
+        field_name: 'seed',
+        items: seeds,
+      });
+    }
+
+    // add to metadata
+    if (getHasMetadata(graph)) {
+      removeMetadata(graph, 'seed');
+      secondBatchDatumList.push({
+        node_path: METADATA,
+        field_name: 'seed',
+        items: seeds,
+      });
+    }
+
+    if (graph.nodes[CANVAS_COHERENCE_NOISE]) {
+      secondBatchDatumList.push({
+        node_path: CANVAS_COHERENCE_NOISE,
+        field_name: 'seed',
+        items: seeds.map((seed) => (seed + 1) % NUMPY_RAND_MAX),
+      });
+    }
+    data.push(secondBatchDatumList);
   }
 
+  const extendedPrompts =
+    seedBehaviour === 'PER_PROMPT'
+      ? range(iterations).flatMap(() => prompts)
+      : prompts;
+
+  // zipped batch of prompts
+  if (graph.nodes[POSITIVE_CONDITIONING]) {
+    firstBatchDatumList.push({
+      node_path: POSITIVE_CONDITIONING,
+      field_name: 'prompt',
+      items: extendedPrompts,
+    });
+  }
+
+  // add to metadata
+  if (getHasMetadata(graph)) {
+    removeMetadata(graph, 'positive_prompt');
+    firstBatchDatumList.push({
+      node_path: METADATA,
+      field_name: 'positive_prompt',
+      items: extendedPrompts,
+    });
+  }
+
+  if (shouldConcatSDXLStylePrompt && model?.base_model === 'sdxl') {
+    const stylePrompts = extendedPrompts.map((p) =>
+      [p, positiveStylePrompt].join(' ')
+    );
+
+    if (graph.nodes[POSITIVE_CONDITIONING]) {
+      firstBatchDatumList.push({
+        node_path: POSITIVE_CONDITIONING,
+        field_name: 'style',
+        items: stylePrompts,
+      });
+    }
+
+    // add to metadata
+    if (getHasMetadata(graph)) {
+      removeMetadata(graph, 'positive_style_prompt');
+      firstBatchDatumList.push({
+        node_path: METADATA,
+        field_name: 'positive_style_prompt',
+        items: extendedPrompts,
+      });
+    }
+  }
+
+  data.push(firstBatchDatumList);
+
   const enqueueBatchArg: BatchConfig = {
     prepend,
     batch: {