mirror of
https://github.com/invoke-ai/InvokeAI
synced 2024-08-30 20:32:17 +00:00
feat: add private node for linear UI image outputting (#5106)
## What type of PR is this? (check all applicable)
- [x] Refactor
- [ ] Feature
- [ ] Bug Fix
- [x] Optimization
- [ ] Documentation Update
- [ ] Community Node Submission
## Have you discussed this change with the InvokeAI team?
- [x] Yes
- [ ] No, because:
## Description
[feat: add private node for linear UI image outputting](4599517c6c)
Add a LinearUIOutputInvocation node to be the new terminal node for
Linear UI graphs. This node is private and hidden from the Workflow
Editor, as it is an implementation detail.
The Linear UI was using the Save Image node for this purpose. It allowed
every linear graph to end in a single node type, which handled saving the
image along with its metadata and board. This substantially reduced the
complexity of the linear graphs.
However, this approach caused two related issues:
- Images were saved to disk twice
- There was a noticeable delay between when an image was decoded and when it
showed up in the UI
To resolve this, the new LinearUIOutputInvocation node will handle
adding an image to a board if one is provided.
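On the frontend side, the node added to the graph looks roughly like this. This is a condensed sketch, not the actual helper: the real code is `addLinearUIOutputNode` in the diff below, the types here are simplified stand-ins for the generated API types, and `buildLinearUIOutputNode` is a name used only for this sketch.

```ts
// Simplified stand-ins for the generated API types in services/api/types.
type BoardField = { board_id: string };
type LinearUIOutputInvocation = {
  id: string;
  type: 'linear_ui_output';
  is_intermediate: boolean;
  use_cache: false;
  board?: BoardField;
};

const LINEAR_UI_OUTPUT = 'linear_ui_output';

// Build the hidden terminal node; a board is attached only when the user has
// an auto-add board selected, otherwise the field is left undefined.
const buildLinearUIOutputNode = (
  autoAddBoardId: string,
  isIntermediate: boolean
): LinearUIOutputInvocation => ({
  id: LINEAR_UI_OUTPUT,
  type: 'linear_ui_output',
  is_intermediate: isIntermediate,
  use_cache: false,
  board: autoAddBoardId === 'none' ? undefined : { board_id: autoAddBoardId },
});
```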
Metadata is no longer handled by this unified node. Instead, the metadata
graph helpers now need to be told which node should receive the metadata, and
they wire it to the last node that actually outputs an image. This is an
`l2i` node for txt2img & img2img graphs, and a different image-outputting
node for canvas graphs.
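As a rough sketch of what that means for the helpers (the real signature change is in the `metadata.ts` hunk below; the graph shapes and constant values here are simplified, illustrative stand-ins), `addCoreMetadataNode` now takes the id of the node that should receive the metadata edge:

```ts
// Simplified graph shapes; the real NonNullableGraph type lives in features/nodes.
type Edge = {
  source: { node_id: string; field: string };
  destination: { node_id: string; field: string };
};
type Graph = { nodes: Record<string, unknown>; edges: Edge[] };

const METADATA = 'core_metadata';

// The metadata collector node is created as before, but the metadata edge now
// targets whichever node the caller says actually outputs the image.
const addCoreMetadataNode = (
  graph: Graph,
  metadata: Record<string, unknown>,
  nodeId: string
): void => {
  graph.nodes[METADATA] = { id: METADATA, type: 'core_metadata', ...metadata };
  graph.edges.push({
    source: { node_id: METADATA, field: 'metadata' },
    destination: { node_id: nodeId, field: 'metadata' },
  });
};

// A linear txt2img graph points metadata at its l2i (latents-to-image) node.
const LATENTS_TO_IMAGE = 'latents_to_image';
const graph: Graph = { nodes: {}, edges: [] };
addCoreMetadataNode(graph, { generation_mode: 'txt2img' }, LATENTS_TO_IMAGE);
```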
HRF poses another complication, in that it changes the terminal node. To
handle this, a new metadata util is added called
`setMetadataReceivingNode()`. HRF calls this to change the node that
should receive the graph's metadata.
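For illustration, the HRF builder ends with something like the following. This is condensed: the actual `setMetadataReceivingNode` implementation and the `addHrfToGraph` call site are both in the diff below, and the `Graph`/`Edge` shapes and constant values are simplified stand-ins.

```ts
type Edge = {
  source: { node_id: string; field: string };
  destination: { node_id: string; field: string };
};
type Graph = { nodes: Record<string, unknown>; edges: Edge[] };

const METADATA = 'core_metadata';
const LATENTS_TO_IMAGE_HRF_HR = 'latents_to_image_hrf_hr';

// Drop any existing metadata edge, then re-point it at the new terminal node.
const setMetadataReceivingNode = (graph: Graph, nodeId: string): void => {
  graph.edges = graph.edges.filter((edge) => edge.source.node_id !== METADATA);
  graph.edges.push({
    source: { node_id: METADATA, field: 'metadata' },
    destination: { node_id: nodeId, field: 'metadata' },
  });
};

// addHrfToGraph calls this after it swaps in the high-res decode node.
const graph: Graph = { nodes: {}, edges: [] };
setMetadataReceivingNode(graph, LATENTS_TO_IMAGE_HRF_HR);
```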
This resolves the duplicate images issue and improves perf without
otherwise changing the user experience.
---
Also fixed an issue with HRF metadata.
## Related Tickets & Documents
<!--
For pull requests that relate to or close an issue, please include them
below. For example, having the text "closes #1234" would connect the current
pull request to issue 1234, and when we merge the pull request, GitHub will
automatically close the issue.
-->
- Closes #4688
- Closes #4645
## QA Instructions, Screenshots, Recordings
Generate some images with and without a board selected. Images should end up
in the correct board as usual, just a bit more quickly. Metadata should still
work.
<!--
Please provide steps on how to test changes, any hardware or
software specifications as well as any other pertinent information.
-->
This commit is contained in:
commit
6f719b2c7a
@@ -8,7 +8,7 @@ import numpy
 from PIL import Image, ImageChops, ImageFilter, ImageOps
 from invokeai.app.invocations.primitives import BoardField, ColorField, ImageField, ImageOutput
-from invokeai.app.services.image_records.image_records_common import ImageCategory, ResourceOrigin
+from invokeai.app.services.image_records.image_records_common import ImageCategory, ImageRecordChanges, ResourceOrigin
 from invokeai.app.shared.fields import FieldDescriptions
 from invokeai.backend.image_util.invisible_watermark import InvisibleWatermark
 from invokeai.backend.image_util.safety_checker import SafetyChecker
@@ -1017,3 +1017,32 @@ class SaveImageInvocation(BaseInvocation, WithWorkflow, WithMetadata):
             width=image_dto.width,
             height=image_dto.height,
         )
+
+
+@invocation(
+    "linear_ui_output",
+    title="Linear UI Image Output",
+    tags=["primitives", "image"],
+    category="primitives",
+    version="1.0.0",
+    use_cache=False,
+)
+class LinearUIOutputInvocation(BaseInvocation, WithWorkflow, WithMetadata):
+    """Handles Linear UI Image Outputting tasks."""
+
+    image: ImageField = InputField(description=FieldDescriptions.image)
+    board: Optional[BoardField] = InputField(default=None, description=FieldDescriptions.board, input=Input.Direct)
+
+    def invoke(self, context: InvocationContext) -> ImageOutput:
+        image_dto = context.services.images.get_dto(self.image.image_name)
+
+        if self.board:
+            context.services.board_images.add_image_to_board(self.board.board_id, self.image.image_name)
+
+        context.services.images.update(self.image.image_name, changes=ImageRecordChanges(is_intermediate=False))
+
+        return ImageOutput(
+            image=ImageField(image_name=self.image.image_name),
+            width=image_dto.width,
+            height=image_dto.height,
+        )
@@ -112,7 +112,7 @@ GENERATION_MODES = Literal[
 ]
 
 
-@invocation("core_metadata", title="Core Metadata", tags=["metadata"], category="metadata", version="1.0.0")
+@invocation("core_metadata", title="Core Metadata", tags=["metadata"], category="metadata", version="1.0.1")
 class CoreMetadataInvocation(BaseInvocation):
     """Collects core generation metadata into a MetadataField"""
 
@@ -160,7 +160,7 @@ class CoreMetadataInvocation(BaseInvocation):
     )
 
     # High resolution fix metadata.
-    hrf_enabled: Optional[float] = InputField(
+    hrf_enabled: Optional[bool] = InputField(
         default=None,
         description="Whether or not high resolution fix was enabled.",
     )
@@ -8,7 +8,6 @@ import {
   selectControlAdapterById,
 } from 'features/controlAdapters/store/controlAdaptersSlice';
 import { isControlNetOrT2IAdapter } from 'features/controlAdapters/store/types';
-import { SAVE_IMAGE } from 'features/nodes/util/graphBuilders/constants';
 import { addToast } from 'features/system/store/systemSlice';
 import { t } from 'i18next';
 import { imagesApi } from 'services/api/endpoints/images';
@@ -38,6 +37,7 @@ export const addControlNetImageProcessedListener = () => {
     // ControlNet one-off procressing graph is just the processor node, no edges.
     // Also we need to grab the image.
 
+    const nodeId = ca.processorNode.id;
     const enqueueBatchArg: BatchConfig = {
       prepend: true,
       batch: {
@@ -46,28 +46,11 @@ export const addControlNetImageProcessedListener = () => {
           [ca.processorNode.id]: {
             ...ca.processorNode,
             is_intermediate: true,
+            use_cache: false,
             image: { image_name: ca.controlImage },
           },
-          [SAVE_IMAGE]: {
-            id: SAVE_IMAGE,
-            type: 'save_image',
-            is_intermediate: true,
-            use_cache: false,
-          },
         },
-        edges: [
-          {
-            source: {
-              node_id: ca.processorNode.id,
-              field: 'image',
-            },
-            destination: {
-              node_id: SAVE_IMAGE,
-              field: 'image',
-            },
-          },
-        ],
       },
       runs: 1,
     };
@@ -90,7 +73,7 @@ export const addControlNetImageProcessedListener = () => {
         socketInvocationComplete.match(action) &&
         action.payload.data.queue_batch_id ===
           enqueueResult.batch.batch_id &&
-        action.payload.data.source_node_id === SAVE_IMAGE
+        action.payload.data.source_node_id === nodeId
       );
 
       // We still have to check the output type
@@ -157,6 +157,8 @@ const ImageMetadataActions = (props: Props) => {
     return null;
   }
 
+  console.log(metadata);
+
   return (
     <>
       {metadata.created_by && (
@@ -23,7 +23,7 @@ import {
   RESIZE_HRF,
   VAE_LOADER,
 } from './constants';
-import { upsertMetadata } from './metadata';
+import { setMetadataReceivingNode, upsertMetadata } from './metadata';
 
 // Copy certain connections from previous DENOISE_LATENTS to new DENOISE_LATENTS_HRF.
 function copyConnectionsToDenoiseLatentsHrf(graph: NonNullableGraph): void {
@@ -369,4 +369,5 @@ export const addHrfToGraph = (
     hrf_enabled: hrfEnabled,
     hrf_method: hrfMethod,
   });
+  setMetadataReceivingNode(graph, LATENTS_TO_IMAGE_HRF_HR);
 };
@@ -1,20 +1,20 @@
 import { RootState } from 'app/store/store';
 import { NonNullableGraph } from 'features/nodes/types/types';
 import { activeTabNameSelector } from 'features/ui/store/uiSelectors';
-import { SaveImageInvocation } from 'services/api/types';
+import { LinearUIOutputInvocation } from 'services/api/types';
 import {
   CANVAS_OUTPUT,
   LATENTS_TO_IMAGE,
   LATENTS_TO_IMAGE_HRF_HR,
+  LINEAR_UI_OUTPUT,
   NSFW_CHECKER,
-  SAVE_IMAGE,
   WATERMARKER,
 } from './constants';
 
 /**
  * Set the `use_cache` field on the linear/canvas graph's final image output node to False.
  */
-export const addSaveImageNode = (
+export const addLinearUIOutputNode = (
   state: RootState,
   graph: NonNullableGraph
 ): void => {
@@ -23,18 +23,18 @@ export const addSaveImageNode = (
     activeTabName === 'unifiedCanvas' ? !state.canvas.shouldAutoSave : false;
   const { autoAddBoardId } = state.gallery;
 
-  const saveImageNode: SaveImageInvocation = {
-    id: SAVE_IMAGE,
-    type: 'save_image',
+  const linearUIOutputNode: LinearUIOutputInvocation = {
+    id: LINEAR_UI_OUTPUT,
+    type: 'linear_ui_output',
     is_intermediate,
     use_cache: false,
     board: autoAddBoardId === 'none' ? undefined : { board_id: autoAddBoardId },
   };
 
-  graph.nodes[SAVE_IMAGE] = saveImageNode;
+  graph.nodes[LINEAR_UI_OUTPUT] = linearUIOutputNode;
 
   const destination = {
-    node_id: SAVE_IMAGE,
+    node_id: LINEAR_UI_OUTPUT,
     field: 'image',
   };
@@ -4,9 +4,9 @@ import { ESRGANModelName } from 'features/parameters/store/postprocessingSlice';
 import {
   ESRGANInvocation,
   Graph,
-  SaveImageInvocation,
+  LinearUIOutputInvocation,
 } from 'services/api/types';
-import { REALESRGAN as ESRGAN, SAVE_IMAGE } from './constants';
+import { REALESRGAN as ESRGAN, LINEAR_UI_OUTPUT } from './constants';
 import { addCoreMetadataNode, upsertMetadata } from './metadata';
 
 type Arg = {
@@ -28,9 +28,9 @@ export const buildAdHocUpscaleGraph = ({
     is_intermediate: true,
   };
 
-  const saveImageNode: SaveImageInvocation = {
-    id: SAVE_IMAGE,
-    type: 'save_image',
+  const linearUIOutputNode: LinearUIOutputInvocation = {
+    id: LINEAR_UI_OUTPUT,
+    type: 'linear_ui_output',
     use_cache: false,
     is_intermediate: false,
     board: autoAddBoardId === 'none' ? undefined : { board_id: autoAddBoardId },
@@ -40,7 +40,7 @@ export const buildAdHocUpscaleGraph = ({
     id: `adhoc-esrgan-graph`,
     nodes: {
       [ESRGAN]: realesrganNode,
-      [SAVE_IMAGE]: saveImageNode,
+      [LINEAR_UI_OUTPUT]: linearUIOutputNode,
     },
     edges: [
       {
@@ -49,14 +49,14 @@ export const buildAdHocUpscaleGraph = ({
         field: 'image',
       },
       destination: {
-        node_id: SAVE_IMAGE,
+        node_id: LINEAR_UI_OUTPUT,
         field: 'image',
       },
     },
   ],
  };
 
-  addCoreMetadataNode(graph, {});
+  addCoreMetadataNode(graph, {}, ESRGAN);
   upsertMetadata(graph, {
     esrgan_model: esrganModelName,
   });
@@ -6,7 +6,7 @@ import { addControlNetToLinearGraph } from './addControlNetToLinearGraph';
 import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addLoRAsToGraph } from './addLoRAsToGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -308,10 +308,14 @@ export const buildCanvasImageToImageGraph = (
     });
   }
 
-  addCoreMetadataNode(graph, {
+  addCoreMetadataNode(
+    graph,
+    {
       generation_mode: 'img2img',
       cfg_scale,
-      width: !isUsingScaledDimensions ? width : scaledBoundingBoxDimensions.width,
+      width: !isUsingScaledDimensions
+        ? width
+        : scaledBoundingBoxDimensions.width,
       height: !isUsingScaledDimensions
         ? height
         : scaledBoundingBoxDimensions.height,
@@ -325,7 +329,9 @@ export const buildCanvasImageToImageGraph = (
       clip_skip: clipSkip,
       strength,
       init_image: initialImage.image_name,
-  });
+    },
+    CANVAS_OUTPUT
+  );
 
   // Add Seamless To Graph
   if (seamlessXAxis || seamlessYAxis) {
@@ -357,7 +363,7 @@ export const buildCanvasImageToImageGraph = (
     addWatermarkerToGraph(state, graph, CANVAS_OUTPUT);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -13,7 +13,7 @@ import { addControlNetToLinearGraph } from './addControlNetToLinearGraph';
 import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addLoRAsToGraph } from './addLoRAsToGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -666,7 +666,7 @@ export const buildCanvasInpaintGraph = (
     addWatermarkerToGraph(state, graph, CANVAS_OUTPUT);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -12,7 +12,7 @@ import { addControlNetToLinearGraph } from './addControlNetToLinearGraph';
 import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addLoRAsToGraph } from './addLoRAsToGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -770,7 +770,7 @@ export const buildCanvasOutpaintGraph = (
     addWatermarkerToGraph(state, graph, CANVAS_OUTPUT);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -7,7 +7,7 @@ import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
 import { addSDXLLoRAsToGraph } from './addSDXLLoRAstoGraph';
 import { addSDXLRefinerToGraph } from './addSDXLRefinerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
 import { addWatermarkerToGraph } from './addWatermarkerToGraph';
@@ -319,10 +319,14 @@ export const buildCanvasSDXLImageToImageGraph = (
     });
   }
 
-  addCoreMetadataNode(graph, {
+  addCoreMetadataNode(
+    graph,
+    {
       generation_mode: 'img2img',
       cfg_scale,
-      width: !isUsingScaledDimensions ? width : scaledBoundingBoxDimensions.width,
+      width: !isUsingScaledDimensions
+        ? width
+        : scaledBoundingBoxDimensions.width,
       height: !isUsingScaledDimensions
        ? height
        : scaledBoundingBoxDimensions.height,
@@ -335,7 +339,9 @@ export const buildCanvasSDXLImageToImageGraph = (
       scheduler,
       strength,
       init_image: initialImage.image_name,
-  });
+    },
+    CANVAS_OUTPUT
+  );
 
   // Add Seamless To Graph
   if (seamlessXAxis || seamlessYAxis) {
@@ -380,7 +386,7 @@ export const buildCanvasSDXLImageToImageGraph = (
     addWatermarkerToGraph(state, graph, CANVAS_OUTPUT);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -14,7 +14,7 @@ import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
 import { addSDXLLoRAsToGraph } from './addSDXLLoRAstoGraph';
 import { addSDXLRefinerToGraph } from './addSDXLRefinerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -696,7 +696,7 @@ export const buildCanvasSDXLInpaintGraph = (
     addWatermarkerToGraph(state, graph, CANVAS_OUTPUT);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -13,7 +13,7 @@ import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
 import { addSDXLLoRAsToGraph } from './addSDXLLoRAstoGraph';
 import { addSDXLRefinerToGraph } from './addSDXLRefinerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -799,7 +799,7 @@ export const buildCanvasSDXLOutpaintGraph = (
     addWatermarkerToGraph(state, graph, CANVAS_OUTPUT);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -10,7 +10,7 @@ import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
 import { addSDXLLoRAsToGraph } from './addSDXLLoRAstoGraph';
 import { addSDXLRefinerToGraph } from './addSDXLRefinerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -301,10 +301,14 @@ export const buildCanvasSDXLTextToImageGraph = (
     });
   }
 
-  addCoreMetadataNode(graph, {
+  addCoreMetadataNode(
+    graph,
+    {
       generation_mode: 'txt2img',
       cfg_scale,
-      width: !isUsingScaledDimensions ? width : scaledBoundingBoxDimensions.width,
+      width: !isUsingScaledDimensions
+        ? width
+        : scaledBoundingBoxDimensions.width,
       height: !isUsingScaledDimensions
        ? height
        : scaledBoundingBoxDimensions.height,
@@ -315,7 +319,9 @@ export const buildCanvasSDXLTextToImageGraph = (
       steps,
       rand_device: use_cpu ? 'cpu' : 'cuda',
       scheduler,
-  });
+    },
+    CANVAS_OUTPUT
+  );
 
   // Add Seamless To Graph
   if (seamlessXAxis || seamlessYAxis) {
@@ -360,7 +366,7 @@ export const buildCanvasSDXLTextToImageGraph = (
     addWatermarkerToGraph(state, graph, CANVAS_OUTPUT);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -9,7 +9,7 @@ import { addControlNetToLinearGraph } from './addControlNetToLinearGraph';
 import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addLoRAsToGraph } from './addLoRAsToGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -289,10 +289,14 @@ export const buildCanvasTextToImageGraph = (
     });
   }
 
-  addCoreMetadataNode(graph, {
+  addCoreMetadataNode(
+    graph,
+    {
       generation_mode: 'txt2img',
       cfg_scale,
-      width: !isUsingScaledDimensions ? width : scaledBoundingBoxDimensions.width,
+      width: !isUsingScaledDimensions
+        ? width
+        : scaledBoundingBoxDimensions.width,
       height: !isUsingScaledDimensions
        ? height
        : scaledBoundingBoxDimensions.height,
@@ -304,7 +308,9 @@ export const buildCanvasTextToImageGraph = (
       rand_device: use_cpu ? 'cpu' : 'cuda',
       scheduler,
       clip_skip: clipSkip,
-  });
+    },
+    CANVAS_OUTPUT
+  );
 
   // Add Seamless To Graph
   if (seamlessXAxis || seamlessYAxis) {
@@ -336,7 +342,7 @@ export const buildCanvasTextToImageGraph = (
     addWatermarkerToGraph(state, graph, CANVAS_OUTPUT);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -9,7 +9,7 @@ import { addControlNetToLinearGraph } from './addControlNetToLinearGraph';
 import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addLoRAsToGraph } from './addLoRAsToGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -311,7 +311,9 @@ export const buildLinearImageToImageGraph = (
     });
   }
 
-  addCoreMetadataNode(graph, {
+  addCoreMetadataNode(
+    graph,
+    {
       generation_mode: 'img2img',
       cfg_scale,
       height,
@@ -326,7 +328,9 @@ export const buildLinearImageToImageGraph = (
       clip_skip: clipSkip,
       strength,
       init_image: initialImage.imageName,
-  });
+    },
+    IMAGE_TO_LATENTS
+  );
 
   // Add Seamless To Graph
   if (seamlessXAxis || seamlessYAxis) {
@@ -358,7 +362,7 @@ export const buildLinearImageToImageGraph = (
     addWatermarkerToGraph(state, graph);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -10,7 +10,7 @@ import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
 import { addSDXLLoRAsToGraph } from './addSDXLLoRAstoGraph';
 import { addSDXLRefinerToGraph } from './addSDXLRefinerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -331,7 +331,9 @@ export const buildLinearSDXLImageToImageGraph = (
     });
   }
 
-  addCoreMetadataNode(graph, {
+  addCoreMetadataNode(
+    graph,
+    {
       generation_mode: 'sdxl_img2img',
       cfg_scale,
       height,
@@ -347,7 +349,9 @@ export const buildLinearSDXLImageToImageGraph = (
       init_image: initialImage.imageName,
       positive_style_prompt: positiveStylePrompt,
      negative_style_prompt: negativeStylePrompt,
-  });
+    },
+    IMAGE_TO_LATENTS
+  );
 
   // Add Seamless To Graph
   if (seamlessXAxis || seamlessYAxis) {
@@ -388,7 +392,7 @@ export const buildLinearSDXLImageToImageGraph = (
     addWatermarkerToGraph(state, graph);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -6,7 +6,7 @@ import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
 import { addSDXLLoRAsToGraph } from './addSDXLLoRAstoGraph';
 import { addSDXLRefinerToGraph } from './addSDXLRefinerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -225,7 +225,9 @@ export const buildLinearSDXLTextToImageGraph = (
     ],
   };
 
-  addCoreMetadataNode(graph, {
+  addCoreMetadataNode(
+    graph,
+    {
       generation_mode: 'sdxl_txt2img',
       cfg_scale,
       height,
@@ -239,7 +241,9 @@ export const buildLinearSDXLTextToImageGraph = (
       scheduler,
       positive_style_prompt: positiveStylePrompt,
       negative_style_prompt: negativeStylePrompt,
-  });
+    },
+    LATENTS_TO_IMAGE
+  );
 
   // Add Seamless To Graph
   if (seamlessXAxis || seamlessYAxis) {
@@ -280,7 +284,7 @@ export const buildLinearSDXLTextToImageGraph = (
     addWatermarkerToGraph(state, graph);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -10,7 +10,7 @@ import { addHrfToGraph } from './addHrfToGraph';
 import { addIPAdapterToLinearGraph } from './addIPAdapterToLinearGraph';
 import { addLoRAsToGraph } from './addLoRAsToGraph';
 import { addNSFWCheckerToGraph } from './addNSFWCheckerToGraph';
-import { addSaveImageNode } from './addSaveImageNode';
+import { addLinearUIOutputNode } from './addLinearUIOutputNode';
 import { addSeamlessToLinearGraph } from './addSeamlessToLinearGraph';
 import { addT2IAdaptersToLinearGraph } from './addT2IAdapterToLinearGraph';
 import { addVAEToGraph } from './addVAEToGraph';
@@ -234,7 +234,9 @@ export const buildLinearTextToImageGraph = (
     ],
   };
 
-  addCoreMetadataNode(graph, {
+  addCoreMetadataNode(
+    graph,
+    {
       generation_mode: 'txt2img',
       cfg_scale,
       height,
@@ -247,7 +249,9 @@ export const buildLinearTextToImageGraph = (
       rand_device: use_cpu ? 'cpu' : 'cuda',
       scheduler,
       clip_skip: clipSkip,
-  });
+    },
+    LATENTS_TO_IMAGE
+  );
 
   // Add Seamless To Graph
   if (seamlessXAxis || seamlessYAxis) {
@@ -286,7 +290,7 @@ export const buildLinearTextToImageGraph = (
     addWatermarkerToGraph(state, graph);
   }
 
-  addSaveImageNode(state, graph);
+  addLinearUIOutputNode(state, graph);
 
   return graph;
 };
@@ -9,7 +9,7 @@ export const LATENTS_TO_IMAGE_HRF_LR = 'latents_to_image_hrf_lr';
 export const IMAGE_TO_LATENTS_HRF = 'image_to_latents_hrf';
 export const RESIZE_HRF = 'resize_hrf';
 export const ESRGAN_HRF = 'esrgan_hrf';
-export const SAVE_IMAGE = 'save_image';
+export const LINEAR_UI_OUTPUT = 'linear_ui_output';
 export const NSFW_CHECKER = 'nsfw_checker';
 export const WATERMARKER = 'invisible_watermark';
 export const NOISE = 'noise';
@@ -1,11 +1,12 @@
 import { NonNullableGraph } from 'features/nodes/types/types';
 import { CoreMetadataInvocation } from 'services/api/types';
 import { JsonObject } from 'type-fest';
-import { METADATA, SAVE_IMAGE } from './constants';
+import { METADATA } from './constants';
 
 export const addCoreMetadataNode = (
   graph: NonNullableGraph,
-  metadata: Partial<CoreMetadataInvocation>
+  metadata: Partial<CoreMetadataInvocation>,
+  nodeId: string
 ): void => {
   graph.nodes[METADATA] = {
     id: METADATA,
@@ -19,7 +20,7 @@ export const addCoreMetadataNode = (
       field: 'metadata',
     },
     destination: {
-      node_id: SAVE_IMAGE,
+      node_id: nodeId,
       field: 'metadata',
     },
   });
@@ -64,3 +65,21 @@ export const getHasMetadata = (graph: NonNullableGraph): boolean => {
 
   return Boolean(metadataNode);
 };
+
+export const setMetadataReceivingNode = (
+  graph: NonNullableGraph,
+  nodeId: string
+) => {
+  graph.edges = graph.edges.filter((edge) => edge.source.node_id !== METADATA);
+
+  graph.edges.push({
+    source: {
+      node_id: METADATA,
+      field: 'metadata',
+    },
+    destination: {
+      node_id: nodeId,
+      field: 'metadata',
+    },
+  });
+};
@@ -19,7 +19,7 @@ const RESERVED_INPUT_FIELD_NAMES = ['id', 'type', 'use_cache'];
 const RESERVED_OUTPUT_FIELD_NAMES = ['type'];
 const RESERVED_FIELD_TYPES = ['IsIntermediate'];
 
-const invocationDenylist: AnyInvocationType[] = ['graph'];
+const invocationDenylist: AnyInvocationType[] = ['graph', 'linear_ui_output'];
 
 const isReservedInputField = (nodeType: string, fieldName: string) => {
   if (RESERVED_INPUT_FIELD_NAMES.includes(fieldName)) {
File diff suppressed because one or more lines are too long
@@ -142,7 +142,7 @@ export type DivideInvocation = s['DivideInvocation'];
 export type ImageNSFWBlurInvocation = s['ImageNSFWBlurInvocation'];
 export type ImageWatermarkInvocation = s['ImageWatermarkInvocation'];
 export type SeamlessModeInvocation = s['SeamlessModeInvocation'];
-export type SaveImageInvocation = s['SaveImageInvocation'];
+export type LinearUIOutputInvocation = s['LinearUIOutputInvocation'];
 export type MetadataInvocation = s['MetadataInvocation'];
 export type CoreMetadataInvocation = s['CoreMetadataInvocation'];
 export type MetadataItemInvocation = s['MetadataItemInvocation'];