diff --git a/invokeai/app/api/routers/images.py b/invokeai/app/api/routers/images.py
index 8e3824ce93..2bc0b48251 100644
--- a/invokeai/app/api/routers/images.py
+++ b/invokeai/app/api/routers/images.py
@@ -233,21 +233,14 @@ async def get_image_workflow(
)
async def get_image_full(
image_name: str = Path(description="The name of full-resolution image file to get"),
-) -> FileResponse:
+) -> Response:
"""Gets a full-resolution image file"""
try:
path = ApiDependencies.invoker.services.images.get_path(image_name)
-
- if not ApiDependencies.invoker.services.images.validate_path(path):
- raise HTTPException(status_code=404)
-
- response = FileResponse(
- path,
- media_type="image/png",
- filename=image_name,
- content_disposition_type="inline",
- )
+ with open(path, "rb") as f:
+ content = f.read()
+ response = Response(content, media_type="image/png")
response.headers["Cache-Control"] = f"max-age={IMAGE_MAX_AGE}"
return response
except Exception:
@@ -268,15 +261,14 @@ async def get_image_full(
)
async def get_image_thumbnail(
image_name: str = Path(description="The name of thumbnail image file to get"),
-) -> FileResponse:
+) -> Response:
"""Gets a thumbnail image file"""
try:
path = ApiDependencies.invoker.services.images.get_path(image_name, thumbnail=True)
- if not ApiDependencies.invoker.services.images.validate_path(path):
- raise HTTPException(status_code=404)
-
- response = FileResponse(path, media_type="image/webp", content_disposition_type="inline")
+ with open(path, "rb") as f:
+ content = f.read()
+ response = Response(content, media_type="image/webp")
response.headers["Cache-Control"] = f"max-age={IMAGE_MAX_AGE}"
return response
except Exception:
diff --git a/invokeai/app/api_app.py b/invokeai/app/api_app.py
index dca0bc139d..88820a0c4c 100644
--- a/invokeai/app/api_app.py
+++ b/invokeai/app/api_app.py
@@ -161,6 +161,7 @@ def invoke_api() -> None:
# Taken from https://waylonwalker.com/python-find-available-port/, thanks Waylon!
# https://github.com/WaylonWalker
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+ s.settimeout(1)
if s.connect_ex(("localhost", port)) == 0:
return find_port(port=port + 1)
else:
diff --git a/invokeai/app/invocations/fields.py b/invokeai/app/invocations/fields.py
index 8374a58959..f9a483f84c 100644
--- a/invokeai/app/invocations/fields.py
+++ b/invokeai/app/invocations/fields.py
@@ -48,6 +48,7 @@ class UIType(str, Enum, metaclass=MetaEnum):
ControlNetModel = "ControlNetModelField"
IPAdapterModel = "IPAdapterModelField"
T2IAdapterModel = "T2IAdapterModelField"
+ SpandrelImageToImageModel = "SpandrelImageToImageModelField"
# endregion
# region Misc Field Types
@@ -134,6 +135,7 @@ class FieldDescriptions:
sdxl_main_model = "SDXL Main model (UNet, VAE, CLIP1, CLIP2) to load"
sdxl_refiner_model = "SDXL Refiner Main Modde (UNet, VAE, CLIP2) to load"
onnx_main_model = "ONNX Main model (UNet, VAE, CLIP) to load"
+ spandrel_image_to_image_model = "Image-to-Image model"
lora_weight = "The weight at which the LoRA is applied to each model"
compel_prompt = "Prompt to be parsed by Compel to create a conditioning tensor"
raw_prompt = "Raw prompt text (no parsing)"
diff --git a/invokeai/app/invocations/spandrel_image_to_image.py b/invokeai/app/invocations/spandrel_image_to_image.py
new file mode 100644
index 0000000000..bbe31af644
--- /dev/null
+++ b/invokeai/app/invocations/spandrel_image_to_image.py
@@ -0,0 +1,144 @@
+import numpy as np
+import torch
+from PIL import Image
+from tqdm import tqdm
+
+from invokeai.app.invocations.baseinvocation import BaseInvocation, invocation
+from invokeai.app.invocations.fields import (
+ FieldDescriptions,
+ ImageField,
+ InputField,
+ UIType,
+ WithBoard,
+ WithMetadata,
+)
+from invokeai.app.invocations.model import ModelIdentifierField
+from invokeai.app.invocations.primitives import ImageOutput
+from invokeai.app.services.session_processor.session_processor_common import CanceledException
+from invokeai.app.services.shared.invocation_context import InvocationContext
+from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
+from invokeai.backend.tiles.tiles import calc_tiles_min_overlap
+from invokeai.backend.tiles.utils import TBLR, Tile
+
+
+@invocation("spandrel_image_to_image", title="Image-to-Image", tags=["upscale"], category="upscale", version="1.1.0")
+class SpandrelImageToImageInvocation(BaseInvocation, WithMetadata, WithBoard):
+ """Run any spandrel image-to-image model (https://github.com/chaiNNer-org/spandrel)."""
+
+ image: ImageField = InputField(description="The input image")
+ image_to_image_model: ModelIdentifierField = InputField(
+ title="Image-to-Image Model",
+ description=FieldDescriptions.spandrel_image_to_image_model,
+ ui_type=UIType.SpandrelImageToImageModel,
+ )
+ tile_size: int = InputField(
+ default=512, description="The tile size for tiled image-to-image. Set to 0 to disable tiling."
+ )
+
+ def _scale_tile(self, tile: Tile, scale: int) -> Tile:
+ return Tile(
+ coords=TBLR(
+ top=tile.coords.top * scale,
+ bottom=tile.coords.bottom * scale,
+ left=tile.coords.left * scale,
+ right=tile.coords.right * scale,
+ ),
+ overlap=TBLR(
+ top=tile.overlap.top * scale,
+ bottom=tile.overlap.bottom * scale,
+ left=tile.overlap.left * scale,
+ right=tile.overlap.right * scale,
+ ),
+ )
+
+ @torch.inference_mode()
+ def invoke(self, context: InvocationContext) -> ImageOutput:
+ # Images are converted to RGB, because most models don't support an alpha channel. In the future, we may want to
+ # revisit this.
+ image = context.images.get_pil(self.image.image_name, mode="RGB")
+
+ # Compute the image tiles.
+ if self.tile_size > 0:
+ min_overlap = 20
+ tiles = calc_tiles_min_overlap(
+ image_height=image.height,
+ image_width=image.width,
+ tile_height=self.tile_size,
+ tile_width=self.tile_size,
+ min_overlap=min_overlap,
+ )
+ else:
+ # No tiling. Generate a single tile that covers the entire image.
+ min_overlap = 0
+ tiles = [
+ Tile(
+ coords=TBLR(top=0, bottom=image.height, left=0, right=image.width),
+ overlap=TBLR(top=0, bottom=0, left=0, right=0),
+ )
+ ]
+
+ # Sort tiles first by left x coordinate, then by top y coordinate. During tile processing, we want to iterate
+ # over tiles left-to-right, top-to-bottom.
+ tiles = sorted(tiles, key=lambda x: x.coords.left)
+ tiles = sorted(tiles, key=lambda x: x.coords.top)
+
+ # Prepare input image for inference.
+ image_tensor = SpandrelImageToImageModel.pil_to_tensor(image)
+
+ # Load the model.
+ spandrel_model_info = context.models.load(self.image_to_image_model)
+
+ # Run the model on each tile.
+ with spandrel_model_info as spandrel_model:
+ assert isinstance(spandrel_model, SpandrelImageToImageModel)
+
+ # Scale the tiles for re-assembling the final image.
+ scale = spandrel_model.scale
+ scaled_tiles = [self._scale_tile(tile, scale=scale) for tile in tiles]
+
+ # Prepare the output tensor.
+ _, channels, height, width = image_tensor.shape
+ output_tensor = torch.zeros(
+ (height * scale, width * scale, channels), dtype=torch.uint8, device=torch.device("cpu")
+ )
+
+ image_tensor = image_tensor.to(device=spandrel_model.device, dtype=spandrel_model.dtype)
+
+ for tile, scaled_tile in tqdm(list(zip(tiles, scaled_tiles, strict=True)), desc="Upscaling Tiles"):
+ # Exit early if the invocation has been canceled.
+ if context.util.is_canceled():
+ raise CanceledException
+
+ # Extract the current tile from the input tensor.
+ input_tile = image_tensor[
+ :, :, tile.coords.top : tile.coords.bottom, tile.coords.left : tile.coords.right
+ ].to(device=spandrel_model.device, dtype=spandrel_model.dtype)
+
+ # Run the model on the tile.
+ output_tile = spandrel_model.run(input_tile)
+
+ # Convert the output tile into the output tensor's format.
+ # (N, C, H, W) -> (C, H, W)
+ output_tile = output_tile.squeeze(0)
+ # (C, H, W) -> (H, W, C)
+ output_tile = output_tile.permute(1, 2, 0)
+ output_tile = output_tile.clamp(0, 1)
+ output_tile = (output_tile * 255).to(dtype=torch.uint8, device=torch.device("cpu"))
+
+ # Merge the output tile into the output tensor.
+ # We only keep half of the overlap on the top and left side of the tile. We do this in case there are
+ # edge artifacts. We don't bother with any 'blending' in the current implementation - for most upscalers
+ # it seems unnecessary, but we may find a need in the future.
+ top_overlap = scaled_tile.overlap.top // 2
+ left_overlap = scaled_tile.overlap.left // 2
+ output_tensor[
+ scaled_tile.coords.top + top_overlap : scaled_tile.coords.bottom,
+ scaled_tile.coords.left + left_overlap : scaled_tile.coords.right,
+ :,
+ ] = output_tile[top_overlap:, left_overlap:, :]
+
+ # Convert the output tensor to a PIL image.
+ np_image = output_tensor.detach().numpy().astype(np.uint8)
+ pil_image = Image.fromarray(np_image)
+ image_dto = context.images.save(image=pil_image)
+ return ImageOutput.build(image_dto)
diff --git a/invokeai/app/services/workflow_records/default_workflows/ESRGAN Upscaling with Canny ControlNet.json b/invokeai/app/services/workflow_records/default_workflows/ESRGAN Upscaling with Canny ControlNet.json
index dd98eca18f..2cadcae961 100644
--- a/invokeai/app/services/workflow_records/default_workflows/ESRGAN Upscaling with Canny ControlNet.json
+++ b/invokeai/app/services/workflow_records/default_workflows/ESRGAN Upscaling with Canny ControlNet.json
@@ -2,7 +2,7 @@
"name": "ESRGAN Upscaling with Canny ControlNet",
"author": "InvokeAI",
"description": "Sample workflow for using Upscaling with ControlNet with SD1.5",
- "version": "2.0.0",
+ "version": "2.1.0",
"contact": "invoke@invoke.ai",
"tags": "upscale, controlnet, default",
"notes": "",
@@ -36,14 +36,13 @@
"version": "3.0.0",
"category": "default"
},
- "id": "0e71a27e-a22b-4a9b-b20a-6d789abff2bc",
"nodes": [
{
- "id": "e8bf67fe-67de-4227-87eb-79e86afdfc74",
+ "id": "63b6ab7e-5b05-4d1b-a3b1-42d8e53ce16b",
"type": "invocation",
"data": {
- "id": "e8bf67fe-67de-4227-87eb-79e86afdfc74",
- "version": "1.1.1",
+ "id": "63b6ab7e-5b05-4d1b-a3b1-42d8e53ce16b",
+ "version": "1.2.0",
"nodePack": "invokeai",
"label": "",
"notes": "",
@@ -57,6 +56,10 @@
"clip": {
"name": "clip",
"label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
}
},
"isOpen": true,
@@ -65,122 +68,63 @@
},
"position": {
"x": 1250,
- "y": 1500
+ "y": 1200
}
},
{
- "id": "d8ace142-c05f-4f1d-8982-88dc7473958d",
+ "id": "5ca498a4-c8c8-4580-a396-0c984317205d",
"type": "invocation",
"data": {
- "id": "d8ace142-c05f-4f1d-8982-88dc7473958d",
- "version": "1.0.2",
+ "id": "5ca498a4-c8c8-4580-a396-0c984317205d",
+ "version": "1.1.0",
"nodePack": "invokeai",
"label": "",
"notes": "",
- "type": "main_model_loader",
+ "type": "i2l",
"inputs": {
- "model": {
- "name": "model",
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
"label": "",
- "value": {
- "key": "5cd43ca0-dd0a-418d-9f7e-35b2b9d5e106",
- "hash": "blake3:6987f323017f597213cc3264250edf57056d21a40a0a85d83a1a33a7d44dc41a",
- "name": "Deliberate_v5",
- "base": "sd-1",
- "type": "main"
- }
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 700,
- "y": 1375
- }
- },
- {
- "id": "771bdf6a-0813-4099-a5d8-921a138754d4",
- "type": "invocation",
- "data": {
- "id": "771bdf6a-0813-4099-a5d8-921a138754d4",
- "version": "1.0.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "image",
- "inputs": {
- "image": {
- "name": "image",
- "label": "Image To Upscale",
- "value": {
- "image_name": "d2e42ba6-d420-496b-82db-91c9b75956c1.png"
- }
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 344.5593065887157,
- "y": 1698.161491368619
- }
- },
- {
- "id": "f7564dd2-9539-47f2-ac13-190804461f4e",
- "type": "invocation",
- "data": {
- "id": "f7564dd2-9539-47f2-ac13-190804461f4e",
- "version": "1.3.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "esrgan",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "image": {
- "name": "image",
- "label": ""
- },
- "model_name": {
- "name": "model_name",
- "label": "Upscaler Model",
- "value": "RealESRGAN_x2plus.pth"
+ "value": false
},
"tile_size": {
"name": "tile_size",
"label": "",
- "value": 400
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": false
}
},
- "isOpen": true,
+ "isOpen": false,
"isIntermediate": true,
"useCache": true
},
"position": {
- "x": 717.3863693661265,
- "y": 1721.9215053134815
+ "x": 1650,
+ "y": 1675
}
},
{
- "id": "1d887701-df21-4966-ae6e-a7d82307d7bd",
+ "id": "3ed9b2ef-f4ec-40a7-94db-92e63b583ec0",
"type": "invocation",
"data": {
- "id": "1d887701-df21-4966-ae6e-a7d82307d7bd",
- "version": "1.3.2",
+ "id": "3ed9b2ef-f4ec-40a7-94db-92e63b583ec0",
+ "version": "1.3.0",
"nodePack": "invokeai",
"label": "",
"notes": "",
- "type": "canny_image_processor",
+ "type": "l2i",
"inputs": {
"board": {
"name": "board",
@@ -190,38 +134,37 @@
"name": "metadata",
"label": ""
},
- "image": {
- "name": "image",
+ "latents": {
+ "name": "latents",
"label": ""
},
- "detect_resolution": {
- "name": "detect_resolution",
- "label": "",
- "value": 512
+ "vae": {
+ "name": "vae",
+ "label": ""
},
- "image_resolution": {
- "name": "image_resolution",
+ "tiled": {
+ "name": "tiled",
"label": "",
- "value": 512
+ "value": false
},
- "low_threshold": {
- "name": "low_threshold",
+ "tile_size": {
+ "name": "tile_size",
"label": "",
- "value": 100
+ "value": 0
},
- "high_threshold": {
- "name": "high_threshold",
+ "fp32": {
+ "name": "fp32",
"label": "",
- "value": 200
+ "value": false
}
},
"isOpen": true,
- "isIntermediate": true,
+ "isIntermediate": false,
"useCache": true
},
"position": {
- "x": 1200,
- "y": 1900
+ "x": 2559.4751127537957,
+ "y": 1246.6000376741406
}
},
{
@@ -229,7 +172,7 @@
"type": "invocation",
"data": {
"id": "ca1d020c-89a8-4958-880a-016d28775cfa",
- "version": "1.1.1",
+ "version": "1.1.2",
"nodePack": "invokeai",
"label": "",
"notes": "",
@@ -285,6 +228,193 @@
"y": 1902.9649340196056
}
},
+ {
+ "id": "1d887701-df21-4966-ae6e-a7d82307d7bd",
+ "type": "invocation",
+ "data": {
+ "id": "1d887701-df21-4966-ae6e-a7d82307d7bd",
+ "version": "1.3.3",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "canny_image_processor",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "detect_resolution": {
+ "name": "detect_resolution",
+ "label": "",
+ "value": 512
+ },
+ "image_resolution": {
+ "name": "image_resolution",
+ "label": "",
+ "value": 512
+ },
+ "low_threshold": {
+ "name": "low_threshold",
+ "label": "",
+ "value": 100
+ },
+ "high_threshold": {
+ "name": "high_threshold",
+ "label": "",
+ "value": 200
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 1200,
+ "y": 1900
+ }
+ },
+ {
+ "id": "d8ace142-c05f-4f1d-8982-88dc7473958d",
+ "type": "invocation",
+ "data": {
+ "id": "d8ace142-c05f-4f1d-8982-88dc7473958d",
+ "version": "1.0.3",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "main_model_loader",
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": "",
+ "value": {
+ "key": "5cd43ca0-dd0a-418d-9f7e-35b2b9d5e106",
+ "hash": "blake3:6987f323017f597213cc3264250edf57056d21a40a0a85d83a1a33a7d44dc41a",
+ "name": "Deliberate_v5",
+ "base": "sd-1",
+ "type": "main"
+ }
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 700,
+ "y": 1375
+ }
+ },
+ {
+ "id": "e8bf67fe-67de-4227-87eb-79e86afdfc74",
+ "type": "invocation",
+ "data": {
+ "id": "e8bf67fe-67de-4227-87eb-79e86afdfc74",
+ "version": "1.2.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "compel",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "",
+ "value": ""
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 1250,
+ "y": 1500
+ }
+ },
+ {
+ "id": "771bdf6a-0813-4099-a5d8-921a138754d4",
+ "type": "invocation",
+ "data": {
+ "id": "771bdf6a-0813-4099-a5d8-921a138754d4",
+ "version": "1.0.2",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "image",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": "Image To Upscale"
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 344.5593065887157,
+ "y": 1698.161491368619
+ }
+ },
+ {
+ "id": "f7564dd2-9539-47f2-ac13-190804461f4e",
+ "type": "invocation",
+ "data": {
+ "id": "f7564dd2-9539-47f2-ac13-190804461f4e",
+ "version": "1.3.2",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "esrgan",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "model_name": {
+ "name": "model_name",
+ "label": "Upscaler Model",
+ "value": "RealESRGAN_x2plus.pth"
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 400
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 717.3863693661265,
+ "y": 1721.9215053134815
+ }
+ },
{
"id": "f50624ce-82bf-41d0-bdf7-8aab11a80d48",
"type": "invocation",
@@ -413,122 +543,6 @@
"y": 1232.6219060454753
}
},
- {
- "id": "3ed9b2ef-f4ec-40a7-94db-92e63b583ec0",
- "type": "invocation",
- "data": {
- "id": "3ed9b2ef-f4ec-40a7-94db-92e63b583ec0",
- "version": "1.2.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "l2i",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "latents": {
- "name": "latents",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": false
- }
- },
- "isOpen": true,
- "isIntermediate": false,
- "useCache": true
- },
- "position": {
- "x": 2559.4751127537957,
- "y": 1246.6000376741406
- }
- },
- {
- "id": "5ca498a4-c8c8-4580-a396-0c984317205d",
- "type": "invocation",
- "data": {
- "id": "5ca498a4-c8c8-4580-a396-0c984317205d",
- "version": "1.0.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "i2l",
- "inputs": {
- "image": {
- "name": "image",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": false
- }
- },
- "isOpen": false,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 1650,
- "y": 1675
- }
- },
- {
- "id": "63b6ab7e-5b05-4d1b-a3b1-42d8e53ce16b",
- "type": "invocation",
- "data": {
- "id": "63b6ab7e-5b05-4d1b-a3b1-42d8e53ce16b",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "compel",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "",
- "value": ""
- },
- "clip": {
- "name": "clip",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 1250,
- "y": 1200
- }
- },
{
"id": "eb8f6f8a-c7b1-4914-806e-045ee2717a35",
"type": "invocation",
diff --git a/invokeai/app/services/workflow_records/default_workflows/Face Detailer with IP-Adapter & Canny (See Note in Details).json b/invokeai/app/services/workflow_records/default_workflows/Face Detailer with IP-Adapter & Canny (See Note in Details).json
index 8c7dcee30c..481ba85e64 100644
--- a/invokeai/app/services/workflow_records/default_workflows/Face Detailer with IP-Adapter & Canny (See Note in Details).json
+++ b/invokeai/app/services/workflow_records/default_workflows/Face Detailer with IP-Adapter & Canny (See Note in Details).json
@@ -2,7 +2,7 @@
"name": "Face Detailer with IP-Adapter & Canny (See Note in Details)",
"author": "kosmoskatten",
"description": "A workflow to add detail to and improve faces. This workflow is most effective when used with a model that creates realistic outputs. ",
- "version": "2.0.0",
+ "version": "2.1.0",
"contact": "invoke@invoke.ai",
"tags": "face detailer, IP-Adapter, Canny",
"notes": "Set this image as the blur mask: https://i.imgur.com/Gxi61zP.png",
@@ -37,16 +37,219 @@
}
],
"meta": {
- "category": "default",
- "version": "3.0.0"
+ "version": "3.0.0",
+ "category": "default"
},
"nodes": [
{
- "id": "44f2c190-eb03-460d-8d11-a94d13b33f19",
+ "id": "c6359181-6479-40ec-bf3a-b7e8451683b8",
"type": "invocation",
"data": {
- "id": "44f2c190-eb03-460d-8d11-a94d13b33f19",
- "version": "1.1.1",
+ "id": "c6359181-6479-40ec-bf3a-b7e8451683b8",
+ "version": "1.0.3",
+ "label": "",
+ "notes": "",
+ "type": "main_model_loader",
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 2031.5518710051792,
+ "y": -492.1742944307074
+ }
+ },
+ {
+ "id": "8fe598c6-d447-44fa-a165-4975af77d080",
+ "type": "invocation",
+ "data": {
+ "id": "8fe598c6-d447-44fa-a165-4975af77d080",
+ "version": "1.3.3",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "canny_image_processor",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "detect_resolution": {
+ "name": "detect_resolution",
+ "label": "",
+ "value": 512
+ },
+ "image_resolution": {
+ "name": "image_resolution",
+ "label": "",
+ "value": 512
+ },
+ "low_threshold": {
+ "name": "low_threshold",
+ "label": "",
+ "value": 100
+ },
+ "high_threshold": {
+ "name": "high_threshold",
+ "label": "",
+ "value": 200
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 3519.4131037388597,
+ "y": 576.7946795840575
+ }
+ },
+ {
+ "id": "f60b6161-8f26-42f6-89ff-545e6011e501",
+ "type": "invocation",
+ "data": {
+ "id": "f60b6161-8f26-42f6-89ff-545e6011e501",
+ "version": "1.1.2",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "controlnet",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "control_model": {
+ "name": "control_model",
+ "label": "Control Model (select canny)",
+ "value": {
+ "key": "5bdaacf7-a7a3-4fb8-b394-cc0ffbb8941d",
+ "hash": "blake3:260c7f8e10aefea9868cfc68d89970e91033bd37132b14b903e70ee05ebf530e",
+ "name": "sd-controlnet-canny",
+ "base": "sd-1",
+ "type": "controlnet"
+ }
+ },
+ "control_weight": {
+ "name": "control_weight",
+ "label": "",
+ "value": 0.5
+ },
+ "begin_step_percent": {
+ "name": "begin_step_percent",
+ "label": "",
+ "value": 0
+ },
+ "end_step_percent": {
+ "name": "end_step_percent",
+ "label": "",
+ "value": 0.5
+ },
+ "control_mode": {
+ "name": "control_mode",
+ "label": "",
+ "value": "balanced"
+ },
+ "resize_mode": {
+ "name": "resize_mode",
+ "label": "",
+ "value": "just_resize"
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 3950,
+ "y": 150
+ }
+ },
+ {
+ "id": "22b750db-b85e-486b-b278-ac983e329813",
+ "type": "invocation",
+ "data": {
+ "id": "22b750db-b85e-486b-b278-ac983e329813",
+ "version": "1.4.1",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "ip_adapter",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "ip_adapter_model": {
+ "name": "ip_adapter_model",
+ "label": "IP-Adapter Model (select IP Adapter Face)",
+ "value": {
+ "key": "1cc210bb-4d0a-4312-b36c-b5d46c43768e",
+ "hash": "blake3:3d669dffa7471b357b4df088b99ffb6bf4d4383d5e0ef1de5ec1c89728a3d5a5",
+ "name": "ip_adapter_sd15",
+ "base": "sd-1",
+ "type": "ip_adapter"
+ }
+ },
+ "clip_vision_model": {
+ "name": "clip_vision_model",
+ "label": "",
+ "value": "ViT-H"
+ },
+ "weight": {
+ "name": "weight",
+ "label": "",
+ "value": 0.5
+ },
+ "method": {
+ "name": "method",
+ "label": "",
+ "value": "full"
+ },
+ "begin_step_percent": {
+ "name": "begin_step_percent",
+ "label": "",
+ "value": 0
+ },
+ "end_step_percent": {
+ "name": "end_step_percent",
+ "label": "",
+ "value": 0.8
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 3575,
+ "y": -200
+ }
+ },
+ {
+ "id": "f4d15b64-c4a6-42a5-90fc-e4ed07a0ca65",
+ "type": "invocation",
+ "data": {
+ "id": "f4d15b64-c4a6-42a5-90fc-e4ed07a0ca65",
+ "version": "1.2.0",
"nodePack": "invokeai",
"label": "",
"notes": "",
@@ -60,6 +263,140 @@
"clip": {
"name": "clip",
"label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 2550,
+ "y": -525
+ }
+ },
+ {
+ "id": "2224ed72-2453-4252-bd89-3085240e0b6f",
+ "type": "invocation",
+ "data": {
+ "id": "2224ed72-2453-4252-bd89-3085240e0b6f",
+ "version": "1.3.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "l2i",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": true
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": false,
+ "useCache": true
+ },
+ "position": {
+ "x": 4980.1395106966565,
+ "y": -255.9158921745602
+ }
+ },
+ {
+ "id": "de8b1a48-a2e4-42ca-90bb-66058bffd534",
+ "type": "invocation",
+ "data": {
+ "id": "de8b1a48-a2e4-42ca-90bb-66058bffd534",
+ "version": "1.1.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "i2l",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": true
+ }
+ },
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 3100,
+ "y": -275
+ }
+ },
+ {
+ "id": "44f2c190-eb03-460d-8d11-a94d13b33f19",
+ "type": "invocation",
+ "data": {
+ "id": "44f2c190-eb03-460d-8d11-a94d13b33f19",
+ "version": "1.2.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "compel",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "",
+ "value": ""
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
}
},
"isOpen": true,
@@ -251,45 +588,6 @@
"y": 0
}
},
- {
- "id": "de8b1a48-a2e4-42ca-90bb-66058bffd534",
- "type": "invocation",
- "data": {
- "id": "de8b1a48-a2e4-42ca-90bb-66058bffd534",
- "version": "1.0.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "i2l",
- "inputs": {
- "image": {
- "name": "image",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": true
- }
- },
- "isOpen": false,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 3100,
- "y": -275
- }
- },
{
"id": "bd06261d-a74a-4d1f-8374-745ed6194bc2",
"type": "invocation",
@@ -418,53 +716,6 @@
"y": -175
}
},
- {
- "id": "2224ed72-2453-4252-bd89-3085240e0b6f",
- "type": "invocation",
- "data": {
- "id": "2224ed72-2453-4252-bd89-3085240e0b6f",
- "version": "1.2.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "l2i",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "latents": {
- "name": "latents",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": true
- }
- },
- "isOpen": true,
- "isIntermediate": false,
- "useCache": true
- },
- "position": {
- "x": 4980.1395106966565,
- "y": -255.9158921745602
- }
- },
{
"id": "2974e5b3-3d41-4b6f-9953-cd21e8f3a323",
"type": "invocation",
@@ -692,201 +943,6 @@
"y": -275
}
},
- {
- "id": "f4d15b64-c4a6-42a5-90fc-e4ed07a0ca65",
- "type": "invocation",
- "data": {
- "id": "f4d15b64-c4a6-42a5-90fc-e4ed07a0ca65",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "compel",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "",
- "value": ""
- },
- "clip": {
- "name": "clip",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 2550,
- "y": -525
- }
- },
- {
- "id": "22b750db-b85e-486b-b278-ac983e329813",
- "type": "invocation",
- "data": {
- "id": "22b750db-b85e-486b-b278-ac983e329813",
- "version": "1.2.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "ip_adapter",
- "inputs": {
- "image": {
- "name": "image",
- "label": ""
- },
- "ip_adapter_model": {
- "name": "ip_adapter_model",
- "label": "IP-Adapter Model (select IP Adapter Face)",
- "value": {
- "key": "1cc210bb-4d0a-4312-b36c-b5d46c43768e",
- "hash": "blake3:3d669dffa7471b357b4df088b99ffb6bf4d4383d5e0ef1de5ec1c89728a3d5a5",
- "name": "ip_adapter_sd15",
- "base": "sd-1",
- "type": "ip_adapter"
- }
- },
- "weight": {
- "name": "weight",
- "label": "",
- "value": 0.5
- },
- "begin_step_percent": {
- "name": "begin_step_percent",
- "label": "",
- "value": 0
- },
- "end_step_percent": {
- "name": "end_step_percent",
- "label": "",
- "value": 0.8
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 3575,
- "y": -200
- }
- },
- {
- "id": "f60b6161-8f26-42f6-89ff-545e6011e501",
- "type": "invocation",
- "data": {
- "id": "f60b6161-8f26-42f6-89ff-545e6011e501",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "controlnet",
- "inputs": {
- "image": {
- "name": "image",
- "label": ""
- },
- "control_model": {
- "name": "control_model",
- "label": "Control Model (select canny)",
- "value": {
- "key": "5bdaacf7-a7a3-4fb8-b394-cc0ffbb8941d",
- "hash": "blake3:260c7f8e10aefea9868cfc68d89970e91033bd37132b14b903e70ee05ebf530e",
- "name": "sd-controlnet-canny",
- "base": "sd-1",
- "type": "controlnet"
- }
- },
- "control_weight": {
- "name": "control_weight",
- "label": "",
- "value": 0.5
- },
- "begin_step_percent": {
- "name": "begin_step_percent",
- "label": "",
- "value": 0
- },
- "end_step_percent": {
- "name": "end_step_percent",
- "label": "",
- "value": 0.5
- },
- "control_mode": {
- "name": "control_mode",
- "label": "",
- "value": "balanced"
- },
- "resize_mode": {
- "name": "resize_mode",
- "label": "",
- "value": "just_resize"
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 3950,
- "y": 150
- }
- },
- {
- "id": "8fe598c6-d447-44fa-a165-4975af77d080",
- "type": "invocation",
- "data": {
- "id": "8fe598c6-d447-44fa-a165-4975af77d080",
- "version": "1.3.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "canny_image_processor",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "image": {
- "name": "image",
- "label": ""
- },
- "detect_resolution": {
- "name": "detect_resolution",
- "label": "",
- "value": 512
- },
- "image_resolution": {
- "name": "image_resolution",
- "label": "",
- "value": 512
- },
- "low_threshold": {
- "name": "low_threshold",
- "label": "",
- "value": 100
- },
- "high_threshold": {
- "name": "high_threshold",
- "label": "",
- "value": 200
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 3519.4131037388597,
- "y": 576.7946795840575
- }
- },
{
"id": "4bd4ae80-567f-4366-b8c6-3bb06f4fb46a",
"type": "invocation",
@@ -1035,30 +1091,6 @@
"x": 2578.2364832140506,
"y": 78.7948456497351
}
- },
- {
- "id": "c6359181-6479-40ec-bf3a-b7e8451683b8",
- "type": "invocation",
- "data": {
- "id": "c6359181-6479-40ec-bf3a-b7e8451683b8",
- "version": "1.0.2",
- "label": "",
- "notes": "",
- "type": "main_model_loader",
- "inputs": {
- "model": {
- "name": "model",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 2031.5518710051792,
- "y": -492.1742944307074
- }
}
],
"edges": [
diff --git a/invokeai/app/services/workflow_records/default_workflows/Multi ControlNet (Canny & Depth).json b/invokeai/app/services/workflow_records/default_workflows/Multi ControlNet (Canny & Depth).json
index d859094216..3ff99b5eb3 100644
--- a/invokeai/app/services/workflow_records/default_workflows/Multi ControlNet (Canny & Depth).json
+++ b/invokeai/app/services/workflow_records/default_workflows/Multi ControlNet (Canny & Depth).json
@@ -2,7 +2,7 @@
"name": "Multi ControlNet (Canny & Depth)",
"author": "InvokeAI",
"description": "A sample workflow using canny & depth ControlNets to guide the generation process. ",
- "version": "2.0.0",
+ "version": "2.1.0",
"contact": "invoke@invoke.ai",
"tags": "ControlNet, canny, depth",
"notes": "",
@@ -37,140 +37,104 @@
}
],
"meta": {
- "category": "default",
- "version": "3.0.0"
+ "version": "3.0.0",
+ "category": "default"
},
"nodes": [
{
- "id": "8e860e51-5045-456e-bf04-9a62a2a5c49e",
+ "id": "9db25398-c869-4a63-8815-c6559341ef12",
"type": "invocation",
"data": {
- "id": "8e860e51-5045-456e-bf04-9a62a2a5c49e",
- "version": "1.0.2",
+ "id": "9db25398-c869-4a63-8815-c6559341ef12",
+ "version": "1.3.0",
"nodePack": "invokeai",
"label": "",
"notes": "",
- "type": "image",
+ "type": "l2i",
"inputs": {
- "image": {
- "name": "image",
- "label": "Depth Input Image"
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 3666.135718057363,
- "y": 186.66887319822808
- }
- },
- {
- "id": "a33199c2-8340-401e-b8a2-42ffa875fc1c",
- "type": "invocation",
- "data": {
- "id": "a33199c2-8340-401e-b8a2-42ffa875fc1c",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "controlnet",
- "inputs": {
- "image": {
- "name": "image",
+ "board": {
+ "name": "board",
"label": ""
},
- "control_model": {
- "name": "control_model",
- "label": "Control Model (select depth)",
- "value": {
- "key": "87e8855c-671f-4c9e-bbbb-8ed47ccb4aac",
- "hash": "blake3:2550bf22a53942dfa28ab2fed9d10d80851112531f44d977168992edf9d0534c",
- "name": "control_v11f1p_sd15_depth",
- "base": "sd-1",
- "type": "controlnet"
- }
+ "metadata": {
+ "name": "metadata",
+ "label": ""
},
- "control_weight": {
- "name": "control_weight",
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
"label": "",
- "value": 1
+ "value": false
},
- "begin_step_percent": {
- "name": "begin_step_percent",
+ "tile_size": {
+ "name": "tile_size",
"label": "",
"value": 0
},
- "end_step_percent": {
- "name": "end_step_percent",
+ "fp32": {
+ "name": "fp32",
"label": "",
- "value": 1
- },
- "control_mode": {
- "name": "control_mode",
- "label": "",
- "value": "balanced"
- },
- "resize_mode": {
- "name": "resize_mode",
- "label": "",
- "value": "just_resize"
+ "value": false
}
},
"isOpen": true,
- "isIntermediate": true,
+ "isIntermediate": false,
"useCache": true
},
"position": {
- "x": 4477.604342844504,
- "y": -49.39005411272677
- }
- },
- {
- "id": "273e3f96-49ea-4dc5-9d5b-9660390f14e1",
- "type": "invocation",
- "data": {
- "id": "273e3f96-49ea-4dc5-9d5b-9660390f14e1",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "compel",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "Negative Prompt",
- "value": ""
- },
- "clip": {
- "name": "clip",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 4075,
+ "x": 5675,
"y": -825
}
},
{
- "id": "54486974-835b-4d81-8f82-05f9f32ce9e9",
+ "id": "c826ba5e-9676-4475-b260-07b85e88753c",
"type": "invocation",
"data": {
- "id": "54486974-835b-4d81-8f82-05f9f32ce9e9",
- "version": "1.0.2",
+ "id": "c826ba5e-9676-4475-b260-07b85e88753c",
+ "version": "1.3.3",
"nodePack": "invokeai",
"label": "",
"notes": "",
- "type": "main_model_loader",
+ "type": "canny_image_processor",
"inputs": {
- "model": {
- "name": "model",
+ "board": {
+ "name": "board",
"label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "detect_resolution": {
+ "name": "detect_resolution",
+ "label": "",
+ "value": 512
+ },
+ "image_resolution": {
+ "name": "image_resolution",
+ "label": "",
+ "value": 512
+ },
+ "low_threshold": {
+ "name": "low_threshold",
+ "label": "",
+ "value": 100
+ },
+ "high_threshold": {
+ "name": "high_threshold",
+ "label": "",
+ "value": 200
}
},
"isOpen": true,
@@ -178,29 +142,52 @@
"useCache": true
},
"position": {
- "x": 3600,
- "y": -1000
+ "x": 4095.757337055795,
+ "y": -455.63440891935863
}
},
{
- "id": "7ce68934-3419-42d4-ac70-82cfc9397306",
+ "id": "018b1214-c2af-43a7-9910-fb687c6726d7",
"type": "invocation",
"data": {
- "id": "7ce68934-3419-42d4-ac70-82cfc9397306",
- "version": "1.1.1",
+ "id": "018b1214-c2af-43a7-9910-fb687c6726d7",
+ "version": "1.2.4",
"nodePack": "invokeai",
"label": "",
"notes": "",
- "type": "compel",
+ "type": "midas_depth_image_processor",
"inputs": {
- "prompt": {
- "name": "prompt",
- "label": "Positive Prompt",
- "value": ""
- },
- "clip": {
- "name": "clip",
+ "board": {
+ "name": "board",
"label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "a_mult": {
+ "name": "a_mult",
+ "label": "",
+ "value": 2
+ },
+ "bg_th": {
+ "name": "bg_th",
+ "label": "",
+ "value": 0.1
+ },
+ "detect_resolution": {
+ "name": "detect_resolution",
+ "label": "",
+ "value": 512
+ },
+ "image_resolution": {
+ "name": "image_resolution",
+ "label": "",
+ "value": 512
}
},
"isOpen": true,
@@ -208,8 +195,8 @@
"useCache": true
},
"position": {
- "x": 4075,
- "y": -1125
+ "x": 4082.783145980783,
+ "y": 0.01629251229994111
}
},
{
@@ -217,7 +204,7 @@
"type": "invocation",
"data": {
"id": "d204d184-f209-4fae-a0a1-d152800844e1",
- "version": "1.1.1",
+ "version": "1.1.2",
"nodePack": "invokeai",
"label": "",
"notes": "",
@@ -273,6 +260,185 @@
"y": -618.4221638099414
}
},
+ {
+ "id": "7ce68934-3419-42d4-ac70-82cfc9397306",
+ "type": "invocation",
+ "data": {
+ "id": "7ce68934-3419-42d4-ac70-82cfc9397306",
+ "version": "1.2.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "compel",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "Positive Prompt",
+ "value": ""
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 4075,
+ "y": -1125
+ }
+ },
+ {
+ "id": "54486974-835b-4d81-8f82-05f9f32ce9e9",
+ "type": "invocation",
+ "data": {
+ "id": "54486974-835b-4d81-8f82-05f9f32ce9e9",
+ "version": "1.0.3",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "main_model_loader",
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 3600,
+ "y": -1000
+ }
+ },
+ {
+ "id": "273e3f96-49ea-4dc5-9d5b-9660390f14e1",
+ "type": "invocation",
+ "data": {
+ "id": "273e3f96-49ea-4dc5-9d5b-9660390f14e1",
+ "version": "1.2.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "compel",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "Negative Prompt",
+ "value": ""
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 4075,
+ "y": -825
+ }
+ },
+ {
+ "id": "a33199c2-8340-401e-b8a2-42ffa875fc1c",
+ "type": "invocation",
+ "data": {
+ "id": "a33199c2-8340-401e-b8a2-42ffa875fc1c",
+ "version": "1.1.2",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "controlnet",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "control_model": {
+ "name": "control_model",
+ "label": "Control Model (select depth)",
+ "value": {
+ "key": "87e8855c-671f-4c9e-bbbb-8ed47ccb4aac",
+ "hash": "blake3:2550bf22a53942dfa28ab2fed9d10d80851112531f44d977168992edf9d0534c",
+ "name": "control_v11f1p_sd15_depth",
+ "base": "sd-1",
+ "type": "controlnet"
+ }
+ },
+ "control_weight": {
+ "name": "control_weight",
+ "label": "",
+ "value": 1
+ },
+ "begin_step_percent": {
+ "name": "begin_step_percent",
+ "label": "",
+ "value": 0
+ },
+ "end_step_percent": {
+ "name": "end_step_percent",
+ "label": "",
+ "value": 1
+ },
+ "control_mode": {
+ "name": "control_mode",
+ "label": "",
+ "value": "balanced"
+ },
+ "resize_mode": {
+ "name": "resize_mode",
+ "label": "",
+ "value": "just_resize"
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 4477.604342844504,
+ "y": -49.39005411272677
+ }
+ },
+ {
+ "id": "8e860e51-5045-456e-bf04-9a62a2a5c49e",
+ "type": "invocation",
+ "data": {
+ "id": "8e860e51-5045-456e-bf04-9a62a2a5c49e",
+ "version": "1.0.2",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "image",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": "Depth Input Image"
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 3666.135718057363,
+ "y": 186.66887319822808
+ }
+ },
{
"id": "c4b23e64-7986-40c4-9cad-46327b12e204",
"type": "invocation",
@@ -322,159 +488,6 @@
"y": -575
}
},
- {
- "id": "018b1214-c2af-43a7-9910-fb687c6726d7",
- "type": "invocation",
- "data": {
- "id": "018b1214-c2af-43a7-9910-fb687c6726d7",
- "version": "1.2.3",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "midas_depth_image_processor",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "image": {
- "name": "image",
- "label": ""
- },
- "a_mult": {
- "name": "a_mult",
- "label": "",
- "value": 2
- },
- "bg_th": {
- "name": "bg_th",
- "label": "",
- "value": 0.1
- },
- "detect_resolution": {
- "name": "detect_resolution",
- "label": "",
- "value": 512
- },
- "image_resolution": {
- "name": "image_resolution",
- "label": "",
- "value": 512
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 4082.783145980783,
- "y": 0.01629251229994111
- }
- },
- {
- "id": "c826ba5e-9676-4475-b260-07b85e88753c",
- "type": "invocation",
- "data": {
- "id": "c826ba5e-9676-4475-b260-07b85e88753c",
- "version": "1.3.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "canny_image_processor",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "image": {
- "name": "image",
- "label": ""
- },
- "detect_resolution": {
- "name": "detect_resolution",
- "label": "",
- "value": 512
- },
- "image_resolution": {
- "name": "image_resolution",
- "label": "",
- "value": 512
- },
- "low_threshold": {
- "name": "low_threshold",
- "label": "",
- "value": 100
- },
- "high_threshold": {
- "name": "high_threshold",
- "label": "",
- "value": 200
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 4095.757337055795,
- "y": -455.63440891935863
- }
- },
- {
- "id": "9db25398-c869-4a63-8815-c6559341ef12",
- "type": "invocation",
- "data": {
- "id": "9db25398-c869-4a63-8815-c6559341ef12",
- "version": "1.2.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "l2i",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "latents": {
- "name": "latents",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": false
- }
- },
- "isOpen": true,
- "isIntermediate": false,
- "useCache": true
- },
- "position": {
- "x": 5675,
- "y": -825
- }
- },
{
"id": "ac481b7f-08bf-4a9d-9e0c-3a82ea5243ce",
"type": "invocation",
diff --git a/invokeai/app/services/workflow_records/default_workflows/MultiDiffusion SDXL (Beta).json b/invokeai/app/services/workflow_records/default_workflows/MultiDiffusion SDXL (Beta).json
new file mode 100644
index 0000000000..b2842315c4
--- /dev/null
+++ b/invokeai/app/services/workflow_records/default_workflows/MultiDiffusion SDXL (Beta).json
@@ -0,0 +1,2181 @@
+{
+ "name": "MultiDiffusion SDXL (Beta)",
+ "author": "Invoke",
+ "description": "A workflow to upscale an input image with tiled upscaling, using SDXL based models.",
+ "version": "1.0.0",
+ "contact": "invoke@invoke.ai",
+ "tags": "tiled, upscaling, sdxl",
+ "notes": "",
+ "exposedFields": [
+ {
+ "nodeId": "1ba845a6-eb88-49a1-a490-5fe6754f3ec9",
+ "fieldName": "value"
+ },
+ {
+ "nodeId": "c3b60a50-8039-4924-90e3-8c608e1fecb5",
+ "fieldName": "board"
+ },
+ {
+ "nodeId": "5ca87ace-edf9-49c7-a424-cd42416b86a7",
+ "fieldName": "image"
+ },
+ {
+ "nodeId": "1dd915a3-6756-48ed-b68b-ee3b4bd06c1d",
+ "fieldName": "a"
+ },
+ {
+ "nodeId": "696de0e1-cdd2-42e8-abeb-57a926bc6df6",
+ "fieldName": "a"
+ },
+ {
+ "nodeId": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa",
+ "fieldName": "a"
+ },
+ {
+ "nodeId": "e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fd",
+ "fieldName": "model"
+ },
+ {
+ "nodeId": "f0cd0d2f-9614-43f7-9944-a75b8d5ccd65",
+ "fieldName": "model_name"
+ },
+ {
+ "nodeId": "c26bff37-4f12-482f-ba45-3a5d729b4c4f",
+ "fieldName": "value"
+ },
+ {
+ "nodeId": "f5ca24ee-21c5-4c8c-8d3c-371b5079b086",
+ "fieldName": "value"
+ },
+ {
+ "nodeId": "094bc4ed-5c68-4342-84f4-51056c755796",
+ "fieldName": "value"
+ },
+ {
+ "nodeId": "100b3143-b3fb-4ff3-bb3c-8d4d3f89ae3a",
+ "fieldName": "vae_model"
+ },
+ {
+ "nodeId": "f936ebb3-6902-4df9-a775-6a68bac2da70",
+ "fieldName": "model"
+ }
+ ],
+ "meta": {
+ "version": "3.0.0",
+ "category": "default"
+ },
+ "nodes": [
+ {
+ "id": "f936ebb3-6902-4df9-a775-6a68bac2da70",
+ "type": "invocation",
+ "data": {
+ "id": "f936ebb3-6902-4df9-a775-6a68bac2da70",
+ "type": "model_identifier",
+ "version": "1.0.0",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": "ControlNet Model - choose xinsir's tile ControlNet",
+ "value": {
+ "key": "845b6959-1657-4164-be33-fe0f63ad1752",
+ "hash": "random:3b602344599a53b4e4c80a2259362e122543e6f9e8e428be76ab910f9368704b",
+ "name": "controlnet-tile-sdxl-1.0",
+ "base": "sdxl",
+ "type": "controlnet"
+ }
+ }
+ }
+ },
+ "position": {
+ "x": -3983.6167650620723,
+ "y": -1329.1431151846386
+ }
+ },
+ {
+ "id": "00239057-20d4-4cd2-a010-28727b256ea2",
+ "type": "invocation",
+ "data": {
+ "id": "00239057-20d4-4cd2-a010-28727b256ea2",
+ "type": "rand_int",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": false,
+ "inputs": {
+ "low": {
+ "name": "low",
+ "label": "",
+ "value": 0
+ },
+ "high": {
+ "name": "high",
+ "label": "",
+ "value": 2147483647
+ }
+ }
+ },
+ "position": {
+ "x": -4000,
+ "y": -1800
+ }
+ },
+ {
+ "id": "094bc4ed-5c68-4342-84f4-51056c755796",
+ "type": "invocation",
+ "data": {
+ "id": "094bc4ed-5c68-4342-84f4-51056c755796",
+ "type": "boolean",
+ "version": "1.0.1",
+ "label": "Tiled Option",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "value": {
+ "name": "value",
+ "label": "Tiled VAE (Saves VRAM, Color Inconsistency)",
+ "value": false
+ }
+ }
+ },
+ "position": {
+ "x": -2746.0467136971292,
+ "y": -2219.070070545694
+ }
+ },
+ {
+ "id": "f5ca24ee-21c5-4c8c-8d3c-371b5079b086",
+ "type": "invocation",
+ "data": {
+ "id": "f5ca24ee-21c5-4c8c-8d3c-371b5079b086",
+ "type": "string",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "value": {
+ "name": "value",
+ "label": "Negative Prompt (Optional)",
+ "value": ""
+ }
+ }
+ },
+ "position": {
+ "x": -3525,
+ "y": -2525
+ }
+ },
+ {
+ "id": "c26bff37-4f12-482f-ba45-3a5d729b4c4f",
+ "type": "invocation",
+ "data": {
+ "id": "c26bff37-4f12-482f-ba45-3a5d729b4c4f",
+ "type": "string",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "value": {
+ "name": "value",
+ "label": "Positive Prompt (Optional)",
+ "value": ""
+ }
+ }
+ },
+ "position": {
+ "x": -3525,
+ "y": -2825
+ }
+ },
+ {
+ "id": "6daa9526-382b-491d-964f-f53fc308664f",
+ "type": "invocation",
+ "data": {
+ "id": "6daa9526-382b-491d-964f-f53fc308664f",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "ADD"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 0.35
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 100
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -1450
+ }
+ },
+ {
+ "id": "f1afd295-860f-48b6-a76a-90609bf2cc11",
+ "type": "invocation",
+ "data": {
+ "id": "f1afd295-860f-48b6-a76a-90609bf2cc11",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "MUL"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 1
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 0.013
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -1550
+ }
+ },
+ {
+ "id": "88ae723e-4933-4371-b52d-3ada52a59d36",
+ "type": "invocation",
+ "data": {
+ "id": "88ae723e-4933-4371-b52d-3ada52a59d36",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "ADD"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 0
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 100
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -1500
+ }
+ },
+ {
+ "id": "1dd915a3-6756-48ed-b68b-ee3b4bd06c1d",
+ "type": "invocation",
+ "data": {
+ "id": "1dd915a3-6756-48ed-b68b-ee3b4bd06c1d",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "Creativity Input",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "MUL"
+ },
+ "a": {
+ "name": "a",
+ "label": "Creativity Control (-10 to 10)",
+ "value": 5
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": -1
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -2125
+ }
+ },
+ {
+ "id": "c8f5c671-8c87-4d96-a75e-a9937ac6bc03",
+ "type": "invocation",
+ "data": {
+ "id": "c8f5c671-8c87-4d96-a75e-a9937ac6bc03",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "DIV"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 1
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 100
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -1975
+ }
+ },
+ {
+ "id": "14e65dbe-4249-4b25-9a63-3a10cfaeb61c",
+ "type": "invocation",
+ "data": {
+ "id": "14e65dbe-4249-4b25-9a63-3a10cfaeb61c",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "ADD"
+ },
+ "a": {
+ "name": "a",
+ "label": "A",
+ "value": 0
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 10
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -2075
+ }
+ },
+ {
+ "id": "49a8cc12-aa19-48c5-b6b3-04e0b603b384",
+ "type": "invocation",
+ "data": {
+ "id": "49a8cc12-aa19-48c5-b6b3-04e0b603b384",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "MUL"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 1
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 4.99
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -2025
+ }
+ },
+ {
+ "id": "e4d5ca7c-8fcf-4c59-9c58-67194c80dc73",
+ "type": "invocation",
+ "data": {
+ "id": "e4d5ca7c-8fcf-4c59-9c58-67194c80dc73",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "ADD"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 0
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 1
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -1925
+ }
+ },
+ {
+ "id": "696de0e1-cdd2-42e8-abeb-57a926bc6df6",
+ "type": "invocation",
+ "data": {
+ "id": "696de0e1-cdd2-42e8-abeb-57a926bc6df6",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "Sharpness Input",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "ADD"
+ },
+ "a": {
+ "name": "a",
+ "label": "Sharpness Control (-10 to 10)",
+ "value": 0
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 10
+ }
+ }
+ },
+ "position": {
+ "x": -4750,
+ "y": -2275
+ }
+ },
+ {
+ "id": "79390b60-4077-4f94-ad0a-4229cc73ddb2",
+ "type": "invocation",
+ "data": {
+ "id": "79390b60-4077-4f94-ad0a-4229cc73ddb2",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "MUL"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 1
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 3.75
+ }
+ }
+ },
+ "position": {
+ "x": -4750,
+ "y": -2000
+ }
+ },
+ {
+ "id": "4950132a-2d06-4571-b2c0-55cb37a31e9b",
+ "type": "invocation",
+ "data": {
+ "id": "4950132a-2d06-4571-b2c0-55cb37a31e9b",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "ADD"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 25
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 1
+ }
+ }
+ },
+ "position": {
+ "x": -4750,
+ "y": -1950
+ }
+ },
+ {
+ "id": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa",
+ "type": "invocation",
+ "data": {
+ "id": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "Structural Input",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "ADD"
+ },
+ "a": {
+ "name": "a",
+ "label": "Structural Control (-10 to 10)",
+ "value": 0
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 10
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -1700
+ }
+ },
+ {
+ "id": "bc53651f-208b-440c-be30-f93f72ae700e",
+ "type": "invocation",
+ "data": {
+ "id": "bc53651f-208b-440c-be30-f93f72ae700e",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "MUL"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 1
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 0.025
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -1650
+ }
+ },
+ {
+ "id": "67346654-cac0-446a-8cde-9af4b5a029a6",
+ "type": "invocation",
+ "data": {
+ "id": "67346654-cac0-446a-8cde-9af4b5a029a6",
+ "type": "float_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "ADD"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 0.3
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 1
+ }
+ }
+ },
+ "position": {
+ "x": -3500,
+ "y": -1600
+ }
+ },
+ {
+ "id": "6636a27a-f130-4a13-b3e5-50b44e4a566f",
+ "type": "invocation",
+ "data": {
+ "id": "6636a27a-f130-4a13-b3e5-50b44e4a566f",
+ "type": "collect",
+ "version": "1.0.0",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "item": {
+ "name": "item",
+ "label": ""
+ }
+ }
+ },
+ "position": {
+ "x": -3125,
+ "y": -1500
+ }
+ },
+ {
+ "id": "b78f53b6-2eae-4956-97b4-7e73768d1491",
+ "type": "invocation",
+ "data": {
+ "id": "b78f53b6-2eae-4956-97b4-7e73768d1491",
+ "type": "controlnet",
+ "version": "1.1.2",
+ "label": "ControlNet (use xinsir's tile ControlNet)",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "control_model": {
+ "name": "control_model",
+ "label": ""
+ },
+ "control_weight": {
+ "name": "control_weight",
+ "label": "",
+ "value": 0.6
+ },
+ "begin_step_percent": {
+ "name": "begin_step_percent",
+ "label": "",
+ "value": 0
+ },
+ "end_step_percent": {
+ "name": "end_step_percent",
+ "label": "",
+ "value": 0.5
+ },
+ "control_mode": {
+ "name": "control_mode",
+ "label": "",
+ "value": "balanced"
+ },
+ "resize_mode": {
+ "name": "resize_mode",
+ "label": "",
+ "value": "just_resize"
+ }
+ }
+ },
+ "position": {
+ "x": -3493.4229674963885,
+ "y": -1359.2223984776113
+ }
+ },
+ {
+ "id": "27215391-b20e-412a-b854-7fa5927f5437",
+ "type": "invocation",
+ "data": {
+ "id": "27215391-b20e-412a-b854-7fa5927f5437",
+ "type": "sdxl_compel_prompt",
+ "version": "1.2.0",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "",
+ "value": ""
+ },
+ "style": {
+ "name": "style",
+ "label": "",
+ "value": ""
+ },
+ "original_width": {
+ "name": "original_width",
+ "label": "",
+ "value": 4096
+ },
+ "original_height": {
+ "name": "original_height",
+ "label": "",
+ "value": 4096
+ },
+ "crop_top": {
+ "name": "crop_top",
+ "label": "",
+ "value": 0
+ },
+ "crop_left": {
+ "name": "crop_left",
+ "label": "",
+ "value": 0
+ },
+ "target_width": {
+ "name": "target_width",
+ "label": "",
+ "value": 1024
+ },
+ "target_height": {
+ "name": "target_height",
+ "label": "",
+ "value": 1024
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "clip2": {
+ "name": "clip2",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ }
+ },
+ "position": {
+ "x": -3525,
+ "y": -2300
+ }
+ },
+ {
+ "id": "100b3143-b3fb-4ff3-bb3c-8d4d3f89ae3a",
+ "type": "invocation",
+ "data": {
+ "id": "100b3143-b3fb-4ff3-bb3c-8d4d3f89ae3a",
+ "type": "vae_loader",
+ "version": "1.0.3",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "vae_model": {
+ "name": "vae_model",
+ "label": "",
+ "value": {
+ "key": "4bc2bddf-94d9-4efe-a8e2-5eda28710f4c",
+ "hash": "random:67e47a77a1fcef9c0f5cd5d889d71c191f07383a0bf587f1849b2bc3f359440a",
+ "name": "sdxl-vae-fp16-fix",
+ "base": "sdxl",
+ "type": "vae"
+ }
+ }
+ }
+ },
+ "position": {
+ "x": -4000,
+ "y": -2575
+ }
+ },
+ {
+ "id": "e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fd",
+ "type": "invocation",
+ "data": {
+ "id": "e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fd",
+ "type": "sdxl_model_loader",
+ "version": "1.0.3",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": "SDXL Model"
+ }
+ }
+ },
+ "position": {
+ "x": -4000,
+ "y": -2825
+ }
+ },
+ {
+ "id": "6142b69a-323f-4ecd-a7e5-67dc61349c51",
+ "type": "invocation",
+ "data": {
+ "id": "6142b69a-323f-4ecd-a7e5-67dc61349c51",
+ "type": "sdxl_compel_prompt",
+ "version": "1.2.0",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "",
+ "value": ""
+ },
+ "style": {
+ "name": "style",
+ "label": "",
+ "value": ""
+ },
+ "original_width": {
+ "name": "original_width",
+ "label": "",
+ "value": 4096
+ },
+ "original_height": {
+ "name": "original_height",
+ "label": "",
+ "value": 4096
+ },
+ "crop_top": {
+ "name": "crop_top",
+ "label": "",
+ "value": 0
+ },
+ "crop_left": {
+ "name": "crop_left",
+ "label": "",
+ "value": 0
+ },
+ "target_width": {
+ "name": "target_width",
+ "label": "",
+ "value": 1024
+ },
+ "target_height": {
+ "name": "target_height",
+ "label": "",
+ "value": 1024
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "clip2": {
+ "name": "clip2",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ }
+ },
+ "position": {
+ "x": -3525,
+ "y": -2600
+ }
+ },
+ {
+ "id": "041c59cc-f9e4-4dc9-8b31-84648c5f3ebe",
+ "type": "invocation",
+ "data": {
+ "id": "041c59cc-f9e4-4dc9-8b31-84648c5f3ebe",
+ "type": "unsharp_mask",
+ "version": "1.2.2",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "radius": {
+ "name": "radius",
+ "label": "",
+ "value": 2
+ },
+ "strength": {
+ "name": "strength",
+ "label": "",
+ "value": 50
+ }
+ }
+ },
+ "position": {
+ "x": -4400,
+ "y": -1875
+ }
+ },
+ {
+ "id": "53c2d5fd-863d-4950-93e0-628f3d61b493",
+ "type": "invocation",
+ "data": {
+ "id": "53c2d5fd-863d-4950-93e0-628f3d61b493",
+ "type": "unsharp_mask",
+ "version": "1.2.2",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "radius": {
+ "name": "radius",
+ "label": "",
+ "value": 2
+ },
+ "strength": {
+ "name": "strength",
+ "label": "",
+ "value": 50
+ }
+ }
+ },
+ "position": {
+ "x": -4750,
+ "y": -1875
+ }
+ },
+ {
+ "id": "117f982a-03da-49b1-bf9f-29711160ac02",
+ "type": "invocation",
+ "data": {
+ "id": "117f982a-03da-49b1-bf9f-29711160ac02",
+ "type": "i2l",
+ "version": "1.1.0",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": false
+ }
+ }
+ },
+ "position": {
+ "x": -4000,
+ "y": -1875
+ }
+ },
+ {
+ "id": "c3b60a50-8039-4924-90e3-8c608e1fecb5",
+ "type": "invocation",
+ "data": {
+ "id": "c3b60a50-8039-4924-90e3-8c608e1fecb5",
+ "type": "l2i",
+ "version": "1.3.0",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": false,
+ "useCache": true,
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": "Output Board"
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": false
+ }
+ }
+ },
+ "position": {
+ "x": -2750,
+ "y": -2575
+ }
+ },
+ {
+ "id": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "type": "invocation",
+ "data": {
+ "id": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "type": "tiled_multi_diffusion_denoise_latents",
+ "version": "1.0.0",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "positive_conditioning": {
+ "name": "positive_conditioning",
+ "label": ""
+ },
+ "negative_conditioning": {
+ "name": "negative_conditioning",
+ "label": ""
+ },
+ "noise": {
+ "name": "noise",
+ "label": ""
+ },
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "tile_height": {
+ "name": "tile_height",
+ "label": "",
+ "value": 1024
+ },
+ "tile_width": {
+ "name": "tile_width",
+ "label": "",
+ "value": 1024
+ },
+ "tile_overlap": {
+ "name": "tile_overlap",
+ "label": "",
+ "value": 128
+ },
+ "steps": {
+ "name": "steps",
+ "label": "",
+ "value": 25
+ },
+ "cfg_scale": {
+ "name": "cfg_scale",
+ "label": "",
+ "value": 5
+ },
+ "denoising_start": {
+ "name": "denoising_start",
+ "label": "",
+ "value": 0.6
+ },
+ "denoising_end": {
+ "name": "denoising_end",
+ "label": "",
+ "value": 1
+ },
+ "scheduler": {
+ "name": "scheduler",
+ "label": "",
+ "value": "kdpm_2"
+ },
+ "unet": {
+ "name": "unet",
+ "label": ""
+ },
+ "cfg_rescale_multiplier": {
+ "name": "cfg_rescale_multiplier",
+ "label": "",
+ "value": 0
+ },
+ "control": {
+ "name": "control",
+ "label": ""
+ }
+ }
+ },
+ "position": {
+ "x": -3125,
+ "y": -2575
+ }
+ },
+ {
+ "id": "1ba845a6-eb88-49a1-a490-5fe6754f3ec9",
+ "type": "invocation",
+ "data": {
+ "id": "1ba845a6-eb88-49a1-a490-5fe6754f3ec9",
+ "type": "integer",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "value": {
+ "name": "value",
+ "label": "Scale (2x, 4x)",
+ "value": 2
+ }
+ }
+ },
+ "position": {
+ "x": -4400,
+ "y": -2175
+ }
+ },
+ {
+ "id": "d350feac-9686-4e0d-bd46-a96bd2630818",
+ "type": "invocation",
+ "data": {
+ "id": "d350feac-9686-4e0d-bd46-a96bd2630818",
+ "type": "integer_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "MUL"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 1
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 1
+ }
+ }
+ },
+ "position": {
+ "x": -4400,
+ "y": -1950
+ }
+ },
+ {
+ "id": "5b256f14-caab-40ff-b8f0-9679cd542163",
+ "type": "invocation",
+ "data": {
+ "id": "5b256f14-caab-40ff-b8f0-9679cd542163",
+ "type": "integer_math",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "operation": {
+ "name": "operation",
+ "label": "",
+ "value": "MUL"
+ },
+ "a": {
+ "name": "a",
+ "label": "",
+ "value": 1
+ },
+ "b": {
+ "name": "b",
+ "label": "",
+ "value": 1
+ }
+ }
+ },
+ "position": {
+ "x": -4400,
+ "y": -2000
+ }
+ },
+ {
+ "id": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "type": "invocation",
+ "data": {
+ "id": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "type": "img_resize",
+ "version": "1.2.2",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "width": {
+ "name": "width",
+ "label": "",
+ "value": 512
+ },
+ "height": {
+ "name": "height",
+ "label": "",
+ "value": 512
+ },
+ "resample_mode": {
+ "name": "resample_mode",
+ "label": "",
+ "value": "lanczos"
+ }
+ }
+ },
+ "position": {
+ "x": -4375,
+ "y": -1825
+ }
+ },
+ {
+ "id": "be4082d6-e238-40ea-a9df-fc0d725e8895",
+ "type": "invocation",
+ "data": {
+ "id": "be4082d6-e238-40ea-a9df-fc0d725e8895",
+ "type": "controlnet",
+ "version": "1.1.2",
+ "label": "ControlNet (use xinsir's tile ControlNet)",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "control_model": {
+ "name": "control_model",
+ "label": ""
+ },
+ "control_weight": {
+ "name": "control_weight",
+ "label": "",
+ "value": 0.25
+ },
+ "begin_step_percent": {
+ "name": "begin_step_percent",
+ "label": "",
+ "value": 0.5
+ },
+ "end_step_percent": {
+ "name": "end_step_percent",
+ "label": "",
+ "value": 0.8
+ },
+ "control_mode": {
+ "name": "control_mode",
+ "label": "Control Mode",
+ "value": "balanced"
+ },
+ "resize_mode": {
+ "name": "resize_mode",
+ "label": "",
+ "value": "just_resize"
+ }
+ }
+ },
+ "position": {
+ "x": -3131.577032503611,
+ "y": -1392.1075609956667
+ }
+ },
+ {
+ "id": "8923451b-5a27-4395-b7f2-dce875fca6f5",
+ "type": "invocation",
+ "data": {
+ "id": "8923451b-5a27-4395-b7f2-dce875fca6f5",
+ "type": "noise",
+ "version": "1.0.2",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "seed": {
+ "name": "seed",
+ "label": "",
+ "value": 3
+ },
+ "width": {
+ "name": "width",
+ "label": "",
+ "value": 512
+ },
+ "height": {
+ "name": "height",
+ "label": "",
+ "value": 512
+ },
+ "use_cpu": {
+ "name": "use_cpu",
+ "label": "",
+ "value": true
+ }
+ }
+ },
+ "position": {
+ "x": -4000,
+ "y": -1750
+ }
+ },
+ {
+ "id": "f0cd0d2f-9614-43f7-9944-a75b8d5ccd65",
+ "type": "invocation",
+ "data": {
+ "id": "f0cd0d2f-9614-43f7-9944-a75b8d5ccd65",
+ "type": "esrgan",
+ "version": "1.3.2",
+ "label": "",
+ "notes": "",
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true,
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "model_name": {
+ "name": "model_name",
+ "label": "Upscaling Model",
+ "value": "RealESRGAN_x4plus.pth"
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 500
+ }
+ }
+ },
+ "position": {
+ "x": -4750,
+ "y": -1825
+ }
+ },
+ {
+ "id": "7dbb756b-7d79-431c-a46d-d8f7b082c127",
+ "type": "invocation",
+ "data": {
+ "id": "7dbb756b-7d79-431c-a46d-d8f7b082c127",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "type": "float_to_int",
+ "inputs": {
+ "value": {
+ "name": "value",
+ "label": "",
+ "value": 0
+ },
+ "multiple": {
+ "name": "multiple",
+ "label": "",
+ "value": 8
+ },
+ "method": {
+ "name": "method",
+ "label": "",
+ "value": "Floor"
+ }
+ },
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -4000,
+ "y": -1950
+ }
+ },
+ {
+ "id": "5ca87ace-edf9-49c7-a424-cd42416b86a7",
+ "type": "invocation",
+ "data": {
+ "id": "5ca87ace-edf9-49c7-a424-cd42416b86a7",
+ "version": "1.0.2",
+ "label": "",
+ "notes": "",
+ "type": "image",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": "Image to Upscale"
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -4750,
+ "y": -2850
+ }
+ },
+ {
+ "id": "f5d9bf3b-2646-4b17-9894-20fd2b4218ea",
+ "type": "invocation",
+ "data": {
+ "id": "f5d9bf3b-2646-4b17-9894-20fd2b4218ea",
+ "version": "1.0.1",
+ "label": "",
+ "notes": "",
+ "type": "float_to_int",
+ "inputs": {
+ "value": {
+ "name": "value",
+ "label": "",
+ "value": 8
+ },
+ "multiple": {
+ "name": "multiple",
+ "label": "",
+ "value": 8
+ },
+ "method": {
+ "name": "method",
+ "label": "",
+ "value": "Floor"
+ }
+ },
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -4000,
+ "y": -2000
+ }
+ }
+ ],
+ "edges": [
+ {
+ "id": "reactflow__edge-f936ebb3-6902-4df9-a775-6a68bac2da70model-be4082d6-e238-40ea-a9df-fc0d725e8895control_model",
+ "type": "default",
+ "source": "f936ebb3-6902-4df9-a775-6a68bac2da70",
+ "target": "be4082d6-e238-40ea-a9df-fc0d725e8895",
+ "sourceHandle": "model",
+ "targetHandle": "control_model"
+ },
+ {
+ "id": "reactflow__edge-f936ebb3-6902-4df9-a775-6a68bac2da70model-b78f53b6-2eae-4956-97b4-7e73768d1491control_model",
+ "type": "default",
+ "source": "f936ebb3-6902-4df9-a775-6a68bac2da70",
+ "target": "b78f53b6-2eae-4956-97b4-7e73768d1491",
+ "sourceHandle": "model",
+ "targetHandle": "control_model"
+ },
+ {
+ "id": "49a8cc12-aa19-48c5-b6b3-04e0b603b384-c8f5c671-8c87-4d96-a75e-a9937ac6bc03-collapsed",
+ "type": "collapsed",
+ "source": "49a8cc12-aa19-48c5-b6b3-04e0b603b384",
+ "target": "c8f5c671-8c87-4d96-a75e-a9937ac6bc03"
+ },
+ {
+ "id": "14e65dbe-4249-4b25-9a63-3a10cfaeb61c-49a8cc12-aa19-48c5-b6b3-04e0b603b384-collapsed",
+ "type": "collapsed",
+ "source": "14e65dbe-4249-4b25-9a63-3a10cfaeb61c",
+ "target": "49a8cc12-aa19-48c5-b6b3-04e0b603b384"
+ },
+ {
+ "id": "1dd915a3-6756-48ed-b68b-ee3b4bd06c1d-14e65dbe-4249-4b25-9a63-3a10cfaeb61c-collapsed",
+ "type": "collapsed",
+ "source": "1dd915a3-6756-48ed-b68b-ee3b4bd06c1d",
+ "target": "14e65dbe-4249-4b25-9a63-3a10cfaeb61c"
+ },
+ {
+ "id": "reactflow__edge-00239057-20d4-4cd2-a010-28727b256ea2value-8923451b-5a27-4395-b7f2-dce875fca6f5seed",
+ "type": "default",
+ "source": "00239057-20d4-4cd2-a010-28727b256ea2",
+ "target": "8923451b-5a27-4395-b7f2-dce875fca6f5",
+ "sourceHandle": "value",
+ "targetHandle": "seed"
+ },
+ {
+ "id": "reactflow__edge-094bc4ed-5c68-4342-84f4-51056c755796value-c3b60a50-8039-4924-90e3-8c608e1fecb5tiled",
+ "type": "default",
+ "source": "094bc4ed-5c68-4342-84f4-51056c755796",
+ "target": "c3b60a50-8039-4924-90e3-8c608e1fecb5",
+ "sourceHandle": "value",
+ "targetHandle": "tiled"
+ },
+ {
+ "id": "reactflow__edge-094bc4ed-5c68-4342-84f4-51056c755796value-117f982a-03da-49b1-bf9f-29711160ac02tiled",
+ "type": "default",
+ "source": "094bc4ed-5c68-4342-84f4-51056c755796",
+ "target": "117f982a-03da-49b1-bf9f-29711160ac02",
+ "sourceHandle": "value",
+ "targetHandle": "tiled"
+ },
+ {
+ "id": "c8f5c671-8c87-4d96-a75e-a9937ac6bc03-e4d5ca7c-8fcf-4c59-9c58-67194c80dc73-collapsed",
+ "type": "collapsed",
+ "source": "c8f5c671-8c87-4d96-a75e-a9937ac6bc03",
+ "target": "e4d5ca7c-8fcf-4c59-9c58-67194c80dc73"
+ },
+ {
+ "id": "d350feac-9686-4e0d-bd46-a96bd2630818-7dbb756b-7d79-431c-a46d-d8f7b082c127-collapsed",
+ "type": "collapsed",
+ "source": "d350feac-9686-4e0d-bd46-a96bd2630818",
+ "target": "7dbb756b-7d79-431c-a46d-d8f7b082c127"
+ },
+ {
+ "id": "5b256f14-caab-40ff-b8f0-9679cd542163-f5d9bf3b-2646-4b17-9894-20fd2b4218ea-collapsed",
+ "type": "collapsed",
+ "source": "5b256f14-caab-40ff-b8f0-9679cd542163",
+ "target": "f5d9bf3b-2646-4b17-9894-20fd2b4218ea"
+ },
+ {
+ "id": "4950132a-2d06-4571-b2c0-55cb37a31e9b-041c59cc-f9e4-4dc9-8b31-84648c5f3ebe-collapsed",
+ "type": "collapsed",
+ "source": "4950132a-2d06-4571-b2c0-55cb37a31e9b",
+ "target": "041c59cc-f9e4-4dc9-8b31-84648c5f3ebe"
+ },
+ {
+ "id": "4950132a-2d06-4571-b2c0-55cb37a31e9b-53c2d5fd-863d-4950-93e0-628f3d61b493-collapsed",
+ "type": "collapsed",
+ "source": "4950132a-2d06-4571-b2c0-55cb37a31e9b",
+ "target": "53c2d5fd-863d-4950-93e0-628f3d61b493"
+ },
+ {
+ "id": "reactflow__edge-f5ca24ee-21c5-4c8c-8d3c-371b5079b086value-27215391-b20e-412a-b854-7fa5927f5437style",
+ "type": "default",
+ "source": "f5ca24ee-21c5-4c8c-8d3c-371b5079b086",
+ "target": "27215391-b20e-412a-b854-7fa5927f5437",
+ "sourceHandle": "value",
+ "targetHandle": "style"
+ },
+ {
+ "id": "reactflow__edge-f5ca24ee-21c5-4c8c-8d3c-371b5079b086value-27215391-b20e-412a-b854-7fa5927f5437prompt",
+ "type": "default",
+ "source": "f5ca24ee-21c5-4c8c-8d3c-371b5079b086",
+ "target": "27215391-b20e-412a-b854-7fa5927f5437",
+ "sourceHandle": "value",
+ "targetHandle": "prompt"
+ },
+ {
+ "id": "reactflow__edge-c26bff37-4f12-482f-ba45-3a5d729b4c4fvalue-6142b69a-323f-4ecd-a7e5-67dc61349c51style",
+ "type": "default",
+ "source": "c26bff37-4f12-482f-ba45-3a5d729b4c4f",
+ "target": "6142b69a-323f-4ecd-a7e5-67dc61349c51",
+ "sourceHandle": "value",
+ "targetHandle": "style"
+ },
+ {
+ "id": "reactflow__edge-c26bff37-4f12-482f-ba45-3a5d729b4c4fvalue-6142b69a-323f-4ecd-a7e5-67dc61349c51prompt",
+ "type": "default",
+ "source": "c26bff37-4f12-482f-ba45-3a5d729b4c4f",
+ "target": "6142b69a-323f-4ecd-a7e5-67dc61349c51",
+ "sourceHandle": "value",
+ "targetHandle": "prompt"
+ },
+ {
+ "id": "88ae723e-4933-4371-b52d-3ada52a59d36-6daa9526-382b-491d-964f-f53fc308664f-collapsed",
+ "type": "collapsed",
+ "source": "88ae723e-4933-4371-b52d-3ada52a59d36",
+ "target": "6daa9526-382b-491d-964f-f53fc308664f"
+ },
+ {
+ "id": "f1afd295-860f-48b6-a76a-90609bf2cc11-88ae723e-4933-4371-b52d-3ada52a59d36-collapsed",
+ "type": "collapsed",
+ "source": "f1afd295-860f-48b6-a76a-90609bf2cc11",
+ "target": "88ae723e-4933-4371-b52d-3ada52a59d36"
+ },
+ {
+ "id": "bc53651f-208b-440c-be30-f93f72ae700e-67346654-cac0-446a-8cde-9af4b5a029a6-collapsed",
+ "type": "collapsed",
+ "source": "bc53651f-208b-440c-be30-f93f72ae700e",
+ "target": "67346654-cac0-446a-8cde-9af4b5a029a6"
+ },
+ {
+ "id": "reactflow__edge-67346654-cac0-446a-8cde-9af4b5a029a6value-be4082d6-e238-40ea-a9df-fc0d725e8895begin_step_percent",
+ "type": "default",
+ "source": "67346654-cac0-446a-8cde-9af4b5a029a6",
+ "target": "be4082d6-e238-40ea-a9df-fc0d725e8895",
+ "sourceHandle": "value",
+ "targetHandle": "begin_step_percent"
+ },
+ {
+ "id": "reactflow__edge-67346654-cac0-446a-8cde-9af4b5a029a6value-b78f53b6-2eae-4956-97b4-7e73768d1491end_step_percent",
+ "type": "default",
+ "source": "67346654-cac0-446a-8cde-9af4b5a029a6",
+ "target": "b78f53b6-2eae-4956-97b4-7e73768d1491",
+ "sourceHandle": "value",
+ "targetHandle": "end_step_percent"
+ },
+ {
+ "id": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa-f1afd295-860f-48b6-a76a-90609bf2cc11-collapsed",
+ "type": "collapsed",
+ "source": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa",
+ "target": "f1afd295-860f-48b6-a76a-90609bf2cc11"
+ },
+ {
+ "id": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa-bc53651f-208b-440c-be30-f93f72ae700e-collapsed",
+ "type": "collapsed",
+ "source": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa",
+ "target": "bc53651f-208b-440c-be30-f93f72ae700e"
+ },
+ {
+ "id": "reactflow__edge-bc53651f-208b-440c-be30-f93f72ae700evalue-67346654-cac0-446a-8cde-9af4b5a029a6b",
+ "type": "default",
+ "source": "bc53651f-208b-440c-be30-f93f72ae700e",
+ "target": "67346654-cac0-446a-8cde-9af4b5a029a6",
+ "sourceHandle": "value",
+ "targetHandle": "b",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-6daa9526-382b-491d-964f-f53fc308664fvalue-b78f53b6-2eae-4956-97b4-7e73768d1491control_weight",
+ "type": "default",
+ "source": "6daa9526-382b-491d-964f-f53fc308664f",
+ "target": "b78f53b6-2eae-4956-97b4-7e73768d1491",
+ "sourceHandle": "value",
+ "targetHandle": "control_weight"
+ },
+ {
+ "id": "reactflow__edge-88ae723e-4933-4371-b52d-3ada52a59d36value-6daa9526-382b-491d-964f-f53fc308664fb",
+ "type": "default",
+ "source": "88ae723e-4933-4371-b52d-3ada52a59d36",
+ "target": "6daa9526-382b-491d-964f-f53fc308664f",
+ "sourceHandle": "value",
+ "targetHandle": "b",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-88ae723e-4933-4371-b52d-3ada52a59d36value-be4082d6-e238-40ea-a9df-fc0d725e8895control_weight",
+ "type": "default",
+ "source": "88ae723e-4933-4371-b52d-3ada52a59d36",
+ "target": "be4082d6-e238-40ea-a9df-fc0d725e8895",
+ "sourceHandle": "value",
+ "targetHandle": "control_weight"
+ },
+ {
+ "id": "reactflow__edge-f1afd295-860f-48b6-a76a-90609bf2cc11value-88ae723e-4933-4371-b52d-3ada52a59d36b",
+ "type": "default",
+ "source": "f1afd295-860f-48b6-a76a-90609bf2cc11",
+ "target": "88ae723e-4933-4371-b52d-3ada52a59d36",
+ "sourceHandle": "value",
+ "targetHandle": "b",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-bd094e2f-41e5-4b61-9f7b-56cf337d53favalue-f1afd295-860f-48b6-a76a-90609bf2cc11a",
+ "type": "default",
+ "source": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa",
+ "target": "f1afd295-860f-48b6-a76a-90609bf2cc11",
+ "sourceHandle": "value",
+ "targetHandle": "a",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-1dd915a3-6756-48ed-b68b-ee3b4bd06c1dvalue-14e65dbe-4249-4b25-9a63-3a10cfaeb61ca",
+ "type": "default",
+ "source": "1dd915a3-6756-48ed-b68b-ee3b4bd06c1d",
+ "target": "14e65dbe-4249-4b25-9a63-3a10cfaeb61c",
+ "sourceHandle": "value",
+ "targetHandle": "a",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-e4d5ca7c-8fcf-4c59-9c58-67194c80dc73value-8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7adenoising_start",
+ "type": "default",
+ "source": "e4d5ca7c-8fcf-4c59-9c58-67194c80dc73",
+ "target": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "sourceHandle": "value",
+ "targetHandle": "denoising_start"
+ },
+ {
+ "id": "reactflow__edge-c8f5c671-8c87-4d96-a75e-a9937ac6bc03value-e4d5ca7c-8fcf-4c59-9c58-67194c80dc73b",
+ "type": "default",
+ "source": "c8f5c671-8c87-4d96-a75e-a9937ac6bc03",
+ "target": "e4d5ca7c-8fcf-4c59-9c58-67194c80dc73",
+ "sourceHandle": "value",
+ "targetHandle": "b",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-49a8cc12-aa19-48c5-b6b3-04e0b603b384value-c8f5c671-8c87-4d96-a75e-a9937ac6bc03a",
+ "type": "default",
+ "source": "49a8cc12-aa19-48c5-b6b3-04e0b603b384",
+ "target": "c8f5c671-8c87-4d96-a75e-a9937ac6bc03",
+ "sourceHandle": "value",
+ "targetHandle": "a",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-14e65dbe-4249-4b25-9a63-3a10cfaeb61cvalue-49a8cc12-aa19-48c5-b6b3-04e0b603b384a",
+ "type": "default",
+ "source": "14e65dbe-4249-4b25-9a63-3a10cfaeb61c",
+ "target": "49a8cc12-aa19-48c5-b6b3-04e0b603b384",
+ "sourceHandle": "value",
+ "targetHandle": "a",
+ "hidden": true
+ },
+ {
+ "id": "79390b60-4077-4f94-ad0a-4229cc73ddb2-4950132a-2d06-4571-b2c0-55cb37a31e9b-collapsed",
+ "type": "collapsed",
+ "source": "79390b60-4077-4f94-ad0a-4229cc73ddb2",
+ "target": "4950132a-2d06-4571-b2c0-55cb37a31e9b"
+ },
+ {
+ "id": "reactflow__edge-4950132a-2d06-4571-b2c0-55cb37a31e9bvalue-041c59cc-f9e4-4dc9-8b31-84648c5f3ebestrength",
+ "type": "default",
+ "source": "4950132a-2d06-4571-b2c0-55cb37a31e9b",
+ "target": "041c59cc-f9e4-4dc9-8b31-84648c5f3ebe",
+ "sourceHandle": "value",
+ "targetHandle": "strength",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-4950132a-2d06-4571-b2c0-55cb37a31e9bvalue-53c2d5fd-863d-4950-93e0-628f3d61b493strength",
+ "type": "default",
+ "source": "4950132a-2d06-4571-b2c0-55cb37a31e9b",
+ "target": "53c2d5fd-863d-4950-93e0-628f3d61b493",
+ "sourceHandle": "value",
+ "targetHandle": "strength",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-79390b60-4077-4f94-ad0a-4229cc73ddb2value-4950132a-2d06-4571-b2c0-55cb37a31e9bb",
+ "type": "default",
+ "source": "79390b60-4077-4f94-ad0a-4229cc73ddb2",
+ "target": "4950132a-2d06-4571-b2c0-55cb37a31e9b",
+ "sourceHandle": "value",
+ "targetHandle": "b",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-696de0e1-cdd2-42e8-abeb-57a926bc6df6value-79390b60-4077-4f94-ad0a-4229cc73ddb2a",
+ "type": "default",
+ "source": "696de0e1-cdd2-42e8-abeb-57a926bc6df6",
+ "target": "79390b60-4077-4f94-ad0a-4229cc73ddb2",
+ "sourceHandle": "value",
+ "targetHandle": "a"
+ },
+ {
+ "id": "reactflow__edge-bd094e2f-41e5-4b61-9f7b-56cf337d53favalue-bc53651f-208b-440c-be30-f93f72ae700ea",
+ "type": "default",
+ "source": "bd094e2f-41e5-4b61-9f7b-56cf337d53fa",
+ "target": "bc53651f-208b-440c-be30-f93f72ae700e",
+ "sourceHandle": "value",
+ "targetHandle": "a",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-6636a27a-f130-4a13-b3e5-50b44e4a566fcollection-8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7acontrol",
+ "type": "default",
+ "source": "6636a27a-f130-4a13-b3e5-50b44e4a566f",
+ "target": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "sourceHandle": "collection",
+ "targetHandle": "control"
+ },
+ {
+ "id": "reactflow__edge-b78f53b6-2eae-4956-97b4-7e73768d1491control-6636a27a-f130-4a13-b3e5-50b44e4a566fitem",
+ "type": "default",
+ "source": "b78f53b6-2eae-4956-97b4-7e73768d1491",
+ "target": "6636a27a-f130-4a13-b3e5-50b44e4a566f",
+ "sourceHandle": "control",
+ "targetHandle": "item"
+ },
+ {
+ "id": "reactflow__edge-be4082d6-e238-40ea-a9df-fc0d725e8895control-6636a27a-f130-4a13-b3e5-50b44e4a566fitem",
+ "type": "default",
+ "source": "be4082d6-e238-40ea-a9df-fc0d725e8895",
+ "target": "6636a27a-f130-4a13-b3e5-50b44e4a566f",
+ "sourceHandle": "control",
+ "targetHandle": "item"
+ },
+ {
+ "id": "reactflow__edge-7671553a-cd4b-4e25-8332-9d5667e64493image-b78f53b6-2eae-4956-97b4-7e73768d1491image",
+ "type": "default",
+ "source": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "target": "b78f53b6-2eae-4956-97b4-7e73768d1491",
+ "sourceHandle": "image",
+ "targetHandle": "image"
+ },
+ {
+ "id": "reactflow__edge-e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fdclip2-27215391-b20e-412a-b854-7fa5927f5437clip2",
+ "type": "default",
+ "source": "e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fd",
+ "target": "27215391-b20e-412a-b854-7fa5927f5437",
+ "sourceHandle": "clip2",
+ "targetHandle": "clip2"
+ },
+ {
+ "id": "reactflow__edge-e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fdclip-27215391-b20e-412a-b854-7fa5927f5437clip",
+ "type": "default",
+ "source": "e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fd",
+ "target": "27215391-b20e-412a-b854-7fa5927f5437",
+ "sourceHandle": "clip",
+ "targetHandle": "clip"
+ },
+ {
+ "id": "reactflow__edge-e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fdclip2-6142b69a-323f-4ecd-a7e5-67dc61349c51clip2",
+ "type": "default",
+ "source": "e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fd",
+ "target": "6142b69a-323f-4ecd-a7e5-67dc61349c51",
+ "sourceHandle": "clip2",
+ "targetHandle": "clip2"
+ },
+ {
+ "id": "reactflow__edge-6142b69a-323f-4ecd-a7e5-67dc61349c51conditioning-8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7apositive_conditioning",
+ "type": "default",
+ "source": "6142b69a-323f-4ecd-a7e5-67dc61349c51",
+ "target": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "sourceHandle": "conditioning",
+ "targetHandle": "positive_conditioning"
+ },
+ {
+ "id": "reactflow__edge-27215391-b20e-412a-b854-7fa5927f5437conditioning-8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7anegative_conditioning",
+ "type": "default",
+ "source": "27215391-b20e-412a-b854-7fa5927f5437",
+ "target": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "sourceHandle": "conditioning",
+ "targetHandle": "negative_conditioning"
+ },
+ {
+ "id": "reactflow__edge-e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fdunet-8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7aunet",
+ "type": "default",
+ "source": "e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fd",
+ "target": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "sourceHandle": "unet",
+ "targetHandle": "unet"
+ },
+ {
+ "id": "reactflow__edge-100b3143-b3fb-4ff3-bb3c-8d4d3f89ae3avae-117f982a-03da-49b1-bf9f-29711160ac02vae",
+ "type": "default",
+ "source": "100b3143-b3fb-4ff3-bb3c-8d4d3f89ae3a",
+ "target": "117f982a-03da-49b1-bf9f-29711160ac02",
+ "sourceHandle": "vae",
+ "targetHandle": "vae"
+ },
+ {
+ "id": "reactflow__edge-100b3143-b3fb-4ff3-bb3c-8d4d3f89ae3avae-c3b60a50-8039-4924-90e3-8c608e1fecb5vae",
+ "type": "default",
+ "source": "100b3143-b3fb-4ff3-bb3c-8d4d3f89ae3a",
+ "target": "c3b60a50-8039-4924-90e3-8c608e1fecb5",
+ "sourceHandle": "vae",
+ "targetHandle": "vae"
+ },
+ {
+ "id": "reactflow__edge-e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fdclip-6142b69a-323f-4ecd-a7e5-67dc61349c51clip",
+ "type": "default",
+ "source": "e277e4b7-01cd-4daa-86ab-7bfa3cdcd9fd",
+ "target": "6142b69a-323f-4ecd-a7e5-67dc61349c51",
+ "sourceHandle": "clip",
+ "targetHandle": "clip"
+ },
+ {
+ "id": "reactflow__edge-041c59cc-f9e4-4dc9-8b31-84648c5f3ebeimage-7671553a-cd4b-4e25-8332-9d5667e64493image",
+ "type": "default",
+ "source": "041c59cc-f9e4-4dc9-8b31-84648c5f3ebe",
+ "target": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "sourceHandle": "image",
+ "targetHandle": "image"
+ },
+ {
+ "id": "reactflow__edge-f0cd0d2f-9614-43f7-9944-a75b8d5ccd65image-041c59cc-f9e4-4dc9-8b31-84648c5f3ebeimage",
+ "type": "default",
+ "source": "f0cd0d2f-9614-43f7-9944-a75b8d5ccd65",
+ "target": "041c59cc-f9e4-4dc9-8b31-84648c5f3ebe",
+ "sourceHandle": "image",
+ "targetHandle": "image"
+ },
+ {
+ "id": "reactflow__edge-53c2d5fd-863d-4950-93e0-628f3d61b493image-f0cd0d2f-9614-43f7-9944-a75b8d5ccd65image",
+ "type": "default",
+ "source": "53c2d5fd-863d-4950-93e0-628f3d61b493",
+ "target": "f0cd0d2f-9614-43f7-9944-a75b8d5ccd65",
+ "sourceHandle": "image",
+ "targetHandle": "image"
+ },
+ {
+ "id": "reactflow__edge-5ca87ace-edf9-49c7-a424-cd42416b86a7image-53c2d5fd-863d-4950-93e0-628f3d61b493image",
+ "type": "default",
+ "source": "5ca87ace-edf9-49c7-a424-cd42416b86a7",
+ "target": "53c2d5fd-863d-4950-93e0-628f3d61b493",
+ "sourceHandle": "image",
+ "targetHandle": "image"
+ },
+ {
+ "id": "reactflow__edge-8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7alatents-c3b60a50-8039-4924-90e3-8c608e1fecb5latents",
+ "type": "default",
+ "source": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "target": "c3b60a50-8039-4924-90e3-8c608e1fecb5",
+ "sourceHandle": "latents",
+ "targetHandle": "latents"
+ },
+ {
+ "id": "reactflow__edge-117f982a-03da-49b1-bf9f-29711160ac02latents-8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7alatents",
+ "type": "default",
+ "source": "117f982a-03da-49b1-bf9f-29711160ac02",
+ "target": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "sourceHandle": "latents",
+ "targetHandle": "latents"
+ },
+ {
+ "id": "reactflow__edge-8923451b-5a27-4395-b7f2-dce875fca6f5noise-8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7anoise",
+ "type": "default",
+ "source": "8923451b-5a27-4395-b7f2-dce875fca6f5",
+ "target": "8dba0d37-cd2e-4fe5-ae9f-5464b85a8a7a",
+ "sourceHandle": "noise",
+ "targetHandle": "noise"
+ },
+ {
+ "id": "reactflow__edge-d350feac-9686-4e0d-bd46-a96bd2630818value-7dbb756b-7d79-431c-a46d-d8f7b082c127value",
+ "type": "default",
+ "source": "d350feac-9686-4e0d-bd46-a96bd2630818",
+ "target": "7dbb756b-7d79-431c-a46d-d8f7b082c127",
+ "sourceHandle": "value",
+ "targetHandle": "value",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-5b256f14-caab-40ff-b8f0-9679cd542163value-f5d9bf3b-2646-4b17-9894-20fd2b4218eavalue",
+ "type": "default",
+ "source": "5b256f14-caab-40ff-b8f0-9679cd542163",
+ "target": "f5d9bf3b-2646-4b17-9894-20fd2b4218ea",
+ "sourceHandle": "value",
+ "targetHandle": "value",
+ "hidden": true
+ },
+ {
+ "id": "reactflow__edge-7671553a-cd4b-4e25-8332-9d5667e64493height-8923451b-5a27-4395-b7f2-dce875fca6f5height",
+ "type": "default",
+ "source": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "target": "8923451b-5a27-4395-b7f2-dce875fca6f5",
+ "sourceHandle": "height",
+ "targetHandle": "height"
+ },
+ {
+ "id": "reactflow__edge-7671553a-cd4b-4e25-8332-9d5667e64493width-8923451b-5a27-4395-b7f2-dce875fca6f5width",
+ "type": "default",
+ "source": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "target": "8923451b-5a27-4395-b7f2-dce875fca6f5",
+ "sourceHandle": "width",
+ "targetHandle": "width"
+ },
+ {
+ "id": "reactflow__edge-7671553a-cd4b-4e25-8332-9d5667e64493image-117f982a-03da-49b1-bf9f-29711160ac02image",
+ "type": "default",
+ "source": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "target": "117f982a-03da-49b1-bf9f-29711160ac02",
+ "sourceHandle": "image",
+ "targetHandle": "image"
+ },
+ {
+ "id": "reactflow__edge-7671553a-cd4b-4e25-8332-9d5667e64493image-be4082d6-e238-40ea-a9df-fc0d725e8895image",
+ "type": "default",
+ "source": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "target": "be4082d6-e238-40ea-a9df-fc0d725e8895",
+ "sourceHandle": "image",
+ "targetHandle": "image"
+ },
+ {
+ "id": "reactflow__edge-7dbb756b-7d79-431c-a46d-d8f7b082c127value-7671553a-cd4b-4e25-8332-9d5667e64493height",
+ "type": "default",
+ "source": "7dbb756b-7d79-431c-a46d-d8f7b082c127",
+ "target": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "sourceHandle": "value",
+ "targetHandle": "height"
+ },
+ {
+ "id": "reactflow__edge-f5d9bf3b-2646-4b17-9894-20fd2b4218eavalue-7671553a-cd4b-4e25-8332-9d5667e64493width",
+ "type": "default",
+ "source": "f5d9bf3b-2646-4b17-9894-20fd2b4218ea",
+ "target": "7671553a-cd4b-4e25-8332-9d5667e64493",
+ "sourceHandle": "value",
+ "targetHandle": "width"
+ },
+ {
+ "id": "reactflow__edge-5ca87ace-edf9-49c7-a424-cd42416b86a7height-d350feac-9686-4e0d-bd46-a96bd2630818a",
+ "type": "default",
+ "source": "5ca87ace-edf9-49c7-a424-cd42416b86a7",
+ "target": "d350feac-9686-4e0d-bd46-a96bd2630818",
+ "sourceHandle": "height",
+ "targetHandle": "a"
+ },
+ {
+ "id": "reactflow__edge-1ba845a6-eb88-49a1-a490-5fe6754f3ec9value-d350feac-9686-4e0d-bd46-a96bd2630818b",
+ "type": "default",
+ "source": "1ba845a6-eb88-49a1-a490-5fe6754f3ec9",
+ "target": "d350feac-9686-4e0d-bd46-a96bd2630818",
+ "sourceHandle": "value",
+ "targetHandle": "b"
+ },
+ {
+ "id": "reactflow__edge-1ba845a6-eb88-49a1-a490-5fe6754f3ec9value-5b256f14-caab-40ff-b8f0-9679cd542163b",
+ "type": "default",
+ "source": "1ba845a6-eb88-49a1-a490-5fe6754f3ec9",
+ "target": "5b256f14-caab-40ff-b8f0-9679cd542163",
+ "sourceHandle": "value",
+ "targetHandle": "b"
+ },
+ {
+ "id": "reactflow__edge-5ca87ace-edf9-49c7-a424-cd42416b86a7width-5b256f14-caab-40ff-b8f0-9679cd542163a",
+ "type": "default",
+ "source": "5ca87ace-edf9-49c7-a424-cd42416b86a7",
+ "target": "5b256f14-caab-40ff-b8f0-9679cd542163",
+ "sourceHandle": "width",
+ "targetHandle": "a"
+ }
+ ]
+}
diff --git a/invokeai/app/services/workflow_records/default_workflows/Prompt from File.json b/invokeai/app/services/workflow_records/default_workflows/Prompt from File.json
index 765b236714..de902bc77e 100644
--- a/invokeai/app/services/workflow_records/default_workflows/Prompt from File.json
+++ b/invokeai/app/services/workflow_records/default_workflows/Prompt from File.json
@@ -2,7 +2,7 @@
"name": "Prompt from File",
"author": "InvokeAI",
"description": "Sample workflow using Prompt from File node",
- "version": "2.0.0",
+ "version": "2.1.0",
"contact": "invoke@invoke.ai",
"tags": "text2image, prompt from file, default",
"notes": "",
@@ -37,16 +37,68 @@
}
],
"meta": {
- "category": "default",
- "version": "3.0.0"
+ "version": "3.0.0",
+ "category": "default"
},
"nodes": [
{
- "id": "c2eaf1ba-5708-4679-9e15-945b8b432692",
+ "id": "491ec988-3c77-4c37-af8a-39a0c4e7a2a1",
"type": "invocation",
"data": {
- "id": "c2eaf1ba-5708-4679-9e15-945b8b432692",
- "version": "1.1.1",
+ "id": "491ec988-3c77-4c37-af8a-39a0c4e7a2a1",
+ "version": "1.3.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "l2i",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": false
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 2037.861329274915,
+ "y": -329.8393457509562
+ }
+ },
+ {
+ "id": "fc9d0e35-a6de-4a19-84e1-c72497c823f6",
+ "type": "invocation",
+ "data": {
+ "id": "fc9d0e35-a6de-4a19-84e1-c72497c823f6",
+ "version": "1.2.0",
"nodePack": "invokeai",
"label": "",
"notes": "",
@@ -60,6 +112,69 @@
"clip": {
"name": "clip",
"label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 925,
+ "y": -275
+ }
+ },
+ {
+ "id": "d6353b7f-b447-4e17-8f2e-80a88c91d426",
+ "type": "invocation",
+ "data": {
+ "id": "d6353b7f-b447-4e17-8f2e-80a88c91d426",
+ "version": "1.0.3",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "main_model_loader",
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 0,
+ "y": -375
+ }
+ },
+ {
+ "id": "c2eaf1ba-5708-4679-9e15-945b8b432692",
+ "type": "invocation",
+ "data": {
+ "id": "c2eaf1ba-5708-4679-9e15-945b8b432692",
+ "version": "1.2.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "compel",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "",
+ "value": ""
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
}
},
"isOpen": false,
@@ -141,61 +256,6 @@
"y": -400
}
},
- {
- "id": "d6353b7f-b447-4e17-8f2e-80a88c91d426",
- "type": "invocation",
- "data": {
- "id": "d6353b7f-b447-4e17-8f2e-80a88c91d426",
- "version": "1.0.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "main_model_loader",
- "inputs": {
- "model": {
- "name": "model",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 0,
- "y": -375
- }
- },
- {
- "id": "fc9d0e35-a6de-4a19-84e1-c72497c823f6",
- "type": "invocation",
- "data": {
- "id": "fc9d0e35-a6de-4a19-84e1-c72497c823f6",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "compel",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "",
- "value": ""
- },
- "clip": {
- "name": "clip",
- "label": ""
- }
- },
- "isOpen": false,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 925,
- "y": -275
- }
- },
{
"id": "0eb5f3f5-1b91-49eb-9ef0-41d67c7eae77",
"type": "invocation",
@@ -268,53 +328,6 @@
"y": -50
}
},
- {
- "id": "491ec988-3c77-4c37-af8a-39a0c4e7a2a1",
- "type": "invocation",
- "data": {
- "id": "491ec988-3c77-4c37-af8a-39a0c4e7a2a1",
- "version": "1.2.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "l2i",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "latents": {
- "name": "latents",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": false
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 2037.861329274915,
- "y": -329.8393457509562
- }
- },
{
"id": "2fb1577f-0a56-4f12-8711-8afcaaaf1d5e",
"type": "invocation",
diff --git a/invokeai/app/services/workflow_records/default_workflows/Text to Image - SD1.5.json b/invokeai/app/services/workflow_records/default_workflows/Text to Image - SD1.5.json
index d3d52150bc..65f894724c 100644
--- a/invokeai/app/services/workflow_records/default_workflows/Text to Image - SD1.5.json
+++ b/invokeai/app/services/workflow_records/default_workflows/Text to Image - SD1.5.json
@@ -2,7 +2,7 @@
"name": "Text to Image - SD1.5",
"author": "InvokeAI",
"description": "Sample text to image workflow for Stable Diffusion 1.5/2",
- "version": "2.0.0",
+ "version": "2.1.0",
"contact": "invoke@invoke.ai",
"tags": "text2image, SD1.5, SD2, default",
"notes": "",
@@ -33,16 +33,127 @@
}
],
"meta": {
- "category": "default",
- "version": "3.0.0"
+ "version": "3.0.0",
+ "category": "default"
},
"nodes": [
+ {
+ "id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
+ "type": "invocation",
+ "data": {
+ "id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
+ "version": "1.3.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "l2i",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": true
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": false,
+ "useCache": true
+ },
+ "position": {
+ "x": 1800,
+ "y": 25
+ }
+ },
+ {
+ "id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
+ "type": "invocation",
+ "data": {
+ "id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
+ "version": "1.2.0",
+ "nodePack": "invokeai",
+ "label": "Positive Compel Prompt",
+ "notes": "",
+ "type": "compel",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "Positive Prompt",
+ "value": "Super cute tiger cub, national geographic award-winning photograph"
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 1000,
+ "y": 25
+ }
+ },
+ {
+ "id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
+ "type": "invocation",
+ "data": {
+ "id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
+ "version": "1.0.3",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "main_model_loader",
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 600,
+ "y": 25
+ }
+ },
{
"id": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
"type": "invocation",
"data": {
"id": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
- "version": "1.1.1",
+ "version": "1.2.0",
"nodePack": "invokeai",
"label": "Negative Compel Prompt",
"notes": "",
@@ -56,6 +167,10 @@
"clip": {
"name": "clip",
"label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
}
},
"isOpen": true,
@@ -108,61 +223,6 @@
"y": 325
}
},
- {
- "id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
- "type": "invocation",
- "data": {
- "id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
- "version": "1.0.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "main_model_loader",
- "inputs": {
- "model": {
- "name": "model",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 600,
- "y": 25
- }
- },
- {
- "id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
- "type": "invocation",
- "data": {
- "id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "Positive Compel Prompt",
- "notes": "",
- "type": "compel",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "Positive Prompt",
- "value": "Super cute tiger cub, national geographic award-winning photograph"
- },
- "clip": {
- "name": "clip",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 1000,
- "y": 25
- }
- },
{
"id": "ea94bc37-d995-4a83-aa99-4af42479f2f2",
"type": "invocation",
@@ -280,53 +340,6 @@
"x": 1400,
"y": 25
}
- },
- {
- "id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
- "type": "invocation",
- "data": {
- "id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
- "version": "1.2.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "l2i",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "latents": {
- "name": "latents",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": true
- }
- },
- "isOpen": true,
- "isIntermediate": false,
- "useCache": true
- },
- "position": {
- "x": 1800,
- "y": 25
- }
}
],
"edges": [
diff --git a/invokeai/app/services/workflow_records/default_workflows/Text to Image - SDXL.json b/invokeai/app/services/workflow_records/default_workflows/Text to Image - SDXL.json
index 1527bbceb1..0f4777169e 100644
--- a/invokeai/app/services/workflow_records/default_workflows/Text to Image - SDXL.json
+++ b/invokeai/app/services/workflow_records/default_workflows/Text to Image - SDXL.json
@@ -2,7 +2,7 @@
"name": "Text to Image - SDXL",
"author": "InvokeAI",
"description": "Sample text to image workflow for SDXL",
- "version": "2.0.0",
+ "version": "2.1.0",
"contact": "invoke@invoke.ai",
"tags": "text2image, SDXL, default",
"notes": "",
@@ -29,10 +29,271 @@
}
],
"meta": {
- "category": "default",
- "version": "3.0.0"
+ "version": "3.0.0",
+ "category": "default"
},
"nodes": [
+ {
+ "id": "0093692f-9cf4-454d-a5b8-62f0e3eb3bb8",
+ "type": "invocation",
+ "data": {
+ "id": "0093692f-9cf4-454d-a5b8-62f0e3eb3bb8",
+ "version": "1.0.3",
+ "label": "",
+ "notes": "",
+ "type": "vae_loader",
+ "inputs": {
+ "vae_model": {
+ "name": "vae_model",
+ "label": "VAE (use the FP16 model)",
+ "value": {
+ "key": "f20f9e5c-1bce-4c46-a84d-34ebfa7df069",
+ "hash": "blake3:9705ab1c31fa96b308734214fb7571a958621c7a9247eed82b7d277145f8d9fa",
+ "name": "sdxl-vae-fp16-fix",
+ "base": "sdxl",
+ "type": "vae"
+ }
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 375,
+ "y": -225
+ }
+ },
+ {
+ "id": "63e91020-83b2-4f35-b174-ad9692aabb48",
+ "type": "invocation",
+ "data": {
+ "id": "63e91020-83b2-4f35-b174-ad9692aabb48",
+ "version": "1.3.0",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "l2i",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": false
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": false,
+ "useCache": false
+ },
+ "position": {
+ "x": 1475,
+ "y": -500
+ }
+ },
+ {
+ "id": "faf965a4-7530-427b-b1f3-4ba6505c2a08",
+ "type": "invocation",
+ "data": {
+ "id": "faf965a4-7530-427b-b1f3-4ba6505c2a08",
+ "version": "1.2.0",
+ "nodePack": "invokeai",
+ "label": "SDXL Positive Compel Prompt",
+ "notes": "",
+ "type": "sdxl_compel_prompt",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "Positive Prompt",
+ "value": ""
+ },
+ "style": {
+ "name": "style",
+ "label": "Positive Style",
+ "value": ""
+ },
+ "original_width": {
+ "name": "original_width",
+ "label": "",
+ "value": 1024
+ },
+ "original_height": {
+ "name": "original_height",
+ "label": "",
+ "value": 1024
+ },
+ "crop_top": {
+ "name": "crop_top",
+ "label": "",
+ "value": 0
+ },
+ "crop_left": {
+ "name": "crop_left",
+ "label": "",
+ "value": 0
+ },
+ "target_width": {
+ "name": "target_width",
+ "label": "",
+ "value": 1024
+ },
+ "target_height": {
+ "name": "target_height",
+ "label": "",
+ "value": 1024
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "clip2": {
+ "name": "clip2",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 750,
+ "y": -175
+ }
+ },
+ {
+ "id": "30d3289c-773c-4152-a9d2-bd8a99c8fd22",
+ "type": "invocation",
+ "data": {
+ "id": "30d3289c-773c-4152-a9d2-bd8a99c8fd22",
+ "version": "1.0.3",
+ "nodePack": "invokeai",
+ "label": "",
+ "notes": "",
+ "type": "sdxl_model_loader",
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": "",
+ "value": {
+ "key": "4a63b226-e8ff-4da4-854e-0b9f04b562ba",
+ "hash": "blake3:d279309ea6e5ee6e8fd52504275865cc280dac71cbf528c5b07c98b888bddaba",
+ "name": "dreamshaper-xl-v2-turbo",
+ "base": "sdxl",
+ "type": "main"
+ }
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 375,
+ "y": -500
+ }
+ },
+ {
+ "id": "3193ad09-a7c2-4bf4-a3a9-1c61cc33a204",
+ "type": "invocation",
+ "data": {
+ "id": "3193ad09-a7c2-4bf4-a3a9-1c61cc33a204",
+ "version": "1.2.0",
+ "nodePack": "invokeai",
+ "label": "SDXL Negative Compel Prompt",
+ "notes": "",
+ "type": "sdxl_compel_prompt",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "Negative Prompt",
+ "value": ""
+ },
+ "style": {
+ "name": "style",
+ "label": "Negative Style",
+ "value": ""
+ },
+ "original_width": {
+ "name": "original_width",
+ "label": "",
+ "value": 1024
+ },
+ "original_height": {
+ "name": "original_height",
+ "label": "",
+ "value": 1024
+ },
+ "crop_top": {
+ "name": "crop_top",
+ "label": "",
+ "value": 0
+ },
+ "crop_left": {
+ "name": "crop_left",
+ "label": "",
+ "value": 0
+ },
+ "target_width": {
+ "name": "target_width",
+ "label": "",
+ "value": 1024
+ },
+ "target_height": {
+ "name": "target_height",
+ "label": "",
+ "value": 1024
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "clip2": {
+ "name": "clip2",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 750,
+ "y": 200
+ }
+ },
{
"id": "3774ec24-a69e-4254-864c-097d07a6256f",
"type": "invocation",
@@ -88,75 +349,6 @@
"y": -125
}
},
- {
- "id": "3193ad09-a7c2-4bf4-a3a9-1c61cc33a204",
- "type": "invocation",
- "data": {
- "id": "3193ad09-a7c2-4bf4-a3a9-1c61cc33a204",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "SDXL Negative Compel Prompt",
- "notes": "",
- "type": "sdxl_compel_prompt",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "Negative Prompt",
- "value": ""
- },
- "style": {
- "name": "style",
- "label": "Negative Style",
- "value": ""
- },
- "original_width": {
- "name": "original_width",
- "label": "",
- "value": 1024
- },
- "original_height": {
- "name": "original_height",
- "label": "",
- "value": 1024
- },
- "crop_top": {
- "name": "crop_top",
- "label": "",
- "value": 0
- },
- "crop_left": {
- "name": "crop_left",
- "label": "",
- "value": 0
- },
- "target_width": {
- "name": "target_width",
- "label": "",
- "value": 1024
- },
- "target_height": {
- "name": "target_height",
- "label": "",
- "value": 1024
- },
- "clip": {
- "name": "clip",
- "label": ""
- },
- "clip2": {
- "name": "clip2",
- "label": ""
- }
- },
- "isOpen": false,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 750,
- "y": 200
- }
- },
{
"id": "55705012-79b9-4aac-9f26-c0b10309785b",
"type": "invocation",
@@ -229,154 +421,6 @@
"y": -50
}
},
- {
- "id": "30d3289c-773c-4152-a9d2-bd8a99c8fd22",
- "type": "invocation",
- "data": {
- "id": "30d3289c-773c-4152-a9d2-bd8a99c8fd22",
- "version": "1.0.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "sdxl_model_loader",
- "inputs": {
- "model": {
- "name": "model",
- "label": "",
- "value": {
- "key": "4a63b226-e8ff-4da4-854e-0b9f04b562ba",
- "hash": "blake3:d279309ea6e5ee6e8fd52504275865cc280dac71cbf528c5b07c98b888bddaba",
- "name": "dreamshaper-xl-v2-turbo",
- "base": "sdxl",
- "type": "main"
- }
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 375,
- "y": -500
- }
- },
- {
- "id": "faf965a4-7530-427b-b1f3-4ba6505c2a08",
- "type": "invocation",
- "data": {
- "id": "faf965a4-7530-427b-b1f3-4ba6505c2a08",
- "version": "1.1.1",
- "nodePack": "invokeai",
- "label": "SDXL Positive Compel Prompt",
- "notes": "",
- "type": "sdxl_compel_prompt",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "Positive Prompt",
- "value": ""
- },
- "style": {
- "name": "style",
- "label": "Positive Style",
- "value": ""
- },
- "original_width": {
- "name": "original_width",
- "label": "",
- "value": 1024
- },
- "original_height": {
- "name": "original_height",
- "label": "",
- "value": 1024
- },
- "crop_top": {
- "name": "crop_top",
- "label": "",
- "value": 0
- },
- "crop_left": {
- "name": "crop_left",
- "label": "",
- "value": 0
- },
- "target_width": {
- "name": "target_width",
- "label": "",
- "value": 1024
- },
- "target_height": {
- "name": "target_height",
- "label": "",
- "value": 1024
- },
- "clip": {
- "name": "clip",
- "label": ""
- },
- "clip2": {
- "name": "clip2",
- "label": ""
- }
- },
- "isOpen": false,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 750,
- "y": -175
- }
- },
- {
- "id": "63e91020-83b2-4f35-b174-ad9692aabb48",
- "type": "invocation",
- "data": {
- "id": "63e91020-83b2-4f35-b174-ad9692aabb48",
- "version": "1.2.2",
- "nodePack": "invokeai",
- "label": "",
- "notes": "",
- "type": "l2i",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "latents": {
- "name": "latents",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": false
- }
- },
- "isOpen": true,
- "isIntermediate": false,
- "useCache": false
- },
- "position": {
- "x": 1475,
- "y": -500
- }
- },
{
"id": "50a36525-3c0a-4cc5-977c-e4bfc3fd6dfb",
"type": "invocation",
@@ -464,37 +508,6 @@
"y": -500
}
},
- {
- "id": "0093692f-9cf4-454d-a5b8-62f0e3eb3bb8",
- "type": "invocation",
- "data": {
- "id": "0093692f-9cf4-454d-a5b8-62f0e3eb3bb8",
- "version": "1.0.2",
- "label": "",
- "notes": "",
- "type": "vae_loader",
- "inputs": {
- "vae_model": {
- "name": "vae_model",
- "label": "VAE (use the FP16 model)",
- "value": {
- "key": "f20f9e5c-1bce-4c46-a84d-34ebfa7df069",
- "hash": "blake3:9705ab1c31fa96b308734214fb7571a958621c7a9247eed82b7d277145f8d9fa",
- "name": "sdxl-vae-fp16-fix",
- "base": "sdxl",
- "type": "vae"
- }
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 375,
- "y": -225
- }
- },
{
"id": "ade2c0d3-0384-4157-b39b-29ce429cfa15",
"type": "invocation",
diff --git a/invokeai/app/services/workflow_records/default_workflows/Text to Image with LoRA.json b/invokeai/app/services/workflow_records/default_workflows/Text to Image with LoRA.json
index 6df02b675d..b4df4b921c 100644
--- a/invokeai/app/services/workflow_records/default_workflows/Text to Image with LoRA.json
+++ b/invokeai/app/services/workflow_records/default_workflows/Text to Image with LoRA.json
@@ -2,7 +2,7 @@
"name": "Text to Image with LoRA",
"author": "InvokeAI",
"description": "Simple text to image workflow with a LoRA",
- "version": "2.0.0",
+ "version": "2.1.0",
"contact": "invoke@invoke.ai",
"tags": "text to image, lora, default",
"notes": "",
@@ -37,28 +37,83 @@
}
],
"meta": {
- "category": "default",
- "version": "3.0.0"
+ "version": "3.0.0",
+ "category": "default"
},
"nodes": [
{
- "id": "85b77bb2-c67a-416a-b3e8-291abe746c44",
+ "id": "a9683c0a-6b1f-4a5e-8187-c57e764b3400",
"type": "invocation",
"data": {
- "id": "85b77bb2-c67a-416a-b3e8-291abe746c44",
- "version": "1.1.1",
+ "id": "a9683c0a-6b1f-4a5e-8187-c57e764b3400",
+ "version": "1.3.0",
+ "label": "",
+ "notes": "",
+ "type": "l2i",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": false
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": false,
+ "useCache": true
+ },
+ "position": {
+ "x": 4450,
+ "y": -550
+ }
+ },
+ {
+ "id": "c3fa6872-2599-4a82-a596-b3446a66cf8b",
+ "type": "invocation",
+ "data": {
+ "id": "c3fa6872-2599-4a82-a596-b3446a66cf8b",
+ "version": "1.2.0",
"label": "",
"notes": "",
"type": "compel",
"inputs": {
"prompt": {
"name": "prompt",
- "label": "Negative Prompt",
- "value": ""
+ "label": "Positive Prompt",
+ "value": "super cute tiger cub"
},
"clip": {
"name": "clip",
"label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
}
},
"isOpen": true,
@@ -67,31 +122,7 @@
},
"position": {
"x": 3425,
- "y": -300
- }
- },
- {
- "id": "24e9d7ed-4836-4ec4-8f9e-e747721f9818",
- "type": "invocation",
- "data": {
- "id": "24e9d7ed-4836-4ec4-8f9e-e747721f9818",
- "version": "1.0.2",
- "label": "",
- "notes": "",
- "type": "main_model_loader",
- "inputs": {
- "model": {
- "name": "model",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": 2500,
- "y": -600
+ "y": -575
}
},
{
@@ -99,7 +130,7 @@
"type": "invocation",
"data": {
"id": "c41e705b-f2e3-4d1a-83c4-e34bb9344966",
- "version": "1.0.2",
+ "version": "1.0.3",
"label": "",
"notes": "",
"type": "lora_loader",
@@ -132,23 +163,51 @@
}
},
{
- "id": "c3fa6872-2599-4a82-a596-b3446a66cf8b",
+ "id": "24e9d7ed-4836-4ec4-8f9e-e747721f9818",
"type": "invocation",
"data": {
- "id": "c3fa6872-2599-4a82-a596-b3446a66cf8b",
- "version": "1.1.1",
+ "id": "24e9d7ed-4836-4ec4-8f9e-e747721f9818",
+ "version": "1.0.3",
+ "label": "",
+ "notes": "",
+ "type": "main_model_loader",
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": 2500,
+ "y": -600
+ }
+ },
+ {
+ "id": "85b77bb2-c67a-416a-b3e8-291abe746c44",
+ "type": "invocation",
+ "data": {
+ "id": "85b77bb2-c67a-416a-b3e8-291abe746c44",
+ "version": "1.2.0",
"label": "",
"notes": "",
"type": "compel",
"inputs": {
"prompt": {
"name": "prompt",
- "label": "Positive Prompt",
- "value": "super cute tiger cub"
+ "label": "Negative Prompt",
+ "value": ""
},
"clip": {
"name": "clip",
"label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
}
},
"isOpen": true,
@@ -157,7 +216,7 @@
},
"position": {
"x": 3425,
- "y": -575
+ "y": -300
}
},
{
@@ -315,52 +374,6 @@
"x": 3425,
"y": 0
}
- },
- {
- "id": "a9683c0a-6b1f-4a5e-8187-c57e764b3400",
- "type": "invocation",
- "data": {
- "id": "a9683c0a-6b1f-4a5e-8187-c57e764b3400",
- "version": "1.2.2",
- "label": "",
- "notes": "",
- "type": "l2i",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "latents": {
- "name": "latents",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": false
- }
- },
- "isOpen": true,
- "isIntermediate": false,
- "useCache": true
- },
- "position": {
- "x": 4450,
- "y": -550
- }
}
],
"edges": [
diff --git a/invokeai/app/services/workflow_records/default_workflows/Tiled Upscaling (Beta).json b/invokeai/app/services/workflow_records/default_workflows/Tiled Upscaling (Beta).json
index bb0e9062e4..426fe49c41 100644
--- a/invokeai/app/services/workflow_records/default_workflows/Tiled Upscaling (Beta).json
+++ b/invokeai/app/services/workflow_records/default_workflows/Tiled Upscaling (Beta).json
@@ -2,7 +2,7 @@
"name": "Tiled Upscaling (Beta)",
"author": "Invoke",
"description": "A workflow to upscale an input image with tiled upscaling. ",
- "version": "2.0.0",
+ "version": "2.1.0",
"contact": "invoke@invoke.ai",
"tags": "tiled, upscaling, sd1.5",
"notes": "",
@@ -41,10 +41,318 @@
}
],
"meta": {
- "category": "default",
- "version": "3.0.0"
+ "version": "3.0.0",
+ "category": "default"
},
"nodes": [
+ {
+ "id": "2ff466b8-5e2a-4d8f-923a-a3884c7ecbc5",
+ "type": "invocation",
+ "data": {
+ "id": "2ff466b8-5e2a-4d8f-923a-a3884c7ecbc5",
+ "version": "1.0.3",
+ "label": "",
+ "notes": "",
+ "type": "main_model_loader",
+ "inputs": {
+ "model": {
+ "name": "model",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -4514.466823162653,
+ "y": -1235.7908800002283
+ }
+ },
+ {
+ "id": "287f134f-da8d-41d1-884e-5940e8f7b816",
+ "type": "invocation",
+ "data": {
+ "id": "287f134f-da8d-41d1-884e-5940e8f7b816",
+ "version": "1.4.1",
+ "label": "",
+ "notes": "",
+ "type": "ip_adapter",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "ip_adapter_model": {
+ "name": "ip_adapter_model",
+ "label": "IP-Adapter Model (select ip_adapter_sd15)",
+ "value": {
+ "key": "1cc210bb-4d0a-4312-b36c-b5d46c43768e",
+ "hash": "blake3:3d669dffa7471b357b4df088b99ffb6bf4d4383d5e0ef1de5ec1c89728a3d5a5",
+ "name": "ip_adapter_sd15",
+ "base": "sd-1",
+ "type": "ip_adapter"
+ }
+ },
+ "clip_vision_model": {
+ "name": "clip_vision_model",
+ "label": "",
+ "value": "ViT-H"
+ },
+ "weight": {
+ "name": "weight",
+ "label": "",
+ "value": 0.2
+ },
+ "method": {
+ "name": "method",
+ "label": "",
+ "value": "full"
+ },
+ "begin_step_percent": {
+ "name": "begin_step_percent",
+ "label": "",
+ "value": 0
+ },
+ "end_step_percent": {
+ "name": "end_step_percent",
+ "label": "",
+ "value": 1
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -2855.8555540799207,
+ "y": -183.58854843775742
+ }
+ },
+ {
+ "id": "b76fe66f-7884-43ad-b72c-fadc81d7a73c",
+ "type": "invocation",
+ "data": {
+ "id": "b76fe66f-7884-43ad-b72c-fadc81d7a73c",
+ "version": "1.3.0",
+ "label": "",
+ "notes": "",
+ "type": "l2i",
+ "inputs": {
+ "board": {
+ "name": "board",
+ "label": ""
+ },
+ "metadata": {
+ "name": "metadata",
+ "label": ""
+ },
+ "latents": {
+ "name": "latents",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": false
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -1999.770193862987,
+ "y": -1075
+ }
+ },
+ {
+ "id": "d334f2da-016a-4524-9911-bdab85546888",
+ "type": "invocation",
+ "data": {
+ "id": "d334f2da-016a-4524-9911-bdab85546888",
+ "version": "1.1.2",
+ "label": "",
+ "notes": "",
+ "type": "controlnet",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "control_model": {
+ "name": "control_model",
+                "label": "Control Model (select control_v11f1e_sd15_tile)",
+ "value": {
+ "key": "773843c8-db1f-4502-8f65-59782efa7960",
+ "hash": "blake3:f0812e13758f91baf4e54b7dbb707b70642937d3b2098cd2b94cc36d3eba308e",
+ "name": "control_v11f1e_sd15_tile",
+ "base": "sd-1",
+ "type": "controlnet"
+ }
+ },
+ "control_weight": {
+ "name": "control_weight",
+ "label": "",
+ "value": 1
+ },
+ "begin_step_percent": {
+ "name": "begin_step_percent",
+ "label": "",
+ "value": 0
+ },
+ "end_step_percent": {
+ "name": "end_step_percent",
+ "label": "Structural Control",
+ "value": 1
+ },
+ "control_mode": {
+ "name": "control_mode",
+ "label": "",
+ "value": "more_control"
+ },
+ "resize_mode": {
+ "name": "resize_mode",
+ "label": "",
+ "value": "just_resize"
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -2481.9569385477016,
+ "y": -181.06590482739782
+ }
+ },
+ {
+ "id": "338b883c-3728-4f18-b3a6-6e7190c2f850",
+ "type": "invocation",
+ "data": {
+ "id": "338b883c-3728-4f18-b3a6-6e7190c2f850",
+ "version": "1.1.0",
+ "label": "",
+ "notes": "",
+ "type": "i2l",
+ "inputs": {
+ "image": {
+ "name": "image",
+ "label": ""
+ },
+ "vae": {
+ "name": "vae",
+ "label": ""
+ },
+ "tiled": {
+ "name": "tiled",
+ "label": "",
+ "value": false
+ },
+ "tile_size": {
+ "name": "tile_size",
+ "label": "",
+ "value": 0
+ },
+ "fp32": {
+ "name": "fp32",
+ "label": "",
+ "value": false
+ }
+ },
+ "isOpen": false,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -2908.4791167517287,
+ "y": -408.87504820159086
+ }
+ },
+ {
+ "id": "947c3f88-0305-4695-8355-df4abac64b1c",
+ "type": "invocation",
+ "data": {
+ "id": "947c3f88-0305-4695-8355-df4abac64b1c",
+ "version": "1.2.0",
+ "label": "",
+ "notes": "",
+ "type": "compel",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "",
+ "value": ""
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -4014.4136788915944,
+ "y": -968.5677253775948
+ }
+ },
+ {
+ "id": "9b2d8c58-ce8f-4162-a5a1-48de854040d6",
+ "type": "invocation",
+ "data": {
+ "id": "9b2d8c58-ce8f-4162-a5a1-48de854040d6",
+ "version": "1.2.0",
+ "label": "",
+ "notes": "",
+ "type": "compel",
+ "inputs": {
+ "prompt": {
+ "name": "prompt",
+ "label": "Positive Prompt",
+ "value": ""
+ },
+ "clip": {
+ "name": "clip",
+ "label": ""
+ },
+ "mask": {
+ "name": "mask",
+ "label": ""
+ }
+ },
+ "isOpen": true,
+ "isIntermediate": true,
+ "useCache": true
+ },
+ "position": {
+ "x": -4014.4136788915944,
+ "y": -1243.5677253775948
+ }
+ },
{
"id": "b875cae6-d8a3-4fdc-b969-4d53cbd03f9a",
"type": "invocation",
@@ -181,64 +489,6 @@
"y": 3.422855503409039
}
},
- {
- "id": "9b2d8c58-ce8f-4162-a5a1-48de854040d6",
- "type": "invocation",
- "data": {
- "id": "9b2d8c58-ce8f-4162-a5a1-48de854040d6",
- "version": "1.1.1",
- "label": "",
- "notes": "",
- "type": "compel",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "Positive Prompt",
- "value": ""
- },
- "clip": {
- "name": "clip",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": -4014.4136788915944,
- "y": -1243.5677253775948
- }
- },
- {
- "id": "947c3f88-0305-4695-8355-df4abac64b1c",
- "type": "invocation",
- "data": {
- "id": "947c3f88-0305-4695-8355-df4abac64b1c",
- "version": "1.1.1",
- "label": "",
- "notes": "",
- "type": "compel",
- "inputs": {
- "prompt": {
- "name": "prompt",
- "label": "",
- "value": ""
- },
- "clip": {
- "name": "clip",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": -4014.4136788915944,
- "y": -968.5677253775948
- }
- },
{
"id": "b3513fed-ed42-408d-b382-128fdb0de523",
"type": "invocation",
@@ -379,104 +629,6 @@
"y": -29.08699277598673
}
},
- {
- "id": "338b883c-3728-4f18-b3a6-6e7190c2f850",
- "type": "invocation",
- "data": {
- "id": "338b883c-3728-4f18-b3a6-6e7190c2f850",
- "version": "1.0.2",
- "label": "",
- "notes": "",
- "type": "i2l",
- "inputs": {
- "image": {
- "name": "image",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": false
- }
- },
- "isOpen": false,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": -2908.4791167517287,
- "y": -408.87504820159086
- }
- },
- {
- "id": "d334f2da-016a-4524-9911-bdab85546888",
- "type": "invocation",
- "data": {
- "id": "d334f2da-016a-4524-9911-bdab85546888",
- "version": "1.1.1",
- "label": "",
- "notes": "",
- "type": "controlnet",
- "inputs": {
- "image": {
- "name": "image",
- "label": ""
- },
- "control_model": {
- "name": "control_model",
- "label": "Control Model (select contro_v11f1e_sd15_tile)",
- "value": {
- "key": "773843c8-db1f-4502-8f65-59782efa7960",
- "hash": "blake3:f0812e13758f91baf4e54b7dbb707b70642937d3b2098cd2b94cc36d3eba308e",
- "name": "control_v11f1e_sd15_tile",
- "base": "sd-1",
- "type": "controlnet"
- }
- },
- "control_weight": {
- "name": "control_weight",
- "label": "",
- "value": 1
- },
- "begin_step_percent": {
- "name": "begin_step_percent",
- "label": "",
- "value": 0
- },
- "end_step_percent": {
- "name": "end_step_percent",
- "label": "Structural Control",
- "value": 1
- },
- "control_mode": {
- "name": "control_mode",
- "label": "",
- "value": "more_control"
- },
- "resize_mode": {
- "name": "resize_mode",
- "label": "",
- "value": "just_resize"
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": -2481.9569385477016,
- "y": -181.06590482739782
- }
- },
{
"id": "1011539e-85de-4e02-a003-0b22358491b8",
"type": "invocation",
@@ -563,52 +715,6 @@
"y": -1006.415909408244
}
},
- {
- "id": "b76fe66f-7884-43ad-b72c-fadc81d7a73c",
- "type": "invocation",
- "data": {
- "id": "b76fe66f-7884-43ad-b72c-fadc81d7a73c",
- "version": "1.2.2",
- "label": "",
- "notes": "",
- "type": "l2i",
- "inputs": {
- "board": {
- "name": "board",
- "label": ""
- },
- "metadata": {
- "name": "metadata",
- "label": ""
- },
- "latents": {
- "name": "latents",
- "label": ""
- },
- "vae": {
- "name": "vae",
- "label": ""
- },
- "tiled": {
- "name": "tiled",
- "label": "",
- "value": false
- },
- "fp32": {
- "name": "fp32",
- "label": "",
- "value": false
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": -1999.770193862987,
- "y": -1075
- }
- },
{
"id": "ab6f5dda-4b60-4ddf-99f2-f61fb5937527",
"type": "invocation",
@@ -779,56 +885,6 @@
"y": -78.2819050861178
}
},
- {
- "id": "287f134f-da8d-41d1-884e-5940e8f7b816",
- "type": "invocation",
- "data": {
- "id": "287f134f-da8d-41d1-884e-5940e8f7b816",
- "version": "1.2.2",
- "label": "",
- "notes": "",
- "type": "ip_adapter",
- "inputs": {
- "image": {
- "name": "image",
- "label": ""
- },
- "ip_adapter_model": {
- "name": "ip_adapter_model",
- "label": "IP-Adapter Model (select ip_adapter_sd15)",
- "value": {
- "key": "1cc210bb-4d0a-4312-b36c-b5d46c43768e",
- "hash": "blake3:3d669dffa7471b357b4df088b99ffb6bf4d4383d5e0ef1de5ec1c89728a3d5a5",
- "name": "ip_adapter_sd15",
- "base": "sd-1",
- "type": "ip_adapter"
- }
- },
- "weight": {
- "name": "weight",
- "label": "",
- "value": 0.2
- },
- "begin_step_percent": {
- "name": "begin_step_percent",
- "label": "",
- "value": 0
- },
- "end_step_percent": {
- "name": "end_step_percent",
- "label": "",
- "value": 1
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": -2855.8555540799207,
- "y": -183.58854843775742
- }
- },
{
"id": "1f86c8bf-06f9-4e28-abee-02f46f445ac4",
"type": "invocation",
@@ -899,30 +955,6 @@
"y": -41.810810454906914
}
},
- {
- "id": "2ff466b8-5e2a-4d8f-923a-a3884c7ecbc5",
- "type": "invocation",
- "data": {
- "id": "2ff466b8-5e2a-4d8f-923a-a3884c7ecbc5",
- "version": "1.0.2",
- "label": "",
- "notes": "",
- "type": "main_model_loader",
- "inputs": {
- "model": {
- "name": "model",
- "label": ""
- }
- },
- "isOpen": true,
- "isIntermediate": true,
- "useCache": true
- },
- "position": {
- "x": -4514.466823162653,
- "y": -1235.7908800002283
- }
- },
{
"id": "f5d9bf3b-2646-4b17-9894-20fd2b4218ea",
"type": "invocation",
diff --git a/invokeai/backend/image_util/lineart_anime.py b/invokeai/backend/image_util/lineart_anime.py
index 5185d92c51..33d16bb361 100644
--- a/invokeai/backend/image_util/lineart_anime.py
+++ b/invokeai/backend/image_util/lineart_anime.py
@@ -98,7 +98,7 @@ class UnetSkipConnectionBlock(nn.Module):
"""
super(UnetSkipConnectionBlock, self).__init__()
self.outermost = outermost
- if type(norm_layer) == functools.partial:
+ if isinstance(norm_layer, functools.partial):
use_bias = norm_layer.func == nn.InstanceNorm2d
else:
use_bias = norm_layer == nn.InstanceNorm2d
diff --git a/invokeai/backend/ip_adapter/ip_adapter.py b/invokeai/backend/ip_adapter/ip_adapter.py
index 75286f4733..87ce029a87 100644
--- a/invokeai/backend/ip_adapter/ip_adapter.py
+++ b/invokeai/backend/ip_adapter/ip_adapter.py
@@ -124,16 +124,14 @@ class IPAdapter(RawModel):
self.device, dtype=self.dtype
)
- def to(
- self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None, non_blocking: bool = False
- ):
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None):
if device is not None:
self.device = device
if dtype is not None:
self.dtype = dtype
- self._image_proj_model.to(device=self.device, dtype=self.dtype, non_blocking=non_blocking)
- self.attn_weights.to(device=self.device, dtype=self.dtype, non_blocking=non_blocking)
+ self._image_proj_model.to(device=self.device, dtype=self.dtype)
+ self.attn_weights.to(device=self.device, dtype=self.dtype)
def calc_size(self) -> int:
# HACK(ryand): Fix this issue with circular imports.
diff --git a/invokeai/backend/lora.py b/invokeai/backend/lora.py
index 9c669a4c78..8ef81915f1 100644
--- a/invokeai/backend/lora.py
+++ b/invokeai/backend/lora.py
@@ -11,7 +11,6 @@ from typing_extensions import Self
from invokeai.backend.model_manager import BaseModelType
from invokeai.backend.raw_model import RawModel
-from invokeai.backend.util.devices import TorchDevice
class LoRALayerBase:
@@ -57,14 +56,9 @@ class LoRALayerBase:
model_size += val.nelement() * val.element_size()
return model_size
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
if self.bias is not None:
- self.bias = self.bias.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.bias = self.bias.to(device=device, dtype=dtype)
# TODO: find and debug lora/locon with bias
@@ -106,19 +100,14 @@ class LoRALayer(LoRALayerBase):
model_size += val.nelement() * val.element_size()
return model_size
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
- super().to(device=device, dtype=dtype, non_blocking=non_blocking)
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
+ super().to(device=device, dtype=dtype)
- self.up = self.up.to(device=device, dtype=dtype, non_blocking=non_blocking)
- self.down = self.down.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.up = self.up.to(device=device, dtype=dtype)
+ self.down = self.down.to(device=device, dtype=dtype)
if self.mid is not None:
- self.mid = self.mid.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.mid = self.mid.to(device=device, dtype=dtype)
class LoHALayer(LoRALayerBase):
@@ -167,23 +156,18 @@ class LoHALayer(LoRALayerBase):
model_size += val.nelement() * val.element_size()
return model_size
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
super().to(device=device, dtype=dtype)
- self.w1_a = self.w1_a.to(device=device, dtype=dtype, non_blocking=non_blocking)
- self.w1_b = self.w1_b.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.w1_a = self.w1_a.to(device=device, dtype=dtype)
+ self.w1_b = self.w1_b.to(device=device, dtype=dtype)
if self.t1 is not None:
- self.t1 = self.t1.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.t1 = self.t1.to(device=device, dtype=dtype)
- self.w2_a = self.w2_a.to(device=device, dtype=dtype, non_blocking=non_blocking)
- self.w2_b = self.w2_b.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.w2_a = self.w2_a.to(device=device, dtype=dtype)
+ self.w2_b = self.w2_b.to(device=device, dtype=dtype)
if self.t2 is not None:
- self.t2 = self.t2.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.t2 = self.t2.to(device=device, dtype=dtype)
class LoKRLayer(LoRALayerBase):
@@ -264,12 +248,7 @@ class LoKRLayer(LoRALayerBase):
model_size += val.nelement() * val.element_size()
return model_size
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
super().to(device=device, dtype=dtype)
if self.w1 is not None:
@@ -277,19 +256,19 @@ class LoKRLayer(LoRALayerBase):
else:
assert self.w1_a is not None
assert self.w1_b is not None
- self.w1_a = self.w1_a.to(device=device, dtype=dtype, non_blocking=non_blocking)
- self.w1_b = self.w1_b.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.w1_a = self.w1_a.to(device=device, dtype=dtype)
+ self.w1_b = self.w1_b.to(device=device, dtype=dtype)
if self.w2 is not None:
- self.w2 = self.w2.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.w2 = self.w2.to(device=device, dtype=dtype)
else:
assert self.w2_a is not None
assert self.w2_b is not None
- self.w2_a = self.w2_a.to(device=device, dtype=dtype, non_blocking=non_blocking)
- self.w2_b = self.w2_b.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.w2_a = self.w2_a.to(device=device, dtype=dtype)
+ self.w2_b = self.w2_b.to(device=device, dtype=dtype)
if self.t2 is not None:
- self.t2 = self.t2.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.t2 = self.t2.to(device=device, dtype=dtype)
class FullLayer(LoRALayerBase):
@@ -319,15 +298,10 @@ class FullLayer(LoRALayerBase):
model_size += self.weight.nelement() * self.weight.element_size()
return model_size
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
super().to(device=device, dtype=dtype)
- self.weight = self.weight.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.weight = self.weight.to(device=device, dtype=dtype)
class IA3Layer(LoRALayerBase):
@@ -359,16 +333,11 @@ class IA3Layer(LoRALayerBase):
model_size += self.on_input.nelement() * self.on_input.element_size()
return model_size
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ):
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None):
super().to(device=device, dtype=dtype)
- self.weight = self.weight.to(device=device, dtype=dtype, non_blocking=non_blocking)
- self.on_input = self.on_input.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ self.weight = self.weight.to(device=device, dtype=dtype)
+ self.on_input = self.on_input.to(device=device, dtype=dtype)
AnyLoRALayer = Union[LoRALayer, LoHALayer, LoKRLayer, FullLayer, IA3Layer]
@@ -390,15 +359,10 @@ class LoRAModelRaw(RawModel): # (torch.nn.Module):
def name(self) -> str:
return self._name
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
# TODO: try revert if exception?
for _key, layer in self.layers.items():
- layer.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ layer.to(device=device, dtype=dtype)
def calc_size(self) -> int:
model_size = 0
@@ -521,7 +485,7 @@ class LoRAModelRaw(RawModel): # (torch.nn.Module):
# lower memory consumption by removing already parsed layer values
state_dict[layer_key].clear()
- layer.to(device=device, dtype=dtype, non_blocking=TorchDevice.get_non_blocking(device))
+ layer.to(device=device, dtype=dtype)
model.layers[layer_key] = layer
return model
diff --git a/invokeai/backend/model_manager/config.py b/invokeai/backend/model_manager/config.py
index dbcd259368..f6cc5929c8 100644
--- a/invokeai/backend/model_manager/config.py
+++ b/invokeai/backend/model_manager/config.py
@@ -67,6 +67,7 @@ class ModelType(str, Enum):
IPAdapter = "ip_adapter"
CLIPVision = "clip_vision"
T2IAdapter = "t2i_adapter"
+ SpandrelImageToImage = "spandrel_image_to_image"
class SubModelType(str, Enum):
@@ -371,6 +372,17 @@ class T2IAdapterConfig(DiffusersConfigBase, ControlAdapterConfigBase):
return Tag(f"{ModelType.T2IAdapter.value}.{ModelFormat.Diffusers.value}")
+class SpandrelImageToImageConfig(ModelConfigBase):
+ """Model config for Spandrel Image to Image models."""
+
+ type: Literal[ModelType.SpandrelImageToImage] = ModelType.SpandrelImageToImage
+ format: Literal[ModelFormat.Checkpoint] = ModelFormat.Checkpoint
+
+ @staticmethod
+ def get_tag() -> Tag:
+ return Tag(f"{ModelType.SpandrelImageToImage.value}.{ModelFormat.Checkpoint.value}")
+
+
def get_model_discriminator_value(v: Any) -> str:
"""
Computes the discriminator value for a model config.
@@ -407,6 +419,7 @@ AnyModelConfig = Annotated[
Annotated[IPAdapterInvokeAIConfig, IPAdapterInvokeAIConfig.get_tag()],
Annotated[IPAdapterCheckpointConfig, IPAdapterCheckpointConfig.get_tag()],
Annotated[T2IAdapterConfig, T2IAdapterConfig.get_tag()],
+ Annotated[SpandrelImageToImageConfig, SpandrelImageToImageConfig.get_tag()],
Annotated[CLIPVisionDiffusersConfig, CLIPVisionDiffusersConfig.get_tag()],
],
Discriminator(get_model_discriminator_value),
diff --git a/invokeai/backend/model_manager/load/model_cache/model_cache_default.py b/invokeai/backend/model_manager/load/model_cache/model_cache_default.py
index 9027b7b5b7..e69201e739 100644
--- a/invokeai/backend/model_manager/load/model_cache/model_cache_default.py
+++ b/invokeai/backend/model_manager/load/model_cache/model_cache_default.py
@@ -289,11 +289,9 @@ class ModelCache(ModelCacheBase[AnyModel]):
else:
new_dict: Dict[str, torch.Tensor] = {}
for k, v in cache_entry.state_dict.items():
- new_dict[k] = v.to(
- target_device, copy=True, non_blocking=TorchDevice.get_non_blocking(target_device)
- )
+ new_dict[k] = v.to(target_device, copy=True)
cache_entry.model.load_state_dict(new_dict, assign=True)
- cache_entry.model.to(target_device, non_blocking=TorchDevice.get_non_blocking(target_device))
+ cache_entry.model.to(target_device)
cache_entry.device = target_device
except Exception as e: # blow away cache entry
self._delete_cache_entry(cache_entry)
diff --git a/invokeai/backend/model_manager/load/model_loaders/spandrel_image_to_image.py b/invokeai/backend/model_manager/load/model_loaders/spandrel_image_to_image.py
new file mode 100644
index 0000000000..7a57c5cf59
--- /dev/null
+++ b/invokeai/backend/model_manager/load/model_loaders/spandrel_image_to_image.py
@@ -0,0 +1,45 @@
+from pathlib import Path
+from typing import Optional
+
+import torch
+
+from invokeai.backend.model_manager.config import (
+ AnyModel,
+ AnyModelConfig,
+ BaseModelType,
+ ModelFormat,
+ ModelType,
+ SubModelType,
+)
+from invokeai.backend.model_manager.load.load_default import ModelLoader
+from invokeai.backend.model_manager.load.model_loader_registry import ModelLoaderRegistry
+from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
+
+
+@ModelLoaderRegistry.register(
+ base=BaseModelType.Any, type=ModelType.SpandrelImageToImage, format=ModelFormat.Checkpoint
+)
+class SpandrelImageToImageModelLoader(ModelLoader):
+ """Class for loading Spandrel Image-to-Image models (i.e. models wrapped by spandrel.ImageModelDescriptor)."""
+
+ def _load_model(
+ self,
+ config: AnyModelConfig,
+ submodel_type: Optional[SubModelType] = None,
+ ) -> AnyModel:
+ if submodel_type is not None:
+ raise ValueError("Unexpected submodel requested for Spandrel model.")
+
+ model_path = Path(config.path)
+ model = SpandrelImageToImageModel.load_from_file(model_path)
+
+ torch_dtype = self._torch_dtype
+ if not model.supports_dtype(torch_dtype):
+ self._logger.warning(
+ f"The configured dtype ('{self._torch_dtype}') is not supported by the {model.get_model_type_name()} "
+ "model. Falling back to 'float32'."
+ )
+ torch_dtype = torch.float32
+ model.to(dtype=torch_dtype)
+
+ return model
diff --git a/invokeai/backend/model_manager/load/model_util.py b/invokeai/backend/model_manager/load/model_util.py
index 64fbd29a1f..f070a42965 100644
--- a/invokeai/backend/model_manager/load/model_util.py
+++ b/invokeai/backend/model_manager/load/model_util.py
@@ -15,6 +15,7 @@ from invokeai.backend.ip_adapter.ip_adapter import IPAdapter
from invokeai.backend.lora import LoRAModelRaw
from invokeai.backend.model_manager.config import AnyModel
from invokeai.backend.onnx.onnx_runtime import IAIOnnxRuntimeModel
+from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
from invokeai.backend.textual_inversion import TextualInversionModelRaw
@@ -33,7 +34,7 @@ def calc_model_size_by_data(logger: logging.Logger, model: AnyModel) -> int:
elif isinstance(model, CLIPTokenizer):
# TODO(ryand): Accurately calculate the tokenizer's size. It's small enough that it shouldn't matter for now.
return 0
- elif isinstance(model, (TextualInversionModelRaw, IPAdapter, LoRAModelRaw)):
+ elif isinstance(model, (TextualInversionModelRaw, IPAdapter, LoRAModelRaw, SpandrelImageToImageModel)):
return model.calc_size()
else:
# TODO(ryand): Promote this from a log to an exception once we are confident that we are handling all of the
diff --git a/invokeai/backend/model_manager/probe.py b/invokeai/backend/model_manager/probe.py
index f6fb2d24bc..1929b3f4fd 100644
--- a/invokeai/backend/model_manager/probe.py
+++ b/invokeai/backend/model_manager/probe.py
@@ -4,6 +4,7 @@ from pathlib import Path
from typing import Any, Dict, Literal, Optional, Union
import safetensors.torch
+import spandrel
import torch
from picklescan.scanner import scan_file_path
@@ -25,6 +26,7 @@ from invokeai.backend.model_manager.config import (
SchedulerPredictionType,
)
from invokeai.backend.model_manager.util.model_util import lora_token_vector_length, read_checkpoint_meta
+from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
from invokeai.backend.util.silence_warnings import SilenceWarnings
CkptType = Dict[str | int, Any]
@@ -220,24 +222,46 @@ class ModelProbe(object):
ckpt = ckpt.get("state_dict", ckpt)
for key in [str(k) for k in ckpt.keys()]:
- if any(key.startswith(v) for v in {"cond_stage_model.", "first_stage_model.", "model.diffusion_model."}):
+ if key.startswith(("cond_stage_model.", "first_stage_model.", "model.diffusion_model.")):
return ModelType.Main
- elif any(key.startswith(v) for v in {"encoder.conv_in", "decoder.conv_in"}):
+ elif key.startswith(("encoder.conv_in", "decoder.conv_in")):
return ModelType.VAE
- elif any(key.startswith(v) for v in {"lora_te_", "lora_unet_"}):
+ elif key.startswith(("lora_te_", "lora_unet_")):
return ModelType.LoRA
- elif any(key.endswith(v) for v in {"to_k_lora.up.weight", "to_q_lora.down.weight"}):
+ elif key.endswith(("to_k_lora.up.weight", "to_q_lora.down.weight")):
return ModelType.LoRA
- elif any(key.startswith(v) for v in {"controlnet", "control_model", "input_blocks"}):
+ elif key.startswith(("controlnet", "control_model", "input_blocks")):
return ModelType.ControlNet
- elif any(key.startswith(v) for v in {"image_proj.", "ip_adapter."}):
+ elif key.startswith(("image_proj.", "ip_adapter.")):
return ModelType.IPAdapter
elif key in {"emb_params", "string_to_param"}:
return ModelType.TextualInversion
- else:
- # diffusers-ti
- if len(ckpt) < 10 and all(isinstance(v, torch.Tensor) for v in ckpt.values()):
- return ModelType.TextualInversion
+
+ # diffusers-ti
+ if len(ckpt) < 10 and all(isinstance(v, torch.Tensor) for v in ckpt.values()):
+ return ModelType.TextualInversion
+
+ # Check if the model can be loaded as a SpandrelImageToImageModel.
+ # This check is intentionally performed last, as it can be expensive (it requires loading the model from disk).
+ try:
+ # It would be nice to avoid having to load the Spandrel model from disk here. A couple of options were
+ # explored to avoid this:
+ # 1. Call `SpandrelImageToImageModel.load_from_state_dict(ckpt)`, where `ckpt` is a state_dict on the meta
+ # device. Unfortunately, some Spandrel models perform operations during initialization that are not
+ # supported on meta tensors.
+ # 2. Spandrel has internal logic to determine a model's type from its state_dict before loading the model.
+ # This logic is not exposed in spandrel's public API. We could copy the logic here, but then we have to
+ # maintain it, and the risk of false positive detections is higher.
+ SpandrelImageToImageModel.load_from_file(model_path)
+ return ModelType.SpandrelImageToImage
+ except spandrel.UnsupportedModelError:
+ pass
+ except RuntimeError as e:
+ if "No such file or directory" in str(e):
+ # This error is expected if the model_path does not exist (which is the case in some unit tests).
+ pass
+ else:
+ raise e
raise InvalidModelConfigException(f"Unable to determine model type for {model_path}")
@@ -569,6 +593,11 @@ class T2IAdapterCheckpointProbe(CheckpointProbeBase):
raise NotImplementedError()
+class SpandrelImageToImageCheckpointProbe(CheckpointProbeBase):
+ def get_base_type(self) -> BaseModelType:
+ return BaseModelType.Any
+
+
########################################################
# classes for probing folders
#######################################################
@@ -776,6 +805,11 @@ class CLIPVisionFolderProbe(FolderProbeBase):
return BaseModelType.Any
+class SpandrelImageToImageFolderProbe(FolderProbeBase):
+ def get_base_type(self) -> BaseModelType:
+ raise NotImplementedError()
+
+
class T2IAdapterFolderProbe(FolderProbeBase):
def get_base_type(self) -> BaseModelType:
config_file = self.model_path / "config.json"
@@ -805,6 +839,7 @@ ModelProbe.register_probe("diffusers", ModelType.ControlNet, ControlNetFolderPro
ModelProbe.register_probe("diffusers", ModelType.IPAdapter, IPAdapterFolderProbe)
ModelProbe.register_probe("diffusers", ModelType.CLIPVision, CLIPVisionFolderProbe)
ModelProbe.register_probe("diffusers", ModelType.T2IAdapter, T2IAdapterFolderProbe)
+ModelProbe.register_probe("diffusers", ModelType.SpandrelImageToImage, SpandrelImageToImageFolderProbe)
ModelProbe.register_probe("checkpoint", ModelType.Main, PipelineCheckpointProbe)
ModelProbe.register_probe("checkpoint", ModelType.VAE, VaeCheckpointProbe)
@@ -814,5 +849,6 @@ ModelProbe.register_probe("checkpoint", ModelType.ControlNet, ControlNetCheckpoi
ModelProbe.register_probe("checkpoint", ModelType.IPAdapter, IPAdapterCheckpointProbe)
ModelProbe.register_probe("checkpoint", ModelType.CLIPVision, CLIPVisionCheckpointProbe)
ModelProbe.register_probe("checkpoint", ModelType.T2IAdapter, T2IAdapterCheckpointProbe)
+ModelProbe.register_probe("checkpoint", ModelType.SpandrelImageToImage, SpandrelImageToImageCheckpointProbe)
ModelProbe.register_probe("onnx", ModelType.ONNX, ONNXFolderProbe)
diff --git a/invokeai/backend/model_manager/starter_models.py b/invokeai/backend/model_manager/starter_models.py
index a397a2a5dc..2a860eeac1 100644
--- a/invokeai/backend/model_manager/starter_models.py
+++ b/invokeai/backend/model_manager/starter_models.py
@@ -399,6 +399,43 @@ STARTER_MODELS: list[StarterModel] = [
type=ModelType.T2IAdapter,
),
# endregion
+ # region SpandrelImageToImage
+ StarterModel(
+ name="RealESRGAN_x4plus_anime_6B",
+ base=BaseModelType.Any,
+ source="https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.2.4/RealESRGAN_x4plus_anime_6B.pth",
+ description="A Real-ESRGAN 4x upscaling model (optimized for anime images).",
+ type=ModelType.SpandrelImageToImage,
+ ),
+ StarterModel(
+ name="RealESRGAN_x4plus",
+ base=BaseModelType.Any,
+ source="https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
+ description="A Real-ESRGAN 4x upscaling model (general-purpose).",
+ type=ModelType.SpandrelImageToImage,
+ ),
+ StarterModel(
+ name="ESRGAN_SRx4_DF2KOST_official",
+ base=BaseModelType.Any,
+ source="https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.1/ESRGAN_SRx4_DF2KOST_official-ff704c30.pth",
+ description="The official ESRGAN 4x upscaling model.",
+ type=ModelType.SpandrelImageToImage,
+ ),
+ StarterModel(
+ name="RealESRGAN_x2plus",
+ base=BaseModelType.Any,
+ source="https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.1/RealESRGAN_x2plus.pth",
+ description="A Real-ESRGAN 2x upscaling model (general-purpose).",
+ type=ModelType.SpandrelImageToImage,
+ ),
+ StarterModel(
+ name="SwinIR - realSR_BSRGAN_DFOWMFC_s64w8_SwinIR-L_x4_GAN",
+ base=BaseModelType.Any,
+ source="https://github.com/JingyunLiang/SwinIR/releases/download/v0.0/003_realSR_BSRGAN_DFOWMFC_s64w8_SwinIR-L_x4_GAN-with-dict-keys-params-and-params_ema.pth",
+ description="A SwinIR 4x upscaling model.",
+ type=ModelType.SpandrelImageToImage,
+ ),
+ # endregion
]
assert len(STARTER_MODELS) == len({m.source for m in STARTER_MODELS}), "Duplicate starter models"
diff --git a/invokeai/backend/model_patcher.py b/invokeai/backend/model_patcher.py
index b2d6036f63..d30f7b3167 100644
--- a/invokeai/backend/model_patcher.py
+++ b/invokeai/backend/model_patcher.py
@@ -158,15 +158,12 @@ class ModelPatcher:
# We intentionally move to the target device first, then cast. Experimentally, this was found to
# be significantly faster for 16-bit CPU tensors being moved to a CUDA device than doing the
# same thing in a single call to '.to(...)'.
- layer.to(device=device, non_blocking=TorchDevice.get_non_blocking(device))
- layer.to(dtype=torch.float32, non_blocking=TorchDevice.get_non_blocking(device))
+ layer.to(device=device)
+ layer.to(dtype=torch.float32)
# TODO(ryand): Using torch.autocast(...) over explicit casting may offer a speed benefit on CUDA
# devices here. Experimentally, it was found to be very slow on CPU. More investigation needed.
layer_weight = layer.get_weight(module.weight) * (lora_weight * layer_scale)
- layer.to(
- device=TorchDevice.CPU_DEVICE,
- non_blocking=TorchDevice.get_non_blocking(TorchDevice.CPU_DEVICE),
- )
+ layer.to(device=TorchDevice.CPU_DEVICE)
assert isinstance(layer_weight, torch.Tensor) # mypy thinks layer_weight is a float|Any ??!
if module.weight.shape != layer_weight.shape:
@@ -175,7 +172,7 @@ class ModelPatcher:
layer_weight = layer_weight.reshape(module.weight.shape)
assert isinstance(layer_weight, torch.Tensor) # mypy thinks layer_weight is a float|Any ??!
- module.weight += layer_weight.to(dtype=dtype, non_blocking=TorchDevice.get_non_blocking(device))
+ module.weight += layer_weight.to(dtype=dtype)
yield # wait for context manager exit
@@ -183,9 +180,7 @@ class ModelPatcher:
assert hasattr(model, "get_submodule") # mypy not picking up fact that torch.nn.Module has get_submodule()
with torch.no_grad():
for module_key, weight in original_weights.items():
- model.get_submodule(module_key).weight.copy_(
- weight, non_blocking=TorchDevice.get_non_blocking(weight.device)
- )
+ model.get_submodule(module_key).weight.copy_(weight)
@classmethod
@contextmanager
diff --git a/invokeai/backend/onnx/onnx_runtime.py b/invokeai/backend/onnx/onnx_runtime.py
index d562a46dff..a8132d4b23 100644
--- a/invokeai/backend/onnx/onnx_runtime.py
+++ b/invokeai/backend/onnx/onnx_runtime.py
@@ -190,12 +190,7 @@ class IAIOnnxRuntimeModel(RawModel):
return self.session.run(None, inputs)
# compatability with RawModel ABC
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
pass
# compatability with diffusers load code
diff --git a/invokeai/backend/raw_model.py b/invokeai/backend/raw_model.py
index 7bca6945d9..23502b20cb 100644
--- a/invokeai/backend/raw_model.py
+++ b/invokeai/backend/raw_model.py
@@ -1,15 +1,3 @@
-"""Base class for 'Raw' models.
-
-The RawModel class is the base class of LoRAModelRaw and TextualInversionModelRaw,
-and is used for type checking of calls to the model patcher. Its main purpose
-is to avoid a circular import issues when lora.py tries to import BaseModelType
-from invokeai.backend.model_manager.config, and the latter tries to import LoRAModelRaw
-from lora.py.
-
-The term 'raw' was introduced to describe a wrapper around a torch.nn.Module
-that adds additional methods and attributes.
-"""
-
from abc import ABC, abstractmethod
from typing import Optional
@@ -17,13 +5,18 @@ import torch
class RawModel(ABC):
- """Abstract base class for 'Raw' model wrappers."""
+ """Base class for 'Raw' models.
+
+ The RawModel class is the base class of LoRAModelRaw, TextualInversionModelRaw, etc.
+ and is used for type checking of calls to the model patcher. Its main purpose
+ is to avoid circular import issues when lora.py tries to import BaseModelType
+ from invokeai.backend.model_manager.config, and the latter tries to import LoRAModelRaw
+ from lora.py.
+
+ The term 'raw' was introduced to describe a wrapper around a torch.nn.Module
+ that adds additional methods and attributes.
+ """
@abstractmethod
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
pass
diff --git a/invokeai/backend/spandrel_image_to_image_model.py b/invokeai/backend/spandrel_image_to_image_model.py
new file mode 100644
index 0000000000..ccf02c57ac
--- /dev/null
+++ b/invokeai/backend/spandrel_image_to_image_model.py
@@ -0,0 +1,139 @@
+from pathlib import Path
+from typing import Any, Optional
+
+import numpy as np
+import torch
+from PIL import Image
+from spandrel import ImageModelDescriptor, ModelLoader
+
+from invokeai.backend.raw_model import RawModel
+
+
+class SpandrelImageToImageModel(RawModel):
+ """A wrapper for a Spandrel Image-to-Image model.
+
+ The main reason for having a wrapper class is to integrate with the type handling of RawModel.
+ """
+
+ def __init__(self, spandrel_model: ImageModelDescriptor[Any]):
+ self._spandrel_model = spandrel_model
+
+ @staticmethod
+ def pil_to_tensor(image: Image.Image) -> torch.Tensor:
+ """Convert PIL Image to the torch.Tensor format expected by SpandrelImageToImageModel.run().
+
+ Args:
+ image (Image.Image): A PIL Image with shape (H, W, C) and values in the range [0, 255].
+
+ Returns:
+ torch.Tensor: A torch.Tensor with shape (N, C, H, W) and values in the range [0, 1].
+ """
+ image_np = np.array(image)
+ # (H, W, C) -> (C, H, W)
+ image_np = np.transpose(image_np, (2, 0, 1))
+ image_np = image_np / 255
+ image_tensor = torch.from_numpy(image_np).float()
+ # (C, H, W) -> (N, C, H, W)
+ image_tensor = image_tensor.unsqueeze(0)
+ return image_tensor
+
+ @staticmethod
+ def tensor_to_pil(tensor: torch.Tensor) -> Image.Image:
+ """Convert a torch.Tensor produced by SpandrelImageToImageModel.run() to a PIL Image.
+
+ Args:
+ tensor (torch.Tensor): A torch.Tensor with shape (N, C, H, W) and values in the range [0, 1].
+
+ Returns:
+ Image.Image: A PIL Image with shape (H, W, C) and values in the range [0, 255].
+ """
+ # (N, C, H, W) -> (C, H, W)
+ tensor = tensor.squeeze(0)
+ # (C, H, W) -> (H, W, C)
+ tensor = tensor.permute(1, 2, 0)
+ tensor = tensor.clamp(0, 1)
+ tensor = (tensor * 255).cpu().detach().numpy().astype(np.uint8)
+ image = Image.fromarray(tensor)
+ return image
+
+ def run(self, image_tensor: torch.Tensor) -> torch.Tensor:
+ """Run the image-to-image model.
+
+ Args:
+ image_tensor (torch.Tensor): A torch.Tensor with shape (N, C, H, W) and values in the range [0, 1].
+ """
+ return self._spandrel_model(image_tensor)
+
+ @classmethod
+ def load_from_file(cls, file_path: str | Path):
+ model = ModelLoader().load_from_file(file_path)
+ if not isinstance(model, ImageModelDescriptor):
+ raise ValueError(
+ f"Loaded a spandrel model of type '{type(model)}'. Only image-to-image models are supported "
+ "('ImageModelDescriptor')."
+ )
+
+ return cls(spandrel_model=model)
+
+ @classmethod
+ def load_from_state_dict(cls, state_dict: dict[str, torch.Tensor]):
+ model = ModelLoader().load_from_state_dict(state_dict)
+ if not isinstance(model, ImageModelDescriptor):
+ raise ValueError(
+ f"Loaded a spandrel model of type '{type(model)}'. Only image-to-image models are supported "
+ "('ImageModelDescriptor')."
+ )
+
+ return cls(spandrel_model=model)
+
+ def supports_dtype(self, dtype: torch.dtype) -> bool:
+ """Check if the model supports the given dtype."""
+ if dtype == torch.float16:
+ return self._spandrel_model.supports_half
+ elif dtype == torch.bfloat16:
+ return self._spandrel_model.supports_bfloat16
+ elif dtype == torch.float32:
+ # All models support float32.
+ return True
+ else:
+ raise ValueError(f"Unexpected dtype '{dtype}'.")
+
+ def get_model_type_name(self) -> str:
+ """The model type name. Intended for logging / debugging purposes. Do not rely on this field remaining
+ consistent over time.
+ """
+ return str(type(self._spandrel_model.model))
+
+ def to(
+ self,
+ device: Optional[torch.device] = None,
+ dtype: Optional[torch.dtype] = None,
+ non_blocking: bool = False,
+ ) -> None:
+ """Note: Some models have limited dtype support. Call supports_dtype(...) to check if the dtype is supported.
+ Note: The non_blocking parameter is currently ignored."""
+ # TODO(ryand): spandrel.ImageModelDescriptor.to(...) does not support non_blocking. We will have to access the
+ # model directly if we want to apply this optimization.
+ self._spandrel_model.to(device=device, dtype=dtype)
+
+ @property
+ def device(self) -> torch.device:
+ """The device of the underlying model."""
+ return self._spandrel_model.device
+
+ @property
+ def dtype(self) -> torch.dtype:
+ """The dtype of the underlying model."""
+ return self._spandrel_model.dtype
+
+ @property
+ def scale(self) -> int:
+ """The scale of the model (e.g. 1x, 2x, 4x, etc.)."""
+ return self._spandrel_model.scale
+
+ def calc_size(self) -> int:
+ """Get size of the model in memory in bytes."""
+ # HACK(ryand): Fix this issue with circular imports.
+ from invokeai.backend.model_manager.load.model_util import calc_module_size
+
+ return calc_module_size(self._spandrel_model.model)
diff --git a/invokeai/backend/textual_inversion.py b/invokeai/backend/textual_inversion.py
index 483f2da88c..0345478b97 100644
--- a/invokeai/backend/textual_inversion.py
+++ b/invokeai/backend/textual_inversion.py
@@ -65,17 +65,12 @@ class TextualInversionModelRaw(RawModel):
return result
- def to(
- self,
- device: Optional[torch.device] = None,
- dtype: Optional[torch.dtype] = None,
- non_blocking: bool = False,
- ) -> None:
+ def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
if not torch.cuda.is_available():
return
for emb in [self.embedding, self.embedding_2]:
if emb is not None:
- emb.to(device=device, dtype=dtype, non_blocking=non_blocking)
+ emb.to(device=device, dtype=dtype)
def calc_size(self) -> int:
"""Get the size of this model in bytes."""
diff --git a/invokeai/backend/util/devices.py b/invokeai/backend/util/devices.py
index 1cba70c662..83ce055024 100644
--- a/invokeai/backend/util/devices.py
+++ b/invokeai/backend/util/devices.py
@@ -112,15 +112,3 @@ class TorchDevice:
@classmethod
def _to_dtype(cls, precision_name: TorchPrecisionNames) -> torch.dtype:
return NAME_TO_PRECISION[precision_name]
-
- @staticmethod
- def get_non_blocking(to_device: torch.device) -> bool:
- """Return the non_blocking flag to be used when moving a tensor to a given device.
- MPS may have unexpected errors with non-blocking operations - we should not use non-blocking when moving _to_ MPS.
- When moving _from_ MPS, we can use non-blocking operations.
-
- See:
- - https://github.com/pytorch/pytorch/issues/107455
- - https://discuss.pytorch.org/t/should-we-set-non-blocking-to-true/38234/28
- """
- return False if to_device.type == "mps" else True
diff --git a/invokeai/frontend/web/public/locales/it.json b/invokeai/frontend/web/public/locales/it.json
index 25c2e5b9a8..eced64a1e3 100644
--- a/invokeai/frontend/web/public/locales/it.json
+++ b/invokeai/frontend/web/public/locales/it.json
@@ -962,8 +962,8 @@
"selectedForAutoAdd": "Selezionato per l'aggiunta automatica",
"addSharedBoard": "Aggiungi una Bacheca Condivisa",
"boards": "Bacheche",
- "private": "Privata",
- "shared": "Condivisa",
+ "private": "Bacheche private",
+ "shared": "Bacheche condivise",
"addPrivateBoard": "Aggiungi una Bacheca Privata"
},
"controlnet": {
@@ -1028,7 +1028,7 @@
"minConfidence": "Confidenza minima",
"scribble": "Scarabocchio",
"amult": "Angolo di illuminazione",
- "coarse": "Approssimativo",
+ "coarse": "Grossolano",
"resizeSimple": "Ridimensiona (semplice)",
"large": "Grande",
"small": "Piccolo",
@@ -1353,7 +1353,7 @@
"lora": {
"heading": "LoRA",
"paragraphs": [
- "Modelli leggeri utilizzati insieme ai modelli base."
+ "Modelli concettuali utilizzati insieme ai modelli di base."
]
},
"controlNet": {
diff --git a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/imageDeletionListeners.ts b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/imageDeletionListeners.ts
index 056346cb68..489adb7476 100644
--- a/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/imageDeletionListeners.ts
+++ b/invokeai/frontend/web/src/app/store/middleware/listenerMiddleware/listeners/imageDeletionListeners.ts
@@ -136,7 +136,12 @@ export const addImageDeletionListeners = (startAppListening: AppStartListening)
if (data) {
const deletedImageIndex = data.items.findIndex((i) => i.image_name === imageDTO.image_name);
const nextImage = data.items[deletedImageIndex + 1] ?? data.items[0] ?? null;
- dispatch(imageSelected(nextImage));
+ if (nextImage?.image_name === imageDTO.image_name) {
+ // If the next image is the same as the deleted one, it means it was the last image, reset selection
+ dispatch(imageSelected(null));
+ } else {
+ dispatch(imageSelected(nextImage));
+ }
}
}
@@ -176,6 +181,8 @@ export const addImageDeletionListeners = (startAppListening: AppStartListening)
const queryArgs = selectListImagesQueryArgs(state);
const { data } = imagesApi.endpoints.listImages.select(queryArgs)(state);
if (data) {
+ // When we delete multiple images, we clear the selection. Then, the next time we load images, we will
+ // select the first one. This is handled below in the listener for `imagesApi.endpoints.listImages.matchFulfilled`.
dispatch(imageSelected(null));
}
}
diff --git a/invokeai/frontend/web/src/features/gallery/components/Boards/BoardsList/BoardsList.tsx b/invokeai/frontend/web/src/features/gallery/components/Boards/BoardsList/BoardsList.tsx
index bd4c42e8d1..4325281e0f 100644
--- a/invokeai/frontend/web/src/features/gallery/components/Boards/BoardsList/BoardsList.tsx
+++ b/invokeai/frontend/web/src/features/gallery/components/Boards/BoardsList/BoardsList.tsx
@@ -1,4 +1,4 @@
-import { Flex, Text } from '@invoke-ai/ui-library';
+import { Box, Flex, Text } from '@invoke-ai/ui-library';
import { EMPTY_ARRAY } from 'app/store/constants';
import { useAppSelector } from 'app/store/storeHooks';
import { overlayScrollbarsParams } from 'common/components/OverlayScrollbars/constants';
@@ -40,9 +40,41 @@ const BoardsList = () => {
return (
<>
-
-
- {allowPrivateBoards && (
+
+
+
+ {allowPrivateBoards && (
+
+
+
+ {t('boards.private')}
+
+
+
+
+
+ {filteredPrivateBoards.map((board) => (
+
+ ))}
+
+
+ )}
{
justifyContent="space-between"
alignItems="center"
ps={2}
- py={1}
+ pb={1}
+ pt={2}
zIndex={1}
top={0}
bg="base.900"
>
- {t('boards.private')}
+ {allowPrivateBoards ? t('boards.shared') : t('boards.boards')}
-
+
-
- {filteredPrivateBoards.map((board) => (
+ {!allowPrivateBoards && }
+ {filteredSharedBoards.map((board) => (
{
))}
- )}
-
-
-
- {allowPrivateBoards ? t('boards.shared') : t('boards.boards')}
-
-
-
-
- {!allowPrivateBoards && }
- {filteredSharedBoards.map((board) => (
-
- ))}
-
-
-
-
+
+
+
>
);
diff --git a/invokeai/frontend/web/src/features/gallery/components/ImageGalleryContent.tsx b/invokeai/frontend/web/src/features/gallery/components/ImageGalleryContent.tsx
index 7c992c65d6..5a096f5cef 100644
--- a/invokeai/frontend/web/src/features/gallery/components/ImageGalleryContent.tsx
+++ b/invokeai/frontend/web/src/features/gallery/components/ImageGalleryContent.tsx
@@ -16,6 +16,7 @@ import { GalleryHeader } from 'features/gallery/components/GalleryHeader';
import { galleryViewChanged } from 'features/gallery/store/gallerySlice';
import ResizeHandle from 'features/ui/components/tabs/ResizeHandle';
import { usePanel, type UsePanelOptions } from 'features/ui/hooks/usePanel';
+import type { CSSProperties } from 'react';
import { memo, useCallback, useMemo, useRef } from 'react';
import { useTranslation } from 'react-i18next';
import { PiMagnifyingGlassBold } from 'react-icons/pi';
@@ -29,13 +30,15 @@ import GalleryImageGrid from './ImageGrid/GalleryImageGrid';
import { GalleryPagination } from './ImageGrid/GalleryPagination';
import { GallerySearch } from './ImageGrid/GallerySearch';
-const baseStyles: ChakraProps['sx'] = {
+const COLLAPSE_STYLES: CSSProperties = { flexShrink: 0, minHeight: 0 };
+
+const BASE_STYLES: ChakraProps['sx'] = {
fontWeight: 'semibold',
fontSize: 'sm',
color: 'base.300',
};
-const selectedStyles: ChakraProps['sx'] = {
+const SELECTED_STYLES: ChakraProps['sx'] = {
borderColor: 'base.800',
borderBottomColor: 'base.900',
color: 'invokeBlue.300',
@@ -110,11 +113,13 @@ const ImageGalleryContent = () => {
onExpand={boardsListPanel.onExpand}
collapsible
>
-
-
-
-
-
+
+
+
+
+
+
+
{
-
+
{t('parameters.images')}
-
+
{t('gallery.assets')}
@@ -157,7 +162,7 @@ const ImageGalleryContent = () => {
-
+
diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelList.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelList.tsx
index 67e65dbfb6..b82917221e 100644
--- a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelList.tsx
+++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelList.tsx
@@ -11,6 +11,7 @@ import {
useLoRAModels,
useMainModels,
useRefinerModels,
+ useSpandrelImageToImageModels,
useT2IAdapterModels,
useVAEModels,
} from 'services/api/hooks/modelsByType';
@@ -71,6 +72,13 @@ const ModelList = () => {
[vaeModels, searchTerm, filteredModelType]
);
+ const [spandrelImageToImageModels, { isLoading: isLoadingSpandrelImageToImageModels }] =
+ useSpandrelImageToImageModels();
+ const filteredSpandrelImageToImageModels = useMemo(
+ () => modelsFilter(spandrelImageToImageModels, searchTerm, filteredModelType),
+ [spandrelImageToImageModels, searchTerm, filteredModelType]
+ );
+
const totalFilteredModels = useMemo(() => {
return (
filteredMainModels.length +
@@ -80,7 +88,8 @@ const ModelList = () => {
filteredControlNetModels.length +
filteredT2IAdapterModels.length +
filteredIPAdapterModels.length +
- filteredVAEModels.length
+ filteredVAEModels.length +
+ filteredSpandrelImageToImageModels.length
);
}, [
filteredControlNetModels.length,
@@ -91,6 +100,7 @@ const ModelList = () => {
filteredRefinerModels.length,
filteredT2IAdapterModels.length,
filteredVAEModels.length,
+ filteredSpandrelImageToImageModels.length,
]);
return (
@@ -143,6 +153,17 @@ const ModelList = () => {
{!isLoadingT2IAdapterModels && filteredT2IAdapterModels.length > 0 && (
)}
+ {/* Spandrel Image to Image List */}
+ {isLoadingSpandrelImageToImageModels && (
+
+ )}
+ {!isLoadingSpandrelImageToImageModels && filteredSpandrelImageToImageModels.length > 0 && (
+
+ )}
{totalFilteredModels === 0 && (
{t('modelManager.noMatchingModels')}
diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelTypeFilter.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelTypeFilter.tsx
index 76802b36e7..1a2444870b 100644
--- a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelTypeFilter.tsx
+++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelTypeFilter.tsx
@@ -21,6 +21,7 @@ export const ModelTypeFilter = () => {
t2i_adapter: t('common.t2iAdapter'),
ip_adapter: t('common.ipAdapter'),
clip_vision: 'Clip Vision',
+ spandrel_image_to_image: 'Image-to-Image',
}),
[t]
);
diff --git a/invokeai/frontend/web/src/features/nodes/components/flow/nodes/Invocation/fields/InputFieldRenderer.tsx b/invokeai/frontend/web/src/features/nodes/components/flow/nodes/Invocation/fields/InputFieldRenderer.tsx
index 99937ceec4..d863def973 100644
--- a/invokeai/frontend/web/src/features/nodes/components/flow/nodes/Invocation/fields/InputFieldRenderer.tsx
+++ b/invokeai/frontend/web/src/features/nodes/components/flow/nodes/Invocation/fields/InputFieldRenderer.tsx
@@ -32,6 +32,8 @@ import {
isSDXLMainModelFieldInputTemplate,
isSDXLRefinerModelFieldInputInstance,
isSDXLRefinerModelFieldInputTemplate,
+ isSpandrelImageToImageModelFieldInputInstance,
+ isSpandrelImageToImageModelFieldInputTemplate,
isStringFieldInputInstance,
isStringFieldInputTemplate,
isT2IAdapterModelFieldInputInstance,
@@ -54,6 +56,7 @@ import NumberFieldInputComponent from './inputs/NumberFieldInputComponent';
import RefinerModelFieldInputComponent from './inputs/RefinerModelFieldInputComponent';
import SchedulerFieldInputComponent from './inputs/SchedulerFieldInputComponent';
import SDXLMainModelFieldInputComponent from './inputs/SDXLMainModelFieldInputComponent';
+import SpandrelImageToImageModelFieldInputComponent from './inputs/SpandrelImageToImageModelFieldInputComponent';
import StringFieldInputComponent from './inputs/StringFieldInputComponent';
import T2IAdapterModelFieldInputComponent from './inputs/T2IAdapterModelFieldInputComponent';
import VAEModelFieldInputComponent from './inputs/VAEModelFieldInputComponent';
@@ -125,6 +128,20 @@ const InputFieldRenderer = ({ nodeId, fieldName }: InputFieldProps) => {
if (isT2IAdapterModelFieldInputInstance(fieldInstance) && isT2IAdapterModelFieldInputTemplate(fieldTemplate)) {
return ;
}
+
+ if (
+ isSpandrelImageToImageModelFieldInputInstance(fieldInstance) &&
+ isSpandrelImageToImageModelFieldInputTemplate(fieldTemplate)
+ ) {
+ return (
+
+ );
+ }
+
if (isColorFieldInputInstance(fieldInstance) && isColorFieldInputTemplate(fieldTemplate)) {
return ;
}
diff --git a/invokeai/frontend/web/src/features/nodes/components/flow/nodes/Invocation/fields/inputs/SpandrelImageToImageModelFieldInputComponent.tsx b/invokeai/frontend/web/src/features/nodes/components/flow/nodes/Invocation/fields/inputs/SpandrelImageToImageModelFieldInputComponent.tsx
new file mode 100644
index 0000000000..ccd4eaa797
--- /dev/null
+++ b/invokeai/frontend/web/src/features/nodes/components/flow/nodes/Invocation/fields/inputs/SpandrelImageToImageModelFieldInputComponent.tsx
@@ -0,0 +1,55 @@
+import { Combobox, FormControl, Tooltip } from '@invoke-ai/ui-library';
+import { useAppDispatch } from 'app/store/storeHooks';
+import { useGroupedModelCombobox } from 'common/hooks/useGroupedModelCombobox';
+import { fieldSpandrelImageToImageModelValueChanged } from 'features/nodes/store/nodesSlice';
+import type {
+ SpandrelImageToImageModelFieldInputInstance,
+ SpandrelImageToImageModelFieldInputTemplate,
+} from 'features/nodes/types/field';
+import { memo, useCallback } from 'react';
+import { useSpandrelImageToImageModels } from 'services/api/hooks/modelsByType';
+import type { SpandrelImageToImageModelConfig } from 'services/api/types';
+
+import type { FieldComponentProps } from './types';
+
+const SpandrelImageToImageModelFieldInputComponent = (
+ props: FieldComponentProps
+) => {
+ const { nodeId, field } = props;
+ const dispatch = useAppDispatch();
+
+ const [modelConfigs, { isLoading }] = useSpandrelImageToImageModels();
+
+ const _onChange = useCallback(
+ (value: SpandrelImageToImageModelConfig | null) => {
+ if (!value) {
+ return;
+ }
+ dispatch(
+ fieldSpandrelImageToImageModelValueChanged({
+ nodeId,
+ fieldName: field.name,
+ value,
+ })
+ );
+ },
+ [dispatch, field.name, nodeId]
+ );
+
+ const { options, value, onChange } = useGroupedModelCombobox({
+ modelConfigs,
+ onChange: _onChange,
+ selectedModel: field.value,
+ isLoading,
+ });
+
+ return (
+
+
+
+
+
+ );
+};
+
+export default memo(SpandrelImageToImageModelFieldInputComponent);
diff --git a/invokeai/frontend/web/src/features/nodes/store/nodesSlice.ts b/invokeai/frontend/web/src/features/nodes/store/nodesSlice.ts
index 5ebc5de147..f9214c1572 100644
--- a/invokeai/frontend/web/src/features/nodes/store/nodesSlice.ts
+++ b/invokeai/frontend/web/src/features/nodes/store/nodesSlice.ts
@@ -19,6 +19,7 @@ import type {
ModelIdentifierFieldValue,
SchedulerFieldValue,
SDXLRefinerModelFieldValue,
+ SpandrelImageToImageModelFieldValue,
StatefulFieldValue,
StringFieldValue,
T2IAdapterModelFieldValue,
@@ -39,6 +40,7 @@ import {
zModelIdentifierFieldValue,
zSchedulerFieldValue,
zSDXLRefinerModelFieldValue,
+ zSpandrelImageToImageModelFieldValue,
zStatefulFieldValue,
zStringFieldValue,
zT2IAdapterModelFieldValue,
@@ -333,6 +335,12 @@ export const nodesSlice = createSlice({
fieldT2IAdapterModelValueChanged: (state, action: FieldValueAction) => {
fieldValueReducer(state, action, zT2IAdapterModelFieldValue);
},
+ fieldSpandrelImageToImageModelValueChanged: (
+ state,
+ action: FieldValueAction
+ ) => {
+ fieldValueReducer(state, action, zSpandrelImageToImageModelFieldValue);
+ },
fieldEnumModelValueChanged: (state, action: FieldValueAction) => {
fieldValueReducer(state, action, zEnumFieldValue);
},
@@ -384,6 +392,7 @@ export const {
fieldImageValueChanged,
fieldIPAdapterModelValueChanged,
fieldT2IAdapterModelValueChanged,
+ fieldSpandrelImageToImageModelValueChanged,
fieldLabelChanged,
fieldLoRAModelValueChanged,
fieldModelIdentifierValueChanged,
diff --git a/invokeai/frontend/web/src/features/nodes/types/common.ts b/invokeai/frontend/web/src/features/nodes/types/common.ts
index 54e126af3a..2ea8900281 100644
--- a/invokeai/frontend/web/src/features/nodes/types/common.ts
+++ b/invokeai/frontend/web/src/features/nodes/types/common.ts
@@ -66,6 +66,7 @@ const zModelType = z.enum([
'embedding',
'onnx',
'clip_vision',
+ 'spandrel_image_to_image',
]);
const zSubModelType = z.enum([
'unet',
diff --git a/invokeai/frontend/web/src/features/nodes/types/constants.ts b/invokeai/frontend/web/src/features/nodes/types/constants.ts
index 4ede5cd479..05697c384c 100644
--- a/invokeai/frontend/web/src/features/nodes/types/constants.ts
+++ b/invokeai/frontend/web/src/features/nodes/types/constants.ts
@@ -38,6 +38,7 @@ export const MODEL_TYPES = [
'VAEField',
'CLIPField',
'T2IAdapterModelField',
+ 'SpandrelImageToImageModelField',
];
/**
@@ -62,6 +63,7 @@ export const FIELD_COLORS: { [key: string]: string } = {
MainModelField: 'teal.500',
SDXLMainModelField: 'teal.500',
SDXLRefinerModelField: 'teal.500',
+ SpandrelImageToImageModelField: 'teal.500',
StringField: 'yellow.500',
T2IAdapterField: 'teal.500',
T2IAdapterModelField: 'teal.500',
diff --git a/invokeai/frontend/web/src/features/nodes/types/field.ts b/invokeai/frontend/web/src/features/nodes/types/field.ts
index e2a84e3390..925bd40b9d 100644
--- a/invokeai/frontend/web/src/features/nodes/types/field.ts
+++ b/invokeai/frontend/web/src/features/nodes/types/field.ts
@@ -139,6 +139,10 @@ const zT2IAdapterModelFieldType = zFieldTypeBase.extend({
name: z.literal('T2IAdapterModelField'),
originalType: zStatelessFieldType.optional(),
});
+const zSpandrelImageToImageModelFieldType = zFieldTypeBase.extend({
+ name: z.literal('SpandrelImageToImageModelField'),
+ originalType: zStatelessFieldType.optional(),
+});
const zSchedulerFieldType = zFieldTypeBase.extend({
name: z.literal('SchedulerField'),
originalType: zStatelessFieldType.optional(),
@@ -160,6 +164,7 @@ const zStatefulFieldType = z.union([
zControlNetModelFieldType,
zIPAdapterModelFieldType,
zT2IAdapterModelFieldType,
+ zSpandrelImageToImageModelFieldType,
zColorFieldType,
zSchedulerFieldType,
]);
@@ -581,6 +586,33 @@ export const isT2IAdapterModelFieldInputTemplate = (val: unknown): val is T2IAda
zT2IAdapterModelFieldInputTemplate.safeParse(val).success;
// #endregion
+// #region SpandrelImageToImageModelField
+
+export const zSpandrelImageToImageModelFieldValue = zModelIdentifierField.optional();
+const zSpandrelImageToImageModelFieldInputInstance = zFieldInputInstanceBase.extend({
+ value: zSpandrelImageToImageModelFieldValue,
+});
+const zSpandrelImageToImageModelFieldInputTemplate = zFieldInputTemplateBase.extend({
+ type: zSpandrelImageToImageModelFieldType,
+ originalType: zFieldType.optional(),
+ default: zSpandrelImageToImageModelFieldValue,
+});
+const zSpandrelImageToImageModelFieldOutputTemplate = zFieldOutputTemplateBase.extend({
+ type: zSpandrelImageToImageModelFieldType,
+});
+export type SpandrelImageToImageModelFieldValue = z.infer;
+export type SpandrelImageToImageModelFieldInputInstance = z.infer;
+export type SpandrelImageToImageModelFieldInputTemplate = z.infer;
+export const isSpandrelImageToImageModelFieldInputInstance = (
+ val: unknown
+): val is SpandrelImageToImageModelFieldInputInstance =>
+ zSpandrelImageToImageModelFieldInputInstance.safeParse(val).success;
+export const isSpandrelImageToImageModelFieldInputTemplate = (
+ val: unknown
+): val is SpandrelImageToImageModelFieldInputTemplate =>
+ zSpandrelImageToImageModelFieldInputTemplate.safeParse(val).success;
+// #endregion
+
// #region SchedulerField
export const zSchedulerFieldValue = zSchedulerField.optional();
@@ -667,6 +699,7 @@ export const zStatefulFieldValue = z.union([
zControlNetModelFieldValue,
zIPAdapterModelFieldValue,
zT2IAdapterModelFieldValue,
+ zSpandrelImageToImageModelFieldValue,
zColorFieldValue,
zSchedulerFieldValue,
]);
@@ -694,6 +727,7 @@ const zStatefulFieldInputInstance = z.union([
zControlNetModelFieldInputInstance,
zIPAdapterModelFieldInputInstance,
zT2IAdapterModelFieldInputInstance,
+ zSpandrelImageToImageModelFieldInputInstance,
zColorFieldInputInstance,
zSchedulerFieldInputInstance,
]);
@@ -722,6 +756,7 @@ const zStatefulFieldInputTemplate = z.union([
zControlNetModelFieldInputTemplate,
zIPAdapterModelFieldInputTemplate,
zT2IAdapterModelFieldInputTemplate,
+ zSpandrelImageToImageModelFieldInputTemplate,
zColorFieldInputTemplate,
zSchedulerFieldInputTemplate,
zStatelessFieldInputTemplate,
@@ -751,6 +786,7 @@ const zStatefulFieldOutputTemplate = z.union([
zControlNetModelFieldOutputTemplate,
zIPAdapterModelFieldOutputTemplate,
zT2IAdapterModelFieldOutputTemplate,
+ zSpandrelImageToImageModelFieldOutputTemplate,
zColorFieldOutputTemplate,
zSchedulerFieldOutputTemplate,
]);
diff --git a/invokeai/frontend/web/src/features/nodes/util/schema/buildFieldInputInstance.ts b/invokeai/frontend/web/src/features/nodes/util/schema/buildFieldInputInstance.ts
index 597779fd61..a5a2d89f03 100644
--- a/invokeai/frontend/web/src/features/nodes/util/schema/buildFieldInputInstance.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/schema/buildFieldInputInstance.ts
@@ -18,6 +18,7 @@ const FIELD_VALUE_FALLBACK_MAP: Record =
SDXLRefinerModelField: undefined,
StringField: '',
T2IAdapterModelField: undefined,
+ SpandrelImageToImageModelField: undefined,
VAEModelField: undefined,
ControlNetModelField: undefined,
};
diff --git a/invokeai/frontend/web/src/features/nodes/util/schema/buildFieldInputTemplate.ts b/invokeai/frontend/web/src/features/nodes/util/schema/buildFieldInputTemplate.ts
index 2b77274526..8478415cd1 100644
--- a/invokeai/frontend/web/src/features/nodes/util/schema/buildFieldInputTemplate.ts
+++ b/invokeai/frontend/web/src/features/nodes/util/schema/buildFieldInputTemplate.ts
@@ -17,6 +17,7 @@ import type {
SchedulerFieldInputTemplate,
SDXLMainModelFieldInputTemplate,
SDXLRefinerModelFieldInputTemplate,
+ SpandrelImageToImageModelFieldInputTemplate,
StatefulFieldType,
StatelessFieldInputTemplate,
StringFieldInputTemplate,
@@ -263,6 +264,17 @@ const buildT2IAdapterModelFieldInputTemplate: FieldInputTemplateBuilder = ({ schemaObject, baseField, fieldType }) => {
+ const template: SpandrelImageToImageModelFieldInputTemplate = {
+ ...baseField,
+ type: fieldType,
+ default: schemaObject.default ?? undefined,
+ };
+
+ return template;
+};
const buildBoardFieldInputTemplate: FieldInputTemplateBuilder = ({
schemaObject,
baseField,
@@ -377,6 +389,7 @@ export const TEMPLATE_BUILDER_MAP: Record {
+ return config.type === 'spandrel_image_to_image';
+};
+
export const isControlAdapterModelConfig = (
config: AnyModelConfig
): config is ControlNetModelConfig | T2IAdapterModelConfig | IPAdapterModelConfig => {
diff --git a/invokeai/version/invokeai_version.py b/invokeai/version/invokeai_version.py
index da1546b0a0..09545bce26 100644
--- a/invokeai/version/invokeai_version.py
+++ b/invokeai/version/invokeai_version.py
@@ -1 +1 @@
-__version__ = "4.2.6a1"
+__version__ = "4.2.6post1"
diff --git a/pyproject.toml b/pyproject.toml
index a11a19071c..9953c1c1a0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,6 +46,7 @@ dependencies = [
"opencv-python==4.9.0.80",
"pytorch-lightning==2.1.3",
"safetensors==0.4.3",
+ "spandrel==0.3.4",
"timm==0.6.13", # needed to override timm latest in controlnet_aux, see https://github.com/isl-org/ZoeDepth/issues/26
"torch==2.2.2",
"torchmetrics==0.11.4",