commit 710dc6b487
Merge branch 'main' into stalker7779/backend_base
@@ -233,21 +233,14 @@ async def get_image_workflow(
 )
 async def get_image_full(
     image_name: str = Path(description="The name of full-resolution image file to get"),
-) -> FileResponse:
+) -> Response:
     """Gets a full-resolution image file"""
 
     try:
         path = ApiDependencies.invoker.services.images.get_path(image_name)
-
-        if not ApiDependencies.invoker.services.images.validate_path(path):
-            raise HTTPException(status_code=404)
-
-        response = FileResponse(
-            path,
-            media_type="image/png",
-            filename=image_name,
-            content_disposition_type="inline",
-        )
+        with open(path, "rb") as f:
+            content = f.read()
+        response = Response(content, media_type="image/png")
         response.headers["Cache-Control"] = f"max-age={IMAGE_MAX_AGE}"
         return response
     except Exception:

@@ -268,15 +261,14 @@ async def get_image_full(
 )
 async def get_image_thumbnail(
     image_name: str = Path(description="The name of thumbnail image file to get"),
-) -> FileResponse:
+) -> Response:
     """Gets a thumbnail image file"""
 
     try:
         path = ApiDependencies.invoker.services.images.get_path(image_name, thumbnail=True)
-        if not ApiDependencies.invoker.services.images.validate_path(path):
-            raise HTTPException(status_code=404)
-
-        response = FileResponse(path, media_type="image/webp", content_disposition_type="inline")
+        with open(path, "rb") as f:
+            content = f.read()
+        response = Response(content, media_type="image/webp")
         response.headers["Cache-Control"] = f"max-age={IMAGE_MAX_AGE}"
         return response
     except Exception:
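Editor's note (a sketch, not part of the commit): both handlers above swap FastAPI's FileResponse for a plain Response built from eagerly read bytes. That drops the Content-Disposition/filename handling FileResponse adds and closes the file before the response is sent, at the cost of buffering the whole image in memory rather than streaming it. A minimal standalone reproduction of the new pattern; the app object, route path, and storage root here are hypothetical:

    from fastapi import FastAPI, HTTPException, Response

    app = FastAPI()
    IMAGE_MAX_AGE = 31536000  # hypothetical cache lifetime, in seconds

    @app.get("/images/{image_name}")
    async def get_image(image_name: str) -> Response:
        try:
            path = f"/data/images/{image_name}"  # hypothetical storage root
            with open(path, "rb") as f:  # bytes are read eagerly; the handle closes here
                content = f.read()
            response = Response(content, media_type="image/png")
            response.headers["Cache-Control"] = f"max-age={IMAGE_MAX_AGE}"
            return response
        except Exception:
            raise HTTPException(status_code=404)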
@@ -161,6 +161,7 @@ def invoke_api() -> None:
         # Taken from https://waylonwalker.com/python-find-available-port/, thanks Waylon!
         # https://github.com/WaylonWalker
         with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
+            s.settimeout(1)
             if s.connect_ex(("localhost", port)) == 0:
                 return find_port(port=port + 1)
             else:
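Editor's note (reconstruction, not part of the commit): the one-line addition bounds each port probe, since connect_ex can otherwise block for the OS connect timeout on an unresponsive target. From the context lines, the surrounding helper presumably reads roughly as follows; the def line, docstring, and final return are inferred:

    import socket

    def find_port(port: int) -> int:
        """Probe ports starting at `port`, returning the first one not in use."""
        # Taken from https://waylonwalker.com/python-find-available-port/, thanks Waylon!
        # https://github.com/WaylonWalker
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.settimeout(1)  # the added line: cap each probe at one second
            if s.connect_ex(("localhost", port)) == 0:
                # connect succeeded, so something is listening; try the next port
                return find_port(port=port + 1)
            else:
                return port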
@@ -48,6 +48,7 @@ class UIType(str, Enum, metaclass=MetaEnum):
     ControlNetModel = "ControlNetModelField"
     IPAdapterModel = "IPAdapterModelField"
     T2IAdapterModel = "T2IAdapterModelField"
+    SpandrelImageToImageModel = "SpandrelImageToImageModelField"
     # endregion
 
     # region Misc Field Types

@@ -134,6 +135,7 @@ class FieldDescriptions:
     sdxl_main_model = "SDXL Main model (UNet, VAE, CLIP1, CLIP2) to load"
     sdxl_refiner_model = "SDXL Refiner Main Modde (UNet, VAE, CLIP2) to load"
     onnx_main_model = "ONNX Main model (UNet, VAE, CLIP) to load"
+    spandrel_image_to_image_model = "Image-to-Image model"
     lora_weight = "The weight at which the LoRA is applied to each model"
     compel_prompt = "Prompt to be parsed by Compel to create a conditioning tensor"
     raw_prompt = "Raw prompt text (no parsing)"
invokeai/app/invocations/spandrel_image_to_image.py (new file, 144 lines)
@@ -0,0 +1,144 @@
+import numpy as np
+import torch
+from PIL import Image
+from tqdm import tqdm
+
+from invokeai.app.invocations.baseinvocation import BaseInvocation, invocation
+from invokeai.app.invocations.fields import (
+    FieldDescriptions,
+    ImageField,
+    InputField,
+    UIType,
+    WithBoard,
+    WithMetadata,
+)
+from invokeai.app.invocations.model import ModelIdentifierField
+from invokeai.app.invocations.primitives import ImageOutput
+from invokeai.app.services.session_processor.session_processor_common import CanceledException
+from invokeai.app.services.shared.invocation_context import InvocationContext
+from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
+from invokeai.backend.tiles.tiles import calc_tiles_min_overlap
+from invokeai.backend.tiles.utils import TBLR, Tile
+
+
+@invocation("spandrel_image_to_image", title="Image-to-Image", tags=["upscale"], category="upscale", version="1.1.0")
+class SpandrelImageToImageInvocation(BaseInvocation, WithMetadata, WithBoard):
+    """Run any spandrel image-to-image model (https://github.com/chaiNNer-org/spandrel)."""
+
+    image: ImageField = InputField(description="The input image")
+    image_to_image_model: ModelIdentifierField = InputField(
+        title="Image-to-Image Model",
+        description=FieldDescriptions.spandrel_image_to_image_model,
+        ui_type=UIType.SpandrelImageToImageModel,
+    )
+    tile_size: int = InputField(
+        default=512, description="The tile size for tiled image-to-image. Set to 0 to disable tiling."
+    )
+
+    def _scale_tile(self, tile: Tile, scale: int) -> Tile:
+        return Tile(
+            coords=TBLR(
+                top=tile.coords.top * scale,
+                bottom=tile.coords.bottom * scale,
+                left=tile.coords.left * scale,
+                right=tile.coords.right * scale,
+            ),
+            overlap=TBLR(
+                top=tile.overlap.top * scale,
+                bottom=tile.overlap.bottom * scale,
+                left=tile.overlap.left * scale,
+                right=tile.overlap.right * scale,
+            ),
+        )
+
+    @torch.inference_mode()
+    def invoke(self, context: InvocationContext) -> ImageOutput:
+        # Images are converted to RGB, because most models don't support an alpha channel. In the future, we may want to
+        # revisit this.
+        image = context.images.get_pil(self.image.image_name, mode="RGB")
+
+        # Compute the image tiles.
+        if self.tile_size > 0:
+            min_overlap = 20
+            tiles = calc_tiles_min_overlap(
+                image_height=image.height,
+                image_width=image.width,
+                tile_height=self.tile_size,
+                tile_width=self.tile_size,
+                min_overlap=min_overlap,
+            )
+        else:
+            # No tiling. Generate a single tile that covers the entire image.
+            min_overlap = 0
+            tiles = [
+                Tile(
+                    coords=TBLR(top=0, bottom=image.height, left=0, right=image.width),
+                    overlap=TBLR(top=0, bottom=0, left=0, right=0),
+                )
+            ]
+
+        # Sort tiles first by left x coordinate, then by top y coordinate. During tile processing, we want to iterate
+        # over tiles left-to-right, top-to-bottom.
+        tiles = sorted(tiles, key=lambda x: x.coords.left)
+        tiles = sorted(tiles, key=lambda x: x.coords.top)
+
+        # Prepare input image for inference.
+        image_tensor = SpandrelImageToImageModel.pil_to_tensor(image)
+
+        # Load the model.
+        spandrel_model_info = context.models.load(self.image_to_image_model)
+
+        # Run the model on each tile.
+        with spandrel_model_info as spandrel_model:
+            assert isinstance(spandrel_model, SpandrelImageToImageModel)
+
+            # Scale the tiles for re-assembling the final image.
+            scale = spandrel_model.scale
+            scaled_tiles = [self._scale_tile(tile, scale=scale) for tile in tiles]
+
+            # Prepare the output tensor.
+            _, channels, height, width = image_tensor.shape
+            output_tensor = torch.zeros(
+                (height * scale, width * scale, channels), dtype=torch.uint8, device=torch.device("cpu")
+            )
+
+            image_tensor = image_tensor.to(device=spandrel_model.device, dtype=spandrel_model.dtype)
+
+            for tile, scaled_tile in tqdm(list(zip(tiles, scaled_tiles, strict=True)), desc="Upscaling Tiles"):
+                # Exit early if the invocation has been canceled.
+                if context.util.is_canceled():
+                    raise CanceledException
+
+                # Extract the current tile from the input tensor.
+                input_tile = image_tensor[
+                    :, :, tile.coords.top : tile.coords.bottom, tile.coords.left : tile.coords.right
+                ].to(device=spandrel_model.device, dtype=spandrel_model.dtype)
+
+                # Run the model on the tile.
+                output_tile = spandrel_model.run(input_tile)
+
+                # Convert the output tile into the output tensor's format.
+                # (N, C, H, W) -> (C, H, W)
+                output_tile = output_tile.squeeze(0)
+                # (C, H, W) -> (H, W, C)
+                output_tile = output_tile.permute(1, 2, 0)
+                output_tile = output_tile.clamp(0, 1)
+                output_tile = (output_tile * 255).to(dtype=torch.uint8, device=torch.device("cpu"))
+
+                # Merge the output tile into the output tensor.
+                # We only keep half of the overlap on the top and left side of the tile. We do this in case there are
+                # edge artifacts. We don't bother with any 'blending' in the current implementation - for most upscalers
+                # it seems unnecessary, but we may find a need in the future.
+                top_overlap = scaled_tile.overlap.top // 2
+                left_overlap = scaled_tile.overlap.left // 2
+                output_tensor[
+                    scaled_tile.coords.top + top_overlap : scaled_tile.coords.bottom,
+                    scaled_tile.coords.left + left_overlap : scaled_tile.coords.right,
+                    :,
+                ] = output_tile[top_overlap:, left_overlap:, :]
+
+        # Convert the output tensor to a PIL image.
+        np_image = output_tensor.detach().numpy().astype(np.uint8)
+        pil_image = Image.fromarray(np_image)
+        image_dto = context.images.save(image=pil_image)
+        return ImageOutput.build(image_dto)
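Editor's note (illustration, not part of the commit): the merge step above writes each upscaled tile into the output starting half an overlap past its top/left edge, so a later tile overwrites the possibly artifact-prone trailing edge of its neighbor, the seam lands mid-overlap, and no gaps are left. A one-dimensional sketch of that bookkeeping, using the invocation's defaults (tile_size=512, min_overlap=20) and an assumed 4x model; plain dicts stand in for the Tile/TBLR types:

    # 1-D sketch of the tile-merge arithmetic in SpandrelImageToImageInvocation.
    scale = 4
    # Tile A covers [0, 512); tile B covers [492, 1004); they overlap by 20 px.
    tiles = [
        {"left": 0, "right": 512, "overlap_left": 0},
        {"left": 492, "right": 1004, "overlap_left": 20},
    ]

    spans = []
    for t in tiles:
        # Scale tile coordinates into output space, as _scale_tile does.
        left, right = t["left"] * scale, t["right"] * scale
        # Keep only half of the (scaled) left overlap, as the merge loop does.
        trim = (t["overlap_left"] * scale) // 2
        spans.append((left + trim, right))

    print(spans)  # [(0, 2048), (2008, 4016)]
    # Tile B starts writing at 2008, overwriting A's last 40 output pixels;
    # together the spans cover the full [0, 4016) output range exactly.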
@@ -2,7 +2,7 @@
   "name": "ESRGAN Upscaling with Canny ControlNet",
   "author": "InvokeAI",
   "description": "Sample workflow for using Upscaling with ControlNet with SD1.5",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "contact": "invoke@invoke.ai",
   "tags": "upscale, controlnet, default",
   "notes": "",

@@ -36,14 +36,13 @@
     "version": "3.0.0",
     "category": "default"
   },
-  "id": "0e71a27e-a22b-4a9b-b20a-6d789abff2bc",
   "nodes": [
     {
-      "id": "e8bf67fe-67de-4227-87eb-79e86afdfc74",
+      "id": "63b6ab7e-5b05-4d1b-a3b1-42d8e53ce16b",
       "type": "invocation",
       "data": {
-        "id": "e8bf67fe-67de-4227-87eb-79e86afdfc74",
-        "version": "1.1.1",
+        "id": "63b6ab7e-5b05-4d1b-a3b1-42d8e53ce16b",
+        "version": "1.2.0",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",

@@ -57,6 +56,10 @@
           "clip": {
             "name": "clip",
             "label": ""
+          },
+          "mask": {
+            "name": "mask",
+            "label": ""
           }
         },
         "isOpen": true,

@@ -65,122 +68,63 @@
       },
       "position": {
         "x": 1250,
-        "y": 1500
+        "y": 1200
       }
     },
     {
-      "id": "d8ace142-c05f-4f1d-8982-88dc7473958d",
+      "id": "5ca498a4-c8c8-4580-a396-0c984317205d",
       "type": "invocation",
       "data": {
-        "id": "d8ace142-c05f-4f1d-8982-88dc7473958d",
-        "version": "1.0.2",
+        "id": "5ca498a4-c8c8-4580-a396-0c984317205d",
+        "version": "1.1.0",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",
-        "type": "main_model_loader",
+        "type": "i2l",
         "inputs": {
-          "model": {
-            "name": "model",
-            "label": "",
-            "value": {
-              "key": "5cd43ca0-dd0a-418d-9f7e-35b2b9d5e106",
-              "hash": "blake3:6987f323017f597213cc3264250edf57056d21a40a0a85d83a1a33a7d44dc41a",
-              "name": "Deliberate_v5",
-              "base": "sd-1",
-              "type": "main"
-            }
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 700,
-        "y": 1375
-      }
-    },
-    {
-      "id": "771bdf6a-0813-4099-a5d8-921a138754d4",
-      "type": "invocation",
-      "data": {
-        "id": "771bdf6a-0813-4099-a5d8-921a138754d4",
-        "version": "1.0.2",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "image",
-        "inputs": {
-          "image": {
-            "name": "image",
-            "label": "Image To Upscale",
-            "value": {
-              "image_name": "d2e42ba6-d420-496b-82db-91c9b75956c1.png"
-            }
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 344.5593065887157,
-        "y": 1698.161491368619
-      }
-    },
-    {
-      "id": "f7564dd2-9539-47f2-ac13-190804461f4e",
-      "type": "invocation",
-      "data": {
-        "id": "f7564dd2-9539-47f2-ac13-190804461f4e",
-        "version": "1.3.2",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "esrgan",
-        "inputs": {
-          "board": {
-            "name": "board",
-            "label": ""
-          },
-          "metadata": {
-            "name": "metadata",
-            "label": ""
-          },
-          "image": {
-            "name": "image",
-            "label": ""
-          },
-          "model_name": {
-            "name": "model_name",
-            "label": "Upscaler Model",
-            "value": "RealESRGAN_x2plus.pth"
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "vae": {
+            "name": "vae",
+            "label": ""
+          },
+          "tiled": {
+            "name": "tiled",
+            "label": "",
+            "value": false
           },
           "tile_size": {
             "name": "tile_size",
             "label": "",
-            "value": 400
+            "value": 0
+          },
+          "fp32": {
+            "name": "fp32",
+            "label": "",
+            "value": false
           }
         },
-        "isOpen": true,
+        "isOpen": false,
         "isIntermediate": true,
         "useCache": true
       },
       "position": {
-        "x": 717.3863693661265,
-        "y": 1721.9215053134815
+        "x": 1650,
+        "y": 1675
       }
     },
     {
-      "id": "1d887701-df21-4966-ae6e-a7d82307d7bd",
+      "id": "3ed9b2ef-f4ec-40a7-94db-92e63b583ec0",
       "type": "invocation",
       "data": {
-        "id": "1d887701-df21-4966-ae6e-a7d82307d7bd",
-        "version": "1.3.2",
+        "id": "3ed9b2ef-f4ec-40a7-94db-92e63b583ec0",
+        "version": "1.3.0",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",
-        "type": "canny_image_processor",
+        "type": "l2i",
         "inputs": {
           "board": {
             "name": "board",

@@ -190,38 +134,37 @@
             "name": "metadata",
             "label": ""
           },
-          "image": {
-            "name": "image",
+          "latents": {
+            "name": "latents",
             "label": ""
           },
-          "detect_resolution": {
-            "name": "detect_resolution",
-            "label": "",
-            "value": 512
+          "vae": {
+            "name": "vae",
+            "label": ""
           },
-          "image_resolution": {
-            "name": "image_resolution",
+          "tiled": {
+            "name": "tiled",
             "label": "",
-            "value": 512
+            "value": false
           },
-          "low_threshold": {
-            "name": "low_threshold",
+          "tile_size": {
+            "name": "tile_size",
             "label": "",
-            "value": 100
+            "value": 0
           },
-          "high_threshold": {
-            "name": "high_threshold",
+          "fp32": {
+            "name": "fp32",
             "label": "",
-            "value": 200
+            "value": false
           }
         },
         "isOpen": true,
-        "isIntermediate": true,
+        "isIntermediate": false,
         "useCache": true
       },
       "position": {
-        "x": 1200,
-        "y": 1900
+        "x": 2559.4751127537957,
+        "y": 1246.6000376741406
       }
     },
     {

@@ -229,7 +172,7 @@
       "type": "invocation",
       "data": {
         "id": "ca1d020c-89a8-4958-880a-016d28775cfa",
-        "version": "1.1.1",
+        "version": "1.1.2",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",

@@ -285,6 +228,193 @@
         "y": 1902.9649340196056
       }
     },
+    {
+      "id": "1d887701-df21-4966-ae6e-a7d82307d7bd",
+      "type": "invocation",
+      "data": {
+        "id": "1d887701-df21-4966-ae6e-a7d82307d7bd",
+        "version": "1.3.3",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "canny_image_processor",
+        "inputs": {
+          "board": {
+            "name": "board",
+            "label": ""
+          },
+          "metadata": {
+            "name": "metadata",
+            "label": ""
+          },
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "detect_resolution": {
+            "name": "detect_resolution",
+            "label": "",
+            "value": 512
+          },
+          "image_resolution": {
+            "name": "image_resolution",
+            "label": "",
+            "value": 512
+          },
+          "low_threshold": {
+            "name": "low_threshold",
+            "label": "",
+            "value": 100
+          },
+          "high_threshold": {
+            "name": "high_threshold",
+            "label": "",
+            "value": 200
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 1200,
+        "y": 1900
+      }
+    },
+    {
+      "id": "d8ace142-c05f-4f1d-8982-88dc7473958d",
+      "type": "invocation",
+      "data": {
+        "id": "d8ace142-c05f-4f1d-8982-88dc7473958d",
+        "version": "1.0.3",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "main_model_loader",
+        "inputs": {
+          "model": {
+            "name": "model",
+            "label": "",
+            "value": {
+              "key": "5cd43ca0-dd0a-418d-9f7e-35b2b9d5e106",
+              "hash": "blake3:6987f323017f597213cc3264250edf57056d21a40a0a85d83a1a33a7d44dc41a",
+              "name": "Deliberate_v5",
+              "base": "sd-1",
+              "type": "main"
+            }
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 700,
+        "y": 1375
+      }
+    },
+    {
+      "id": "e8bf67fe-67de-4227-87eb-79e86afdfc74",
+      "type": "invocation",
+      "data": {
+        "id": "e8bf67fe-67de-4227-87eb-79e86afdfc74",
+        "version": "1.2.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "compel",
+        "inputs": {
+          "prompt": {
+            "name": "prompt",
+            "label": "",
+            "value": ""
+          },
+          "clip": {
+            "name": "clip",
+            "label": ""
+          },
+          "mask": {
+            "name": "mask",
+            "label": ""
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 1250,
+        "y": 1500
+      }
+    },
+    {
+      "id": "771bdf6a-0813-4099-a5d8-921a138754d4",
+      "type": "invocation",
+      "data": {
+        "id": "771bdf6a-0813-4099-a5d8-921a138754d4",
+        "version": "1.0.2",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "image",
+        "inputs": {
+          "image": {
+            "name": "image",
+            "label": "Image To Upscale"
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 344.5593065887157,
+        "y": 1698.161491368619
+      }
+    },
+    {
+      "id": "f7564dd2-9539-47f2-ac13-190804461f4e",
+      "type": "invocation",
+      "data": {
+        "id": "f7564dd2-9539-47f2-ac13-190804461f4e",
+        "version": "1.3.2",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "esrgan",
+        "inputs": {
+          "board": {
+            "name": "board",
+            "label": ""
+          },
+          "metadata": {
+            "name": "metadata",
+            "label": ""
+          },
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "model_name": {
+            "name": "model_name",
+            "label": "Upscaler Model",
+            "value": "RealESRGAN_x2plus.pth"
+          },
+          "tile_size": {
+            "name": "tile_size",
+            "label": "",
+            "value": 400
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 717.3863693661265,
+        "y": 1721.9215053134815
+      }
+    },
     {
       "id": "f50624ce-82bf-41d0-bdf7-8aab11a80d48",
       "type": "invocation",

@@ -413,122 +543,6 @@
         "y": 1232.6219060454753
       }
     },
-    {
-      "id": "3ed9b2ef-f4ec-40a7-94db-92e63b583ec0",
-      "type": "invocation",
-      "data": {
-        "id": "3ed9b2ef-f4ec-40a7-94db-92e63b583ec0",
-        "version": "1.2.2",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "l2i",
-        "inputs": {
-          "board": {
-            "name": "board",
-            "label": ""
-          },
-          "metadata": {
-            "name": "metadata",
-            "label": ""
-          },
-          "latents": {
-            "name": "latents",
-            "label": ""
-          },
-          "vae": {
-            "name": "vae",
-            "label": ""
-          },
-          "tiled": {
-            "name": "tiled",
-            "label": "",
-            "value": false
-          },
-          "fp32": {
-            "name": "fp32",
-            "label": "",
-            "value": false
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": false,
-        "useCache": true
-      },
-      "position": {
-        "x": 2559.4751127537957,
-        "y": 1246.6000376741406
-      }
-    },
-    {
-      "id": "5ca498a4-c8c8-4580-a396-0c984317205d",
-      "type": "invocation",
-      "data": {
-        "id": "5ca498a4-c8c8-4580-a396-0c984317205d",
-        "version": "1.0.2",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "i2l",
-        "inputs": {
-          "image": {
-            "name": "image",
-            "label": ""
-          },
-          "vae": {
-            "name": "vae",
-            "label": ""
-          },
-          "tiled": {
-            "name": "tiled",
-            "label": "",
-            "value": false
-          },
-          "fp32": {
-            "name": "fp32",
-            "label": "",
-            "value": false
-          }
-        },
-        "isOpen": false,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 1650,
-        "y": 1675
-      }
-    },
-    {
-      "id": "63b6ab7e-5b05-4d1b-a3b1-42d8e53ce16b",
-      "type": "invocation",
-      "data": {
-        "id": "63b6ab7e-5b05-4d1b-a3b1-42d8e53ce16b",
-        "version": "1.1.1",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "compel",
-        "inputs": {
-          "prompt": {
-            "name": "prompt",
-            "label": "",
-            "value": ""
-          },
-          "clip": {
-            "name": "clip",
-            "label": ""
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 1250,
-        "y": 1200
-      }
-    },
     {
       "id": "eb8f6f8a-c7b1-4914-806e-045ee2717a35",
       "type": "invocation",
@@ -2,7 +2,7 @@
   "name": "Face Detailer with IP-Adapter & Canny (See Note in Details)",
   "author": "kosmoskatten",
   "description": "A workflow to add detail to and improve faces. This workflow is most effective when used with a model that creates realistic outputs. ",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "contact": "invoke@invoke.ai",
   "tags": "face detailer, IP-Adapter, Canny",
   "notes": "Set this image as the blur mask: https://i.imgur.com/Gxi61zP.png",

@@ -37,16 +37,219 @@
     }
   ],
   "meta": {
-    "category": "default",
-    "version": "3.0.0"
+    "version": "3.0.0",
+    "category": "default"
   },
   "nodes": [
     {
-      "id": "44f2c190-eb03-460d-8d11-a94d13b33f19",
+      "id": "c6359181-6479-40ec-bf3a-b7e8451683b8",
       "type": "invocation",
       "data": {
-        "id": "44f2c190-eb03-460d-8d11-a94d13b33f19",
-        "version": "1.1.1",
+        "id": "c6359181-6479-40ec-bf3a-b7e8451683b8",
+        "version": "1.0.3",
+        "label": "",
+        "notes": "",
+        "type": "main_model_loader",
+        "inputs": {
+          "model": {
+            "name": "model",
+            "label": ""
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 2031.5518710051792,
+        "y": -492.1742944307074
+      }
+    },
+    {
+      "id": "8fe598c6-d447-44fa-a165-4975af77d080",
+      "type": "invocation",
+      "data": {
+        "id": "8fe598c6-d447-44fa-a165-4975af77d080",
+        "version": "1.3.3",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "canny_image_processor",
+        "inputs": {
+          "board": {
+            "name": "board",
+            "label": ""
+          },
+          "metadata": {
+            "name": "metadata",
+            "label": ""
+          },
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "detect_resolution": {
+            "name": "detect_resolution",
+            "label": "",
+            "value": 512
+          },
+          "image_resolution": {
+            "name": "image_resolution",
+            "label": "",
+            "value": 512
+          },
+          "low_threshold": {
+            "name": "low_threshold",
+            "label": "",
+            "value": 100
+          },
+          "high_threshold": {
+            "name": "high_threshold",
+            "label": "",
+            "value": 200
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 3519.4131037388597,
+        "y": 576.7946795840575
+      }
+    },
+    {
+      "id": "f60b6161-8f26-42f6-89ff-545e6011e501",
+      "type": "invocation",
+      "data": {
+        "id": "f60b6161-8f26-42f6-89ff-545e6011e501",
+        "version": "1.1.2",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "controlnet",
+        "inputs": {
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "control_model": {
+            "name": "control_model",
+            "label": "Control Model (select canny)",
+            "value": {
+              "key": "5bdaacf7-a7a3-4fb8-b394-cc0ffbb8941d",
+              "hash": "blake3:260c7f8e10aefea9868cfc68d89970e91033bd37132b14b903e70ee05ebf530e",
+              "name": "sd-controlnet-canny",
+              "base": "sd-1",
+              "type": "controlnet"
+            }
+          },
+          "control_weight": {
+            "name": "control_weight",
+            "label": "",
+            "value": 0.5
+          },
+          "begin_step_percent": {
+            "name": "begin_step_percent",
+            "label": "",
+            "value": 0
+          },
+          "end_step_percent": {
+            "name": "end_step_percent",
+            "label": "",
+            "value": 0.5
+          },
+          "control_mode": {
+            "name": "control_mode",
+            "label": "",
+            "value": "balanced"
+          },
+          "resize_mode": {
+            "name": "resize_mode",
+            "label": "",
+            "value": "just_resize"
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 3950,
+        "y": 150
+      }
+    },
+    {
+      "id": "22b750db-b85e-486b-b278-ac983e329813",
+      "type": "invocation",
+      "data": {
+        "id": "22b750db-b85e-486b-b278-ac983e329813",
+        "version": "1.4.1",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "ip_adapter",
+        "inputs": {
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "ip_adapter_model": {
+            "name": "ip_adapter_model",
+            "label": "IP-Adapter Model (select IP Adapter Face)",
+            "value": {
+              "key": "1cc210bb-4d0a-4312-b36c-b5d46c43768e",
+              "hash": "blake3:3d669dffa7471b357b4df088b99ffb6bf4d4383d5e0ef1de5ec1c89728a3d5a5",
+              "name": "ip_adapter_sd15",
+              "base": "sd-1",
+              "type": "ip_adapter"
+            }
+          },
+          "clip_vision_model": {
+            "name": "clip_vision_model",
+            "label": "",
+            "value": "ViT-H"
+          },
+          "weight": {
+            "name": "weight",
+            "label": "",
+            "value": 0.5
+          },
+          "method": {
+            "name": "method",
+            "label": "",
+            "value": "full"
+          },
+          "begin_step_percent": {
+            "name": "begin_step_percent",
+            "label": "",
+            "value": 0
+          },
+          "end_step_percent": {
+            "name": "end_step_percent",
+            "label": "",
+            "value": 0.8
+          },
+          "mask": {
+            "name": "mask",
+            "label": ""
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 3575,
+        "y": -200
+      }
+    },
+    {
+      "id": "f4d15b64-c4a6-42a5-90fc-e4ed07a0ca65",
+      "type": "invocation",
+      "data": {
+        "id": "f4d15b64-c4a6-42a5-90fc-e4ed07a0ca65",
+        "version": "1.2.0",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",

@@ -60,6 +263,140 @@
           "clip": {
             "name": "clip",
             "label": ""
+          },
+          "mask": {
+            "name": "mask",
+            "label": ""
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 2550,
+        "y": -525
+      }
+    },
+    {
+      "id": "2224ed72-2453-4252-bd89-3085240e0b6f",
+      "type": "invocation",
+      "data": {
+        "id": "2224ed72-2453-4252-bd89-3085240e0b6f",
+        "version": "1.3.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "l2i",
+        "inputs": {
+          "board": {
+            "name": "board",
+            "label": ""
+          },
+          "metadata": {
+            "name": "metadata",
+            "label": ""
+          },
+          "latents": {
+            "name": "latents",
+            "label": ""
+          },
+          "vae": {
+            "name": "vae",
+            "label": ""
+          },
+          "tiled": {
+            "name": "tiled",
+            "label": "",
+            "value": false
+          },
+          "tile_size": {
+            "name": "tile_size",
+            "label": "",
+            "value": 0
+          },
+          "fp32": {
+            "name": "fp32",
+            "label": "",
+            "value": true
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": false,
+        "useCache": true
+      },
+      "position": {
+        "x": 4980.1395106966565,
+        "y": -255.9158921745602
+      }
+    },
+    {
+      "id": "de8b1a48-a2e4-42ca-90bb-66058bffd534",
+      "type": "invocation",
+      "data": {
+        "id": "de8b1a48-a2e4-42ca-90bb-66058bffd534",
+        "version": "1.1.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "i2l",
+        "inputs": {
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "vae": {
+            "name": "vae",
+            "label": ""
+          },
+          "tiled": {
+            "name": "tiled",
+            "label": "",
+            "value": false
+          },
+          "tile_size": {
+            "name": "tile_size",
+            "label": "",
+            "value": 0
+          },
+          "fp32": {
+            "name": "fp32",
+            "label": "",
+            "value": true
+          }
+        },
+        "isOpen": false,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 3100,
+        "y": -275
+      }
+    },
+    {
+      "id": "44f2c190-eb03-460d-8d11-a94d13b33f19",
+      "type": "invocation",
+      "data": {
+        "id": "44f2c190-eb03-460d-8d11-a94d13b33f19",
+        "version": "1.2.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "compel",
+        "inputs": {
+          "prompt": {
+            "name": "prompt",
+            "label": "",
+            "value": ""
+          },
+          "clip": {
+            "name": "clip",
+            "label": ""
+          },
+          "mask": {
+            "name": "mask",
+            "label": ""
           }
         },
         "isOpen": true,

@@ -251,45 +588,6 @@
         "y": 0
       }
     },
-    {
-      "id": "de8b1a48-a2e4-42ca-90bb-66058bffd534",
-      "type": "invocation",
-      "data": {
-        "id": "de8b1a48-a2e4-42ca-90bb-66058bffd534",
-        "version": "1.0.2",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "i2l",
-        "inputs": {
-          "image": {
-            "name": "image",
-            "label": ""
-          },
-          "vae": {
-            "name": "vae",
-            "label": ""
-          },
-          "tiled": {
-            "name": "tiled",
-            "label": "",
-            "value": false
-          },
-          "fp32": {
-            "name": "fp32",
-            "label": "",
-            "value": true
-          }
-        },
-        "isOpen": false,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 3100,
-        "y": -275
-      }
-    },
     {
       "id": "bd06261d-a74a-4d1f-8374-745ed6194bc2",
       "type": "invocation",

@@ -418,53 +716,6 @@
         "y": -175
       }
     },
-    {
-      "id": "2224ed72-2453-4252-bd89-3085240e0b6f",
-      "type": "invocation",
-      "data": {
-        "id": "2224ed72-2453-4252-bd89-3085240e0b6f",
-        "version": "1.2.2",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "l2i",
-        "inputs": {
-          "board": {
-            "name": "board",
-            "label": ""
-          },
-          "metadata": {
-            "name": "metadata",
-            "label": ""
-          },
-          "latents": {
-            "name": "latents",
-            "label": ""
-          },
-          "vae": {
-            "name": "vae",
-            "label": ""
-          },
-          "tiled": {
-            "name": "tiled",
-            "label": "",
-            "value": false
-          },
-          "fp32": {
-            "name": "fp32",
-            "label": "",
-            "value": true
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": false,
-        "useCache": true
-      },
-      "position": {
-        "x": 4980.1395106966565,
-        "y": -255.9158921745602
-      }
-    },
     {
       "id": "2974e5b3-3d41-4b6f-9953-cd21e8f3a323",
       "type": "invocation",

@@ -692,201 +943,6 @@
         "y": -275
       }
     },
-    {
-      "id": "f4d15b64-c4a6-42a5-90fc-e4ed07a0ca65",
-      "type": "invocation",
-      "data": {
-        "id": "f4d15b64-c4a6-42a5-90fc-e4ed07a0ca65",
-        "version": "1.1.1",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "compel",
-        "inputs": {
-          "prompt": {
-            "name": "prompt",
-            "label": "",
-            "value": ""
-          },
-          "clip": {
-            "name": "clip",
-            "label": ""
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 2550,
-        "y": -525
-      }
-    },
-    {
-      "id": "22b750db-b85e-486b-b278-ac983e329813",
-      "type": "invocation",
-      "data": {
-        "id": "22b750db-b85e-486b-b278-ac983e329813",
-        "version": "1.2.2",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "ip_adapter",
-        "inputs": {
-          "image": {
-            "name": "image",
-            "label": ""
-          },
-          "ip_adapter_model": {
-            "name": "ip_adapter_model",
-            "label": "IP-Adapter Model (select IP Adapter Face)",
-            "value": {
-              "key": "1cc210bb-4d0a-4312-b36c-b5d46c43768e",
-              "hash": "blake3:3d669dffa7471b357b4df088b99ffb6bf4d4383d5e0ef1de5ec1c89728a3d5a5",
-              "name": "ip_adapter_sd15",
-              "base": "sd-1",
-              "type": "ip_adapter"
-            }
-          },
-          "weight": {
-            "name": "weight",
-            "label": "",
-            "value": 0.5
-          },
-          "begin_step_percent": {
-            "name": "begin_step_percent",
-            "label": "",
-            "value": 0
-          },
-          "end_step_percent": {
-            "name": "end_step_percent",
-            "label": "",
-            "value": 0.8
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 3575,
-        "y": -200
-      }
-    },
-    {
-      "id": "f60b6161-8f26-42f6-89ff-545e6011e501",
-      "type": "invocation",
-      "data": {
-        "id": "f60b6161-8f26-42f6-89ff-545e6011e501",
-        "version": "1.1.1",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "controlnet",
-        "inputs": {
-          "image": {
-            "name": "image",
-            "label": ""
-          },
-          "control_model": {
-            "name": "control_model",
-            "label": "Control Model (select canny)",
-            "value": {
-              "key": "5bdaacf7-a7a3-4fb8-b394-cc0ffbb8941d",
-              "hash": "blake3:260c7f8e10aefea9868cfc68d89970e91033bd37132b14b903e70ee05ebf530e",
-              "name": "sd-controlnet-canny",
-              "base": "sd-1",
-              "type": "controlnet"
-            }
-          },
-          "control_weight": {
-            "name": "control_weight",
-            "label": "",
-            "value": 0.5
-          },
-          "begin_step_percent": {
-            "name": "begin_step_percent",
-            "label": "",
-            "value": 0
-          },
-          "end_step_percent": {
-            "name": "end_step_percent",
-            "label": "",
-            "value": 0.5
-          },
-          "control_mode": {
-            "name": "control_mode",
-            "label": "",
-            "value": "balanced"
-          },
-          "resize_mode": {
-            "name": "resize_mode",
-            "label": "",
-            "value": "just_resize"
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 3950,
-        "y": 150
-      }
-    },
-    {
-      "id": "8fe598c6-d447-44fa-a165-4975af77d080",
-      "type": "invocation",
-      "data": {
-        "id": "8fe598c6-d447-44fa-a165-4975af77d080",
-        "version": "1.3.2",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "canny_image_processor",
-        "inputs": {
-          "board": {
-            "name": "board",
-            "label": ""
-          },
-          "metadata": {
-            "name": "metadata",
-            "label": ""
-          },
-          "image": {
-            "name": "image",
-            "label": ""
-          },
-          "detect_resolution": {
-            "name": "detect_resolution",
-            "label": "",
-            "value": 512
-          },
-          "image_resolution": {
-            "name": "image_resolution",
-            "label": "",
-            "value": 512
-          },
-          "low_threshold": {
-            "name": "low_threshold",
-            "label": "",
-            "value": 100
-          },
-          "high_threshold": {
-            "name": "high_threshold",
-            "label": "",
-            "value": 200
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 3519.4131037388597,
-        "y": 576.7946795840575
-      }
-    },
     {
       "id": "4bd4ae80-567f-4366-b8c6-3bb06f4fb46a",
       "type": "invocation",

@@ -1035,30 +1091,6 @@
         "x": 2578.2364832140506,
         "y": 78.7948456497351
       }
-    },
-    {
-      "id": "c6359181-6479-40ec-bf3a-b7e8451683b8",
-      "type": "invocation",
-      "data": {
-        "id": "c6359181-6479-40ec-bf3a-b7e8451683b8",
-        "version": "1.0.2",
-        "label": "",
-        "notes": "",
-        "type": "main_model_loader",
-        "inputs": {
-          "model": {
-            "name": "model",
-            "label": ""
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 2031.5518710051792,
-        "y": -492.1742944307074
-      }
     }
   ],
   "edges": [
@@ -2,7 +2,7 @@
   "name": "Multi ControlNet (Canny & Depth)",
   "author": "InvokeAI",
   "description": "A sample workflow using canny & depth ControlNets to guide the generation process. ",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "contact": "invoke@invoke.ai",
   "tags": "ControlNet, canny, depth",
   "notes": "",

@@ -37,140 +37,104 @@
     }
   ],
   "meta": {
-    "category": "default",
-    "version": "3.0.0"
+    "version": "3.0.0",
+    "category": "default"
   },
   "nodes": [
     {
-      "id": "8e860e51-5045-456e-bf04-9a62a2a5c49e",
+      "id": "9db25398-c869-4a63-8815-c6559341ef12",
       "type": "invocation",
       "data": {
-        "id": "8e860e51-5045-456e-bf04-9a62a2a5c49e",
-        "version": "1.0.2",
+        "id": "9db25398-c869-4a63-8815-c6559341ef12",
+        "version": "1.3.0",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",
-        "type": "image",
+        "type": "l2i",
         "inputs": {
-          "image": {
-            "name": "image",
-            "label": "Depth Input Image"
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 3666.135718057363,
-        "y": 186.66887319822808
-      }
-    },
-    {
-      "id": "a33199c2-8340-401e-b8a2-42ffa875fc1c",
-      "type": "invocation",
-      "data": {
-        "id": "a33199c2-8340-401e-b8a2-42ffa875fc1c",
-        "version": "1.1.1",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "controlnet",
-        "inputs": {
-          "image": {
-            "name": "image",
+          "board": {
+            "name": "board",
             "label": ""
           },
-          "control_model": {
-            "name": "control_model",
-            "label": "Control Model (select depth)",
-            "value": {
-              "key": "87e8855c-671f-4c9e-bbbb-8ed47ccb4aac",
-              "hash": "blake3:2550bf22a53942dfa28ab2fed9d10d80851112531f44d977168992edf9d0534c",
-              "name": "control_v11f1p_sd15_depth",
-              "base": "sd-1",
-              "type": "controlnet"
-            }
+          "metadata": {
+            "name": "metadata",
+            "label": ""
           },
-          "control_weight": {
-            "name": "control_weight",
+          "latents": {
+            "name": "latents",
+            "label": ""
+          },
+          "vae": {
+            "name": "vae",
+            "label": ""
+          },
+          "tiled": {
+            "name": "tiled",
             "label": "",
-            "value": 1
+            "value": false
           },
-          "begin_step_percent": {
-            "name": "begin_step_percent",
+          "tile_size": {
+            "name": "tile_size",
             "label": "",
             "value": 0
           },
-          "end_step_percent": {
-            "name": "end_step_percent",
+          "fp32": {
+            "name": "fp32",
             "label": "",
-            "value": 1
-          },
-          "control_mode": {
-            "name": "control_mode",
-            "label": "",
-            "value": "balanced"
-          },
-          "resize_mode": {
-            "name": "resize_mode",
-            "label": "",
-            "value": "just_resize"
+            "value": false
           }
         },
         "isOpen": true,
-        "isIntermediate": true,
+        "isIntermediate": false,
         "useCache": true
       },
       "position": {
-        "x": 4477.604342844504,
-        "y": -49.39005411272677
-      }
-    },
-    {
-      "id": "273e3f96-49ea-4dc5-9d5b-9660390f14e1",
-      "type": "invocation",
-      "data": {
-        "id": "273e3f96-49ea-4dc5-9d5b-9660390f14e1",
-        "version": "1.1.1",
-        "nodePack": "invokeai",
-        "label": "",
-        "notes": "",
-        "type": "compel",
-        "inputs": {
-          "prompt": {
-            "name": "prompt",
-            "label": "Negative Prompt",
-            "value": ""
-          },
-          "clip": {
-            "name": "clip",
-            "label": ""
-          }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 4075,
+        "x": 5675,
         "y": -825
       }
     },
     {
-      "id": "54486974-835b-4d81-8f82-05f9f32ce9e9",
+      "id": "c826ba5e-9676-4475-b260-07b85e88753c",
       "type": "invocation",
       "data": {
-        "id": "54486974-835b-4d81-8f82-05f9f32ce9e9",
-        "version": "1.0.2",
+        "id": "c826ba5e-9676-4475-b260-07b85e88753c",
+        "version": "1.3.3",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",
-        "type": "main_model_loader",
+        "type": "canny_image_processor",
         "inputs": {
-          "model": {
-            "name": "model",
+          "board": {
+            "name": "board",
             "label": ""
+          },
+          "metadata": {
+            "name": "metadata",
+            "label": ""
+          },
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "detect_resolution": {
+            "name": "detect_resolution",
+            "label": "",
+            "value": 512
+          },
+          "image_resolution": {
+            "name": "image_resolution",
+            "label": "",
+            "value": 512
+          },
+          "low_threshold": {
+            "name": "low_threshold",
+            "label": "",
+            "value": 100
+          },
+          "high_threshold": {
+            "name": "high_threshold",
+            "label": "",
+            "value": 200
           }
         },
         "isOpen": true,

@@ -178,29 +142,52 @@
         "useCache": true
       },
       "position": {
-        "x": 3600,
-        "y": -1000
+        "x": 4095.757337055795,
+        "y": -455.63440891935863
       }
     },
     {
-      "id": "7ce68934-3419-42d4-ac70-82cfc9397306",
+      "id": "018b1214-c2af-43a7-9910-fb687c6726d7",
       "type": "invocation",
       "data": {
-        "id": "7ce68934-3419-42d4-ac70-82cfc9397306",
-        "version": "1.1.1",
+        "id": "018b1214-c2af-43a7-9910-fb687c6726d7",
+        "version": "1.2.4",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",
-        "type": "compel",
+        "type": "midas_depth_image_processor",
         "inputs": {
-          "prompt": {
-            "name": "prompt",
-            "label": "Positive Prompt",
-            "value": ""
-          },
-          "clip": {
-            "name": "clip",
+          "board": {
+            "name": "board",
             "label": ""
+          },
+          "metadata": {
+            "name": "metadata",
+            "label": ""
+          },
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "a_mult": {
+            "name": "a_mult",
+            "label": "",
+            "value": 2
+          },
+          "bg_th": {
+            "name": "bg_th",
+            "label": "",
+            "value": 0.1
+          },
+          "detect_resolution": {
+            "name": "detect_resolution",
+            "label": "",
+            "value": 512
+          },
+          "image_resolution": {
+            "name": "image_resolution",
+            "label": "",
+            "value": 512
           }
         },
         "isOpen": true,

@@ -208,8 +195,8 @@
         "useCache": true
       },
       "position": {
-        "x": 4075,
-        "y": -1125
+        "x": 4082.783145980783,
+        "y": 0.01629251229994111
       }
     },
     {

@@ -217,7 +204,7 @@
       "type": "invocation",
       "data": {
         "id": "d204d184-f209-4fae-a0a1-d152800844e1",
-        "version": "1.1.1",
+        "version": "1.1.2",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",

@@ -273,6 +260,185 @@
         "y": -618.4221638099414
       }
     },
+    {
+      "id": "7ce68934-3419-42d4-ac70-82cfc9397306",
+      "type": "invocation",
+      "data": {
+        "id": "7ce68934-3419-42d4-ac70-82cfc9397306",
+        "version": "1.2.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "compel",
+        "inputs": {
+          "prompt": {
+            "name": "prompt",
+            "label": "Positive Prompt",
+            "value": ""
+          },
+          "clip": {
+            "name": "clip",
+            "label": ""
+          },
+          "mask": {
+            "name": "mask",
+            "label": ""
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 4075,
+        "y": -1125
+      }
+    },
+    {
+      "id": "54486974-835b-4d81-8f82-05f9f32ce9e9",
+      "type": "invocation",
+      "data": {
+        "id": "54486974-835b-4d81-8f82-05f9f32ce9e9",
+        "version": "1.0.3",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "main_model_loader",
+        "inputs": {
+          "model": {
+            "name": "model",
+            "label": ""
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 3600,
+        "y": -1000
+      }
+    },
+    {
+      "id": "273e3f96-49ea-4dc5-9d5b-9660390f14e1",
+      "type": "invocation",
+      "data": {
+        "id": "273e3f96-49ea-4dc5-9d5b-9660390f14e1",
+        "version": "1.2.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "compel",
+        "inputs": {
+          "prompt": {
+            "name": "prompt",
+            "label": "Negative Prompt",
+            "value": ""
+          },
+          "clip": {
+            "name": "clip",
+            "label": ""
+          },
+          "mask": {
+            "name": "mask",
+            "label": ""
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": {
+        "x": 4075,
+        "y": -825
+      }
+    },
+    {
+      "id": "a33199c2-8340-401e-b8a2-42ffa875fc1c",
+      "type": "invocation",
+      "data": {
+        "id": "a33199c2-8340-401e-b8a2-42ffa875fc1c",
+        "version": "1.1.2",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "controlnet",
+        "inputs": {
+          "image": {
+            "name": "image",
+            "label": ""
+          },
+          "control_model": {
+            "name": "control_model",
+            "label": "Control Model (select depth)",
+            "value": {
+              "key": "87e8855c-671f-4c9e-bbbb-8ed47ccb4aac",
+              "hash": "blake3:2550bf22a53942dfa28ab2fed9d10d80851112531f44d977168992edf9d0534c",
+              "name": "control_v11f1p_sd15_depth",
+              "base": "sd-1",
+              "type": "controlnet"
+            }
+          },
+          "control_weight": {
+            "name": "control_weight",
+            "label": "",
+            "value": 1
+          },
+          "begin_step_percent": {
+            "name": "begin_step_percent",
|
||||||
|
"label": "",
|
||||||
|
"value": 0
|
||||||
|
},
|
||||||
|
"end_step_percent": {
|
||||||
|
"name": "end_step_percent",
|
||||||
|
"label": "",
|
||||||
|
"value": 1
|
||||||
|
},
|
||||||
|
"control_mode": {
|
||||||
|
"name": "control_mode",
|
||||||
|
"label": "",
|
||||||
|
"value": "balanced"
|
||||||
|
},
|
||||||
|
"resize_mode": {
|
||||||
|
"name": "resize_mode",
|
||||||
|
"label": "",
|
||||||
|
"value": "just_resize"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"isOpen": true,
|
||||||
|
"isIntermediate": true,
|
||||||
|
"useCache": true
|
||||||
|
},
|
||||||
|
"position": {
|
||||||
|
"x": 4477.604342844504,
|
||||||
|
"y": -49.39005411272677
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "8e860e51-5045-456e-bf04-9a62a2a5c49e",
|
||||||
|
"type": "invocation",
|
||||||
|
"data": {
|
||||||
|
"id": "8e860e51-5045-456e-bf04-9a62a2a5c49e",
|
||||||
|
"version": "1.0.2",
|
||||||
|
"nodePack": "invokeai",
|
||||||
|
"label": "",
|
||||||
|
"notes": "",
|
||||||
|
"type": "image",
|
||||||
|
"inputs": {
|
||||||
|
"image": {
|
||||||
|
"name": "image",
|
||||||
|
"label": "Depth Input Image"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"isOpen": true,
|
||||||
|
"isIntermediate": true,
|
||||||
|
"useCache": true
|
||||||
|
},
|
||||||
|
"position": {
|
||||||
|
"x": 3666.135718057363,
|
||||||
|
"y": 186.66887319822808
|
||||||
|
}
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"id": "c4b23e64-7986-40c4-9cad-46327b12e204",
|
"id": "c4b23e64-7986-40c4-9cad-46327b12e204",
|
||||||
"type": "invocation",
|
"type": "invocation",
|
||||||
@ -322,159 +488,6 @@
|
|||||||
"y": -575
|
"y": -575
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "018b1214-c2af-43a7-9910-fb687c6726d7",
|
|
||||||
"type": "invocation",
|
|
||||||
"data": {
|
|
||||||
"id": "018b1214-c2af-43a7-9910-fb687c6726d7",
|
|
||||||
"version": "1.2.3",
|
|
||||||
"nodePack": "invokeai",
|
|
||||||
"label": "",
|
|
||||||
"notes": "",
|
|
||||||
"type": "midas_depth_image_processor",
|
|
||||||
"inputs": {
|
|
||||||
"board": {
|
|
||||||
"name": "board",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"metadata": {
|
|
||||||
"name": "metadata",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"image": {
|
|
||||||
"name": "image",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"a_mult": {
|
|
||||||
"name": "a_mult",
|
|
||||||
"label": "",
|
|
||||||
"value": 2
|
|
||||||
},
|
|
||||||
"bg_th": {
|
|
||||||
"name": "bg_th",
|
|
||||||
"label": "",
|
|
||||||
"value": 0.1
|
|
||||||
},
|
|
||||||
"detect_resolution": {
|
|
||||||
"name": "detect_resolution",
|
|
||||||
"label": "",
|
|
||||||
"value": 512
|
|
||||||
},
|
|
||||||
"image_resolution": {
|
|
||||||
"name": "image_resolution",
|
|
||||||
"label": "",
|
|
||||||
"value": 512
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"isOpen": true,
|
|
||||||
"isIntermediate": true,
|
|
||||||
"useCache": true
|
|
||||||
},
|
|
||||||
"position": {
|
|
||||||
"x": 4082.783145980783,
|
|
||||||
"y": 0.01629251229994111
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "c826ba5e-9676-4475-b260-07b85e88753c",
|
|
||||||
"type": "invocation",
|
|
||||||
"data": {
|
|
||||||
"id": "c826ba5e-9676-4475-b260-07b85e88753c",
|
|
||||||
"version": "1.3.2",
|
|
||||||
"nodePack": "invokeai",
|
|
||||||
"label": "",
|
|
||||||
"notes": "",
|
|
||||||
"type": "canny_image_processor",
|
|
||||||
"inputs": {
|
|
||||||
"board": {
|
|
||||||
"name": "board",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"metadata": {
|
|
||||||
"name": "metadata",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"image": {
|
|
||||||
"name": "image",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"detect_resolution": {
|
|
||||||
"name": "detect_resolution",
|
|
||||||
"label": "",
|
|
||||||
"value": 512
|
|
||||||
},
|
|
||||||
"image_resolution": {
|
|
||||||
"name": "image_resolution",
|
|
||||||
"label": "",
|
|
||||||
"value": 512
|
|
||||||
},
|
|
||||||
"low_threshold": {
|
|
||||||
"name": "low_threshold",
|
|
||||||
"label": "",
|
|
||||||
"value": 100
|
|
||||||
},
|
|
||||||
"high_threshold": {
|
|
||||||
"name": "high_threshold",
|
|
||||||
"label": "",
|
|
||||||
"value": 200
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"isOpen": true,
|
|
||||||
"isIntermediate": true,
|
|
||||||
"useCache": true
|
|
||||||
},
|
|
||||||
"position": {
|
|
||||||
"x": 4095.757337055795,
|
|
||||||
"y": -455.63440891935863
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "9db25398-c869-4a63-8815-c6559341ef12",
|
|
||||||
"type": "invocation",
|
|
||||||
"data": {
|
|
||||||
"id": "9db25398-c869-4a63-8815-c6559341ef12",
|
|
||||||
"version": "1.2.2",
|
|
||||||
"nodePack": "invokeai",
|
|
||||||
"label": "",
|
|
||||||
"notes": "",
|
|
||||||
"type": "l2i",
|
|
||||||
"inputs": {
|
|
||||||
"board": {
|
|
||||||
"name": "board",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"metadata": {
|
|
||||||
"name": "metadata",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"latents": {
|
|
||||||
"name": "latents",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"vae": {
|
|
||||||
"name": "vae",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"tiled": {
|
|
||||||
"name": "tiled",
|
|
||||||
"label": "",
|
|
||||||
"value": false
|
|
||||||
},
|
|
||||||
"fp32": {
|
|
||||||
"name": "fp32",
|
|
||||||
"label": "",
|
|
||||||
"value": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"isOpen": true,
|
|
||||||
"isIntermediate": false,
|
|
||||||
"useCache": true
|
|
||||||
},
|
|
||||||
"position": {
|
|
||||||
"x": 5675,
|
|
||||||
"y": -825
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "ac481b7f-08bf-4a9d-9e0c-3a82ea5243ce",
|
"id": "ac481b7f-08bf-4a9d-9e0c-3a82ea5243ce",
|
||||||
"type": "invocation",
|
"type": "invocation",
|
||||||
|
File diff suppressed because it is too large
Load Diff
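The workflow diffs that follow all apply the same mechanical update: the workflow "version" moves from 2.0.0 to 2.1.0, the "meta" keys are reserialized, individual node "version" fields are bumped, and nodes are reordered inside the "nodes" array. A minimal sketch for auditing the node versions a workflow file declares, assuming only the JSON layout visible in these diffs (the filename is hypothetical):

import json

# Hypothetical path; the default workflow files touched by this commit all
# share the same top-level layout: {"nodes": [{"data": {...}}, ...]}.
with open("multi_controlnet.json") as f:
    workflow = json.load(f)

for node in workflow["nodes"]:
    data = node["data"]
    # Each node's data carries its own schema version and an inputs dict.
    print(f'{data["type"]:32} {data["version"]:8} {", ".join(data["inputs"])}')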
@@ -2,7 +2,7 @@
   "name": "Prompt from File",
   "author": "InvokeAI",
   "description": "Sample workflow using Prompt from File node",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "contact": "invoke@invoke.ai",
   "tags": "text2image, prompt from file, default",
   "notes": "",
@@ -37,16 +37,68 @@
     }
   ],
   "meta": {
-    "category": "default",
-    "version": "3.0.0"
+    "version": "3.0.0",
+    "category": "default"
   },
   "nodes": [
     {
-      "id": "c2eaf1ba-5708-4679-9e15-945b8b432692",
+      "id": "491ec988-3c77-4c37-af8a-39a0c4e7a2a1",
       "type": "invocation",
       "data": {
-        "id": "c2eaf1ba-5708-4679-9e15-945b8b432692",
-        "version": "1.1.1",
+        "id": "491ec988-3c77-4c37-af8a-39a0c4e7a2a1",
+        "version": "1.3.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "l2i",
+        "inputs": {
+          "board": { "name": "board", "label": "" },
+          "metadata": { "name": "metadata", "label": "" },
+          "latents": { "name": "latents", "label": "" },
+          "vae": { "name": "vae", "label": "" },
+          "tiled": { "name": "tiled", "label": "", "value": false },
+          "tile_size": { "name": "tile_size", "label": "", "value": 0 },
+          "fp32": { "name": "fp32", "label": "", "value": false }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": 2037.861329274915, "y": -329.8393457509562 }
+    },
+    {
+      "id": "fc9d0e35-a6de-4a19-84e1-c72497c823f6",
+      "type": "invocation",
+      "data": {
+        "id": "fc9d0e35-a6de-4a19-84e1-c72497c823f6",
+        "version": "1.2.0",
         "nodePack": "invokeai",
         "label": "",
         "notes": "",
@@ -60,6 +112,69 @@
         "clip": {
           "name": "clip",
           "label": ""
+        },
+        "mask": {
+          "name": "mask",
+          "label": ""
+        }
+      },
+      "isOpen": false,
+      "isIntermediate": true,
+      "useCache": true
+      },
+      "position": { "x": 925, "y": -275 }
+    },
+    {
+      "id": "d6353b7f-b447-4e17-8f2e-80a88c91d426",
+      "type": "invocation",
+      "data": {
+        "id": "d6353b7f-b447-4e17-8f2e-80a88c91d426",
+        "version": "1.0.3",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "main_model_loader",
+        "inputs": {
+          "model": { "name": "model", "label": "" }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": 0, "y": -375 }
+    },
+    {
+      "id": "c2eaf1ba-5708-4679-9e15-945b8b432692",
+      "type": "invocation",
+      "data": {
+        "id": "c2eaf1ba-5708-4679-9e15-945b8b432692",
+        "version": "1.2.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "compel",
+        "inputs": {
+          "prompt": { "name": "prompt", "label": "", "value": "" },
+          "clip": { "name": "clip", "label": "" },
+          "mask": { "name": "mask", "label": "" }
         }
       },
       "isOpen": false,
@@ -141,61 +256,6 @@
       "y": -400
     }
   },
-  {
-    "id": "d6353b7f-b447-4e17-8f2e-80a88c91d426",
-    "type": "invocation",
-    "data": {
-      "id": "d6353b7f-b447-4e17-8f2e-80a88c91d426",
-      "version": "1.0.2",
-      "nodePack": "invokeai",
-      "label": "",
-      "notes": "",
-      "type": "main_model_loader",
-      "inputs": {
-        "model": { "name": "model", "label": "" }
-      },
-      "isOpen": true,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 0, "y": -375 }
-  },
-  {
-    "id": "fc9d0e35-a6de-4a19-84e1-c72497c823f6",
-    "type": "invocation",
-    "data": {
-      "id": "fc9d0e35-a6de-4a19-84e1-c72497c823f6",
-      "version": "1.1.1",
-      "nodePack": "invokeai",
-      "label": "",
-      "notes": "",
-      "type": "compel",
-      "inputs": {
-        "prompt": { "name": "prompt", "label": "", "value": "" },
-        "clip": { "name": "clip", "label": "" }
-      },
-      "isOpen": false,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 925, "y": -275 }
-  },
   {
     "id": "0eb5f3f5-1b91-49eb-9ef0-41d67c7eae77",
     "type": "invocation",
@@ -268,53 +328,6 @@
       "y": -50
     }
   },
-  {
-    "id": "491ec988-3c77-4c37-af8a-39a0c4e7a2a1",
-    "type": "invocation",
-    "data": {
-      "id": "491ec988-3c77-4c37-af8a-39a0c4e7a2a1",
-      "version": "1.2.2",
-      "nodePack": "invokeai",
-      "label": "",
-      "notes": "",
-      "type": "l2i",
-      "inputs": {
-        "board": { "name": "board", "label": "" },
-        "metadata": { "name": "metadata", "label": "" },
-        "latents": { "name": "latents", "label": "" },
-        "vae": { "name": "vae", "label": "" },
-        "tiled": { "name": "tiled", "label": "", "value": false },
-        "fp32": { "name": "fp32", "label": "", "value": false }
-      },
-      "isOpen": true,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 2037.861329274915, "y": -329.8393457509562 }
-  },
   {
     "id": "2fb1577f-0a56-4f12-8711-8afcaaaf1d5e",
     "type": "invocation",
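In each of these files the compel nodes pick up an optional "mask" input as their version moves from 1.1.1 to 1.2.0. A sketch of what that retrofit could look like when upgrading an older workflow JSON by hand; the helper name is ours, and the field layout is copied from the diff:

def add_compel_mask_input(workflow: dict) -> None:
    # Hypothetical helper: mirror the compel 1.1.1 -> 1.2.0 change above by
    # giving every compel node the new optional "mask" input.
    for node in workflow.get("nodes", []):
        data = node["data"]
        if data["type"] == "compel" and "mask" not in data["inputs"]:
            data["inputs"]["mask"] = {"name": "mask", "label": ""}
            data["version"] = "1.2.0"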
@@ -2,7 +2,7 @@
   "name": "Text to Image - SD1.5",
   "author": "InvokeAI",
   "description": "Sample text to image workflow for Stable Diffusion 1.5/2",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "contact": "invoke@invoke.ai",
   "tags": "text2image, SD1.5, SD2, default",
   "notes": "",
@@ -33,16 +33,127 @@
     }
   ],
   "meta": {
-    "category": "default",
-    "version": "3.0.0"
+    "version": "3.0.0",
+    "category": "default"
   },
   "nodes": [
+    {
+      "id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
+      "type": "invocation",
+      "data": {
+        "id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
+        "version": "1.3.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "l2i",
+        "inputs": {
+          "board": { "name": "board", "label": "" },
+          "metadata": { "name": "metadata", "label": "" },
+          "latents": { "name": "latents", "label": "" },
+          "vae": { "name": "vae", "label": "" },
+          "tiled": { "name": "tiled", "label": "", "value": false },
+          "tile_size": { "name": "tile_size", "label": "", "value": 0 },
+          "fp32": { "name": "fp32", "label": "", "value": true }
+        },
+        "isOpen": true,
+        "isIntermediate": false,
+        "useCache": true
+      },
+      "position": { "x": 1800, "y": 25 }
+    },
+    {
+      "id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
+      "type": "invocation",
+      "data": {
+        "id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
+        "version": "1.2.0",
+        "nodePack": "invokeai",
+        "label": "Positive Compel Prompt",
+        "notes": "",
+        "type": "compel",
+        "inputs": {
+          "prompt": { "name": "prompt", "label": "Positive Prompt", "value": "Super cute tiger cub, national geographic award-winning photograph" },
+          "clip": { "name": "clip", "label": "" },
+          "mask": { "name": "mask", "label": "" }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": 1000, "y": 25 }
+    },
+    {
+      "id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
+      "type": "invocation",
+      "data": {
+        "id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
+        "version": "1.0.3",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "main_model_loader",
+        "inputs": {
+          "model": { "name": "model", "label": "" }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": 600, "y": 25 }
+    },
     {
       "id": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
       "type": "invocation",
       "data": {
         "id": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
-        "version": "1.1.1",
+        "version": "1.2.0",
         "nodePack": "invokeai",
         "label": "Negative Compel Prompt",
         "notes": "",
@@ -56,6 +167,10 @@
         "clip": {
           "name": "clip",
           "label": ""
+        },
+        "mask": {
+          "name": "mask",
+          "label": ""
         }
       },
       "isOpen": true,
@@ -108,61 +223,6 @@
       "y": 325
     }
   },
-  {
-    "id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
-    "type": "invocation",
-    "data": {
-      "id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
-      "version": "1.0.2",
-      "nodePack": "invokeai",
-      "label": "",
-      "notes": "",
-      "type": "main_model_loader",
-      "inputs": {
-        "model": { "name": "model", "label": "" }
-      },
-      "isOpen": true,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 600, "y": 25 }
-  },
-  {
-    "id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
-    "type": "invocation",
-    "data": {
-      "id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
-      "version": "1.1.1",
-      "nodePack": "invokeai",
-      "label": "Positive Compel Prompt",
-      "notes": "",
-      "type": "compel",
-      "inputs": {
-        "prompt": { "name": "prompt", "label": "Positive Prompt", "value": "Super cute tiger cub, national geographic award-winning photograph" },
-        "clip": { "name": "clip", "label": "" }
-      },
-      "isOpen": true,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 1000, "y": 25 }
-  },
   {
     "id": "ea94bc37-d995-4a83-aa99-4af42479f2f2",
     "type": "invocation",
@@ -280,53 +340,6 @@
       "x": 1400,
       "y": 25
     }
-  },
-  {
-    "id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
-    "type": "invocation",
-    "data": {
-      "id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
-      "version": "1.2.2",
-      "nodePack": "invokeai",
-      "label": "",
-      "notes": "",
-      "type": "l2i",
-      "inputs": {
-        "board": { "name": "board", "label": "" },
-        "metadata": { "name": "metadata", "label": "" },
-        "latents": { "name": "latents", "label": "" },
-        "vae": { "name": "vae", "label": "" },
-        "tiled": { "name": "tiled", "label": "", "value": false },
-        "fp32": { "name": "fp32", "label": "", "value": true }
-      },
-      "isOpen": true,
-      "isIntermediate": false,
-      "useCache": true
-    },
-    "position": { "x": 1800, "y": 25 }
   }
 ],
 "edges": [
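The l2i nodes follow the same recipe: version 1.2.2 becomes 1.3.0 and a "tile_size" input with a default of 0 appears alongside "tiled". The equivalent sketch, again with a hypothetical helper name; a plain dict insert appends the key at the end rather than between "tiled" and "fp32", which is harmless since JSON object order is not significant:

def add_l2i_tile_size_input(workflow: dict) -> None:
    # Hypothetical helper mirroring the l2i 1.2.2 -> 1.3.0 change above.
    for node in workflow.get("nodes", []):
        data = node["data"]
        if data["type"] == "l2i" and "tile_size" not in data["inputs"]:
            data["inputs"]["tile_size"] = {"name": "tile_size", "label": "", "value": 0}
            data["version"] = "1.3.0"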
@@ -2,7 +2,7 @@
   "name": "Text to Image - SDXL",
   "author": "InvokeAI",
   "description": "Sample text to image workflow for SDXL",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "contact": "invoke@invoke.ai",
   "tags": "text2image, SDXL, default",
   "notes": "",
@@ -29,10 +29,271 @@
     }
   ],
   "meta": {
-    "category": "default",
-    "version": "3.0.0"
+    "version": "3.0.0",
+    "category": "default"
   },
   "nodes": [
+    {
+      "id": "0093692f-9cf4-454d-a5b8-62f0e3eb3bb8",
+      "type": "invocation",
+      "data": {
+        "id": "0093692f-9cf4-454d-a5b8-62f0e3eb3bb8",
+        "version": "1.0.3",
+        "label": "",
+        "notes": "",
+        "type": "vae_loader",
+        "inputs": {
+          "vae_model": {
+            "name": "vae_model",
+            "label": "VAE (use the FP16 model)",
+            "value": { "key": "f20f9e5c-1bce-4c46-a84d-34ebfa7df069", "hash": "blake3:9705ab1c31fa96b308734214fb7571a958621c7a9247eed82b7d277145f8d9fa", "name": "sdxl-vae-fp16-fix", "base": "sdxl", "type": "vae" }
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": 375, "y": -225 }
+    },
+    {
+      "id": "63e91020-83b2-4f35-b174-ad9692aabb48",
+      "type": "invocation",
+      "data": {
+        "id": "63e91020-83b2-4f35-b174-ad9692aabb48",
+        "version": "1.3.0",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "l2i",
+        "inputs": {
+          "board": { "name": "board", "label": "" },
+          "metadata": { "name": "metadata", "label": "" },
+          "latents": { "name": "latents", "label": "" },
+          "vae": { "name": "vae", "label": "" },
+          "tiled": { "name": "tiled", "label": "", "value": false },
+          "tile_size": { "name": "tile_size", "label": "", "value": 0 },
+          "fp32": { "name": "fp32", "label": "", "value": false }
+        },
+        "isOpen": true,
+        "isIntermediate": false,
+        "useCache": false
+      },
+      "position": { "x": 1475, "y": -500 }
+    },
+    {
+      "id": "faf965a4-7530-427b-b1f3-4ba6505c2a08",
+      "type": "invocation",
+      "data": {
+        "id": "faf965a4-7530-427b-b1f3-4ba6505c2a08",
+        "version": "1.2.0",
+        "nodePack": "invokeai",
+        "label": "SDXL Positive Compel Prompt",
+        "notes": "",
+        "type": "sdxl_compel_prompt",
+        "inputs": {
+          "prompt": { "name": "prompt", "label": "Positive Prompt", "value": "" },
+          "style": { "name": "style", "label": "Positive Style", "value": "" },
+          "original_width": { "name": "original_width", "label": "", "value": 1024 },
+          "original_height": { "name": "original_height", "label": "", "value": 1024 },
+          "crop_top": { "name": "crop_top", "label": "", "value": 0 },
+          "crop_left": { "name": "crop_left", "label": "", "value": 0 },
+          "target_width": { "name": "target_width", "label": "", "value": 1024 },
+          "target_height": { "name": "target_height", "label": "", "value": 1024 },
+          "clip": { "name": "clip", "label": "" },
+          "clip2": { "name": "clip2", "label": "" },
+          "mask": { "name": "mask", "label": "" }
+        },
+        "isOpen": false,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": 750, "y": -175 }
+    },
+    {
+      "id": "30d3289c-773c-4152-a9d2-bd8a99c8fd22",
+      "type": "invocation",
+      "data": {
+        "id": "30d3289c-773c-4152-a9d2-bd8a99c8fd22",
+        "version": "1.0.3",
+        "nodePack": "invokeai",
+        "label": "",
+        "notes": "",
+        "type": "sdxl_model_loader",
+        "inputs": {
+          "model": {
+            "name": "model",
+            "label": "",
+            "value": { "key": "4a63b226-e8ff-4da4-854e-0b9f04b562ba", "hash": "blake3:d279309ea6e5ee6e8fd52504275865cc280dac71cbf528c5b07c98b888bddaba", "name": "dreamshaper-xl-v2-turbo", "base": "sdxl", "type": "main" }
+          }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": 375, "y": -500 }
+    },
+    {
+      "id": "3193ad09-a7c2-4bf4-a3a9-1c61cc33a204",
+      "type": "invocation",
+      "data": {
+        "id": "3193ad09-a7c2-4bf4-a3a9-1c61cc33a204",
+        "version": "1.2.0",
+        "nodePack": "invokeai",
+        "label": "SDXL Negative Compel Prompt",
+        "notes": "",
+        "type": "sdxl_compel_prompt",
+        "inputs": {
+          "prompt": { "name": "prompt", "label": "Negative Prompt", "value": "" },
+          "style": { "name": "style", "label": "Negative Style", "value": "" },
+          "original_width": { "name": "original_width", "label": "", "value": 1024 },
+          "original_height": { "name": "original_height", "label": "", "value": 1024 },
+          "crop_top": { "name": "crop_top", "label": "", "value": 0 },
+          "crop_left": { "name": "crop_left", "label": "", "value": 0 },
+          "target_width": { "name": "target_width", "label": "", "value": 1024 },
+          "target_height": { "name": "target_height", "label": "", "value": 1024 },
+          "clip": { "name": "clip", "label": "" },
+          "clip2": { "name": "clip2", "label": "" },
+          "mask": { "name": "mask", "label": "" }
+        },
+        "isOpen": false,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": 750, "y": 200 }
+    },
     {
       "id": "3774ec24-a69e-4254-864c-097d07a6256f",
       "type": "invocation",
@@ -88,75 +349,6 @@
       "y": -125
     }
   },
-  {
-    "id": "3193ad09-a7c2-4bf4-a3a9-1c61cc33a204",
-    "type": "invocation",
-    "data": {
-      "id": "3193ad09-a7c2-4bf4-a3a9-1c61cc33a204",
-      "version": "1.1.1",
-      "nodePack": "invokeai",
-      "label": "SDXL Negative Compel Prompt",
-      "notes": "",
-      "type": "sdxl_compel_prompt",
-      "inputs": {
-        "prompt": { "name": "prompt", "label": "Negative Prompt", "value": "" },
-        "style": { "name": "style", "label": "Negative Style", "value": "" },
-        "original_width": { "name": "original_width", "label": "", "value": 1024 },
-        "original_height": { "name": "original_height", "label": "", "value": 1024 },
-        "crop_top": { "name": "crop_top", "label": "", "value": 0 },
-        "crop_left": { "name": "crop_left", "label": "", "value": 0 },
-        "target_width": { "name": "target_width", "label": "", "value": 1024 },
-        "target_height": { "name": "target_height", "label": "", "value": 1024 },
-        "clip": { "name": "clip", "label": "" },
-        "clip2": { "name": "clip2", "label": "" }
-      },
-      "isOpen": false,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 750, "y": 200 }
-  },
   {
     "id": "55705012-79b9-4aac-9f26-c0b10309785b",
     "type": "invocation",
@@ -229,154 +421,6 @@
       "y": -50
     }
   },
-  {
-    "id": "30d3289c-773c-4152-a9d2-bd8a99c8fd22",
-    "type": "invocation",
-    "data": {
-      "id": "30d3289c-773c-4152-a9d2-bd8a99c8fd22",
-      "version": "1.0.2",
-      "nodePack": "invokeai",
-      "label": "",
-      "notes": "",
-      "type": "sdxl_model_loader",
-      "inputs": {
-        "model": {
-          "name": "model",
-          "label": "",
-          "value": { "key": "4a63b226-e8ff-4da4-854e-0b9f04b562ba", "hash": "blake3:d279309ea6e5ee6e8fd52504275865cc280dac71cbf528c5b07c98b888bddaba", "name": "dreamshaper-xl-v2-turbo", "base": "sdxl", "type": "main" }
-        }
-      },
-      "isOpen": true,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 375, "y": -500 }
-  },
-  {
-    "id": "faf965a4-7530-427b-b1f3-4ba6505c2a08",
-    "type": "invocation",
-    "data": {
-      "id": "faf965a4-7530-427b-b1f3-4ba6505c2a08",
-      "version": "1.1.1",
-      "nodePack": "invokeai",
-      "label": "SDXL Positive Compel Prompt",
-      "notes": "",
-      "type": "sdxl_compel_prompt",
-      "inputs": {
-        "prompt": { "name": "prompt", "label": "Positive Prompt", "value": "" },
-        "style": { "name": "style", "label": "Positive Style", "value": "" },
-        "original_width": { "name": "original_width", "label": "", "value": 1024 },
-        "original_height": { "name": "original_height", "label": "", "value": 1024 },
-        "crop_top": { "name": "crop_top", "label": "", "value": 0 },
-        "crop_left": { "name": "crop_left", "label": "", "value": 0 },
-        "target_width": { "name": "target_width", "label": "", "value": 1024 },
-        "target_height": { "name": "target_height", "label": "", "value": 1024 },
-        "clip": { "name": "clip", "label": "" },
-        "clip2": { "name": "clip2", "label": "" }
-      },
-      "isOpen": false,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 750, "y": -175 }
-  },
-  {
-    "id": "63e91020-83b2-4f35-b174-ad9692aabb48",
-    "type": "invocation",
-    "data": {
-      "id": "63e91020-83b2-4f35-b174-ad9692aabb48",
-      "version": "1.2.2",
-      "nodePack": "invokeai",
-      "label": "",
-      "notes": "",
-      "type": "l2i",
-      "inputs": {
-        "board": { "name": "board", "label": "" },
-        "metadata": { "name": "metadata", "label": "" },
-        "latents": { "name": "latents", "label": "" },
-        "vae": { "name": "vae", "label": "" },
-        "tiled": { "name": "tiled", "label": "", "value": false },
-        "fp32": { "name": "fp32", "label": "", "value": false }
-      },
-      "isOpen": true,
-      "isIntermediate": false,
-      "useCache": false
-    },
-    "position": { "x": 1475, "y": -500 }
-  },
   {
     "id": "50a36525-3c0a-4cc5-977c-e4bfc3fd6dfb",
     "type": "invocation",
@@ -464,37 +508,6 @@
       "y": -500
     }
   },
-  {
-    "id": "0093692f-9cf4-454d-a5b8-62f0e3eb3bb8",
-    "type": "invocation",
-    "data": {
-      "id": "0093692f-9cf4-454d-a5b8-62f0e3eb3bb8",
-      "version": "1.0.2",
-      "label": "",
-      "notes": "",
-      "type": "vae_loader",
-      "inputs": {
-        "vae_model": {
-          "name": "vae_model",
-          "label": "VAE (use the FP16 model)",
-          "value": { "key": "f20f9e5c-1bce-4c46-a84d-34ebfa7df069", "hash": "blake3:9705ab1c31fa96b308734214fb7571a958621c7a9247eed82b7d277145f8d9fa", "name": "sdxl-vae-fp16-fix", "base": "sdxl", "type": "vae" }
-        }
-      },
-      "isOpen": true,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": 375, "y": -225 }
-  },
   {
     "id": "ade2c0d3-0384-4157-b39b-29ce429cfa15",
     "type": "invocation",
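The "meta" hunk repeated in every file only swaps the serialization order of "version" and "category"; parsed as JSON the two objects are identical, so that part of the change is cosmetic. A quick check:

import json

old_meta = json.loads('{"category": "default", "version": "3.0.0"}')
new_meta = json.loads('{"version": "3.0.0", "category": "default"}')
assert old_meta == new_meta  # key order is not part of JSON object equality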
@@ -2,7 +2,7 @@
   "name": "Text to Image with LoRA",
   "author": "InvokeAI",
   "description": "Simple text to image workflow with a LoRA",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "contact": "invoke@invoke.ai",
   "tags": "text to image, lora, default",
   "notes": "",
@@ -37,28 +37,83 @@
     }
   ],
   "meta": {
-    "category": "default",
-    "version": "3.0.0"
+    "version": "3.0.0",
+    "category": "default"
   },
   "nodes": [
     {
-      "id": "85b77bb2-c67a-416a-b3e8-291abe746c44",
+      "id": "a9683c0a-6b1f-4a5e-8187-c57e764b3400",
       "type": "invocation",
       "data": {
-        "id": "85b77bb2-c67a-416a-b3e8-291abe746c44",
-        "version": "1.1.1",
+        "id": "a9683c0a-6b1f-4a5e-8187-c57e764b3400",
+        "version": "1.3.0",
+        "label": "",
+        "notes": "",
+        "type": "l2i",
+        "inputs": {
+          "board": { "name": "board", "label": "" },
+          "metadata": { "name": "metadata", "label": "" },
+          "latents": { "name": "latents", "label": "" },
+          "vae": { "name": "vae", "label": "" },
+          "tiled": { "name": "tiled", "label": "", "value": false },
+          "tile_size": { "name": "tile_size", "label": "", "value": 0 },
+          "fp32": { "name": "fp32", "label": "", "value": false }
+        },
+        "isOpen": true,
+        "isIntermediate": false,
+        "useCache": true
+      },
+      "position": { "x": 4450, "y": -550 }
+    },
+    {
+      "id": "c3fa6872-2599-4a82-a596-b3446a66cf8b",
+      "type": "invocation",
+      "data": {
+        "id": "c3fa6872-2599-4a82-a596-b3446a66cf8b",
+        "version": "1.2.0",
         "label": "",
         "notes": "",
         "type": "compel",
         "inputs": {
           "prompt": {
             "name": "prompt",
-            "label": "Negative Prompt",
-            "value": ""
+            "label": "Positive Prompt",
+            "value": "super cute tiger cub"
           },
           "clip": {
             "name": "clip",
             "label": ""
+          },
+          "mask": {
+            "name": "mask",
+            "label": ""
           }
         },
         "isOpen": true,
@@ -67,31 +122,7 @@
       },
       "position": {
         "x": 3425,
-        "y": -300
-      }
-    },
-    {
-      "id": "24e9d7ed-4836-4ec4-8f9e-e747721f9818",
-      "type": "invocation",
-      "data": {
-        "id": "24e9d7ed-4836-4ec4-8f9e-e747721f9818",
-        "version": "1.0.2",
-        "label": "",
-        "notes": "",
-        "type": "main_model_loader",
-        "inputs": {
-          "model": { "name": "model", "label": "" }
-        },
-        "isOpen": true,
-        "isIntermediate": true,
-        "useCache": true
-      },
-      "position": {
-        "x": 2500,
-        "y": -600
+        "y": -575
       }
     },
     {
@@ -99,7 +130,7 @@
       "type": "invocation",
       "data": {
         "id": "c41e705b-f2e3-4d1a-83c4-e34bb9344966",
-        "version": "1.0.2",
+        "version": "1.0.3",
         "label": "",
         "notes": "",
         "type": "lora_loader",
@@ -132,23 +163,51 @@
     }
   },
   {
-    "id": "c3fa6872-2599-4a82-a596-b3446a66cf8b",
+    "id": "24e9d7ed-4836-4ec4-8f9e-e747721f9818",
     "type": "invocation",
     "data": {
-      "id": "c3fa6872-2599-4a82-a596-b3446a66cf8b",
-      "version": "1.1.1",
+      "id": "24e9d7ed-4836-4ec4-8f9e-e747721f9818",
+      "version": "1.0.3",
+      "label": "",
+      "notes": "",
+      "type": "main_model_loader",
+      "inputs": {
+        "model": { "name": "model", "label": "" }
+      },
+      "isOpen": true,
+      "isIntermediate": true,
+      "useCache": true
+    },
+    "position": { "x": 2500, "y": -600 }
+  },
+  {
+    "id": "85b77bb2-c67a-416a-b3e8-291abe746c44",
+    "type": "invocation",
+    "data": {
+      "id": "85b77bb2-c67a-416a-b3e8-291abe746c44",
+      "version": "1.2.0",
       "label": "",
       "notes": "",
      "type": "compel",
      "inputs": {
        "prompt": {
          "name": "prompt",
-          "label": "Positive Prompt",
-          "value": "super cute tiger cub"
+          "label": "Negative Prompt",
+          "value": ""
        },
        "clip": {
          "name": "clip",
          "label": ""
+        },
+        "mask": {
+          "name": "mask",
+          "label": ""
        }
      },
      "isOpen": true,
@@ -157,7 +216,7 @@
     },
     "position": {
       "x": 3425,
-      "y": -575
+      "y": -300
     }
   },
   {
@@ -315,52 +374,6 @@
       "x": 3425,
       "y": 0
     }
-  },
-  {
-    "id": "a9683c0a-6b1f-4a5e-8187-c57e764b3400",
-    "type": "invocation",
-    "data": {
-      "id": "a9683c0a-6b1f-4a5e-8187-c57e764b3400",
-      "version": "1.2.2",
-      "label": "",
-      "notes": "",
-      "type": "l2i",
-      "inputs": {
-        "board": { "name": "board", "label": "" },
-        "metadata": { "name": "metadata", "label": "" },
-        "latents": { "name": "latents", "label": "" },
-        "vae": { "name": "vae", "label": "" },
-        "tiled": { "name": "tiled", "label": "", "value": false },
-        "fp32": { "name": "fp32", "label": "", "value": false }
-      },
-      "isOpen": true,
-      "isIntermediate": false,
-      "useCache": true
-    },
-    "position": { "x": 4450, "y": -550 }
   }
 ],
 "edges": [
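Most of the large add/remove pairs in these hunks are the same node emitted at a different index, so comparing the "nodes" arrays positionally overstates the change. Indexing by "id" separates pure moves from real edits; a sketch assuming two already-parsed workflow dicts:

def changed_node_ids(old_wf: dict, new_wf: dict) -> set[str]:
    # Compare node payloads by id so reorderings inside "nodes" drop out and
    # only genuine edits (version bumps, new inputs, moved positions) remain.
    old = {n["id"]: n["data"] for n in old_wf["nodes"]}
    new = {n["id"]: n["data"] for n in new_wf["nodes"]}
    return {i for i in old.keys() | new.keys() if old.get(i) != new.get(i)}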
@@ -2,7 +2,7 @@
   "name": "Tiled Upscaling (Beta)",
   "author": "Invoke",
   "description": "A workflow to upscale an input image with tiled upscaling. ",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "contact": "invoke@invoke.ai",
   "tags": "tiled, upscaling, sd1.5",
   "notes": "",
@@ -41,10 +41,318 @@
     }
   ],
   "meta": {
-    "category": "default",
-    "version": "3.0.0"
+    "version": "3.0.0",
+    "category": "default"
   },
   "nodes": [
+    {
+      "id": "2ff466b8-5e2a-4d8f-923a-a3884c7ecbc5",
+      "type": "invocation",
+      "data": {
+        "id": "2ff466b8-5e2a-4d8f-923a-a3884c7ecbc5",
+        "version": "1.0.3",
+        "label": "",
+        "notes": "",
+        "type": "main_model_loader",
+        "inputs": {
+          "model": { "name": "model", "label": "" }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": -4514.466823162653, "y": -1235.7908800002283 }
+    },
+    {
+      "id": "287f134f-da8d-41d1-884e-5940e8f7b816",
+      "type": "invocation",
+      "data": {
+        "id": "287f134f-da8d-41d1-884e-5940e8f7b816",
+        "version": "1.4.1",
+        "label": "",
+        "notes": "",
+        "type": "ip_adapter",
+        "inputs": {
+          "image": { "name": "image", "label": "" },
+          "ip_adapter_model": {
+            "name": "ip_adapter_model",
+            "label": "IP-Adapter Model (select ip_adapter_sd15)",
+            "value": { "key": "1cc210bb-4d0a-4312-b36c-b5d46c43768e", "hash": "blake3:3d669dffa7471b357b4df088b99ffb6bf4d4383d5e0ef1de5ec1c89728a3d5a5", "name": "ip_adapter_sd15", "base": "sd-1", "type": "ip_adapter" }
+          },
+          "clip_vision_model": { "name": "clip_vision_model", "label": "", "value": "ViT-H" },
+          "weight": { "name": "weight", "label": "", "value": 0.2 },
+          "method": { "name": "method", "label": "", "value": "full" },
+          "begin_step_percent": { "name": "begin_step_percent", "label": "", "value": 0 },
+          "end_step_percent": { "name": "end_step_percent", "label": "", "value": 1 },
+          "mask": { "name": "mask", "label": "" }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": -2855.8555540799207, "y": -183.58854843775742 }
+    },
+    {
+      "id": "b76fe66f-7884-43ad-b72c-fadc81d7a73c",
+      "type": "invocation",
+      "data": {
+        "id": "b76fe66f-7884-43ad-b72c-fadc81d7a73c",
+        "version": "1.3.0",
+        "label": "",
+        "notes": "",
+        "type": "l2i",
+        "inputs": {
+          "board": { "name": "board", "label": "" },
+          "metadata": { "name": "metadata", "label": "" },
+          "latents": { "name": "latents", "label": "" },
+          "vae": { "name": "vae", "label": "" },
+          "tiled": { "name": "tiled", "label": "", "value": false },
+          "tile_size": { "name": "tile_size", "label": "", "value": 0 },
+          "fp32": { "name": "fp32", "label": "", "value": false }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": -1999.770193862987, "y": -1075 }
+    },
+    {
+      "id": "d334f2da-016a-4524-9911-bdab85546888",
+      "type": "invocation",
+      "data": {
+        "id": "d334f2da-016a-4524-9911-bdab85546888",
+        "version": "1.1.2",
+        "label": "",
+        "notes": "",
+        "type": "controlnet",
+        "inputs": {
+          "image": { "name": "image", "label": "" },
+          "control_model": {
+            "name": "control_model",
+            "label": "Control Model (select contro_v11f1e_sd15_tile)",
+            "value": { "key": "773843c8-db1f-4502-8f65-59782efa7960", "hash": "blake3:f0812e13758f91baf4e54b7dbb707b70642937d3b2098cd2b94cc36d3eba308e", "name": "control_v11f1e_sd15_tile", "base": "sd-1", "type": "controlnet" }
+          },
+          "control_weight": { "name": "control_weight", "label": "", "value": 1 },
+          "begin_step_percent": { "name": "begin_step_percent", "label": "", "value": 0 },
+          "end_step_percent": { "name": "end_step_percent", "label": "Structural Control", "value": 1 },
+          "control_mode": { "name": "control_mode", "label": "", "value": "more_control" },
+          "resize_mode": { "name": "resize_mode", "label": "", "value": "just_resize" }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": -2481.9569385477016, "y": -181.06590482739782 }
+    },
+    {
+      "id": "338b883c-3728-4f18-b3a6-6e7190c2f850",
+      "type": "invocation",
+      "data": {
+        "id": "338b883c-3728-4f18-b3a6-6e7190c2f850",
+        "version": "1.1.0",
+        "label": "",
+        "notes": "",
+        "type": "i2l",
+        "inputs": {
+          "image": { "name": "image", "label": "" },
+          "vae": { "name": "vae", "label": "" },
+          "tiled": { "name": "tiled", "label": "", "value": false },
+          "tile_size": { "name": "tile_size", "label": "", "value": 0 },
+          "fp32": { "name": "fp32", "label": "", "value": false }
+        },
+        "isOpen": false,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": -2908.4791167517287, "y": -408.87504820159086 }
+    },
+    {
+      "id": "947c3f88-0305-4695-8355-df4abac64b1c",
+      "type": "invocation",
+      "data": {
+        "id": "947c3f88-0305-4695-8355-df4abac64b1c",
+        "version": "1.2.0",
+        "label": "",
+        "notes": "",
+        "type": "compel",
+        "inputs": {
+          "prompt": { "name": "prompt", "label": "", "value": "" },
+          "clip": { "name": "clip", "label": "" },
+          "mask": { "name": "mask", "label": "" }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": -4014.4136788915944, "y": -968.5677253775948 }
+    },
+    {
+      "id": "9b2d8c58-ce8f-4162-a5a1-48de854040d6",
+      "type": "invocation",
+      "data": {
+        "id": "9b2d8c58-ce8f-4162-a5a1-48de854040d6",
+        "version": "1.2.0",
+        "label": "",
+        "notes": "",
+        "type": "compel",
+        "inputs": {
+          "prompt": { "name": "prompt", "label": "Positive Prompt", "value": "" },
+          "clip": { "name": "clip", "label": "" },
+          "mask": { "name": "mask", "label": "" }
+        },
+        "isOpen": true,
+        "isIntermediate": true,
+        "useCache": true
+      },
+      "position": { "x": -4014.4136788915944, "y": -1243.5677253775948 }
+    },
     {
       "id": "b875cae6-d8a3-4fdc-b969-4d53cbd03f9a",
       "type": "invocation",
@@ -181,64 +489,6 @@
       "y": 3.422855503409039
     }
   },
-  {
-    "id": "9b2d8c58-ce8f-4162-a5a1-48de854040d6",
-    "type": "invocation",
-    "data": {
-      "id": "9b2d8c58-ce8f-4162-a5a1-48de854040d6",
-      "version": "1.1.1",
-      "label": "",
-      "notes": "",
-      "type": "compel",
-      "inputs": {
-        "prompt": { "name": "prompt", "label": "Positive Prompt", "value": "" },
-        "clip": { "name": "clip", "label": "" }
-      },
-      "isOpen": true,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": -4014.4136788915944, "y": -1243.5677253775948 }
-  },
-  {
-    "id": "947c3f88-0305-4695-8355-df4abac64b1c",
-    "type": "invocation",
-    "data": {
-      "id": "947c3f88-0305-4695-8355-df4abac64b1c",
-      "version": "1.1.1",
-      "label": "",
-      "notes": "",
-      "type": "compel",
-      "inputs": {
-        "prompt": { "name": "prompt", "label": "", "value": "" },
-        "clip": { "name": "clip", "label": "" }
-      },
-      "isOpen": true,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": -4014.4136788915944, "y": -968.5677253775948 }
-  },
   {
     "id": "b3513fed-ed42-408d-b382-128fdb0de523",
     "type": "invocation",
@@ -379,104 +629,6 @@
       "y": -29.08699277598673
     }
   },
-  {
-    "id": "338b883c-3728-4f18-b3a6-6e7190c2f850",
-    "type": "invocation",
-    "data": {
-      "id": "338b883c-3728-4f18-b3a6-6e7190c2f850",
-      "version": "1.0.2",
-      "label": "",
-      "notes": "",
-      "type": "i2l",
-      "inputs": {
-        "image": { "name": "image", "label": "" },
-        "vae": { "name": "vae", "label": "" },
-        "tiled": { "name": "tiled", "label": "", "value": false },
-        "fp32": { "name": "fp32", "label": "", "value": false }
-      },
-      "isOpen": false,
-      "isIntermediate": true,
-      "useCache": true
-    },
-    "position": { "x": -2908.4791167517287, "y": -408.87504820159086 }
-  },
-  {
-    "id": "d334f2da-016a-4524-9911-bdab85546888",
-    "type": "invocation",
-    "data": {
-      "id": "d334f2da-016a-4524-9911-bdab85546888",
-      "version": "1.1.1",
-      "label": "",
-      "notes": "",
-      "type": "controlnet",
-      "inputs": {
-        "image": { "name": "image", "label": "" },
-        "control_model": {
-          "name": "control_model",
-          "label": "Control Model (select contro_v11f1e_sd15_tile)",
-          "value": { "key": "773843c8-db1f-4502-8f65-59782efa7960", "hash": "blake3:f0812e13758f91baf4e54b7dbb707b70642937d3b2098cd2b94cc36d3eba308e", "name": "control_v11f1e_sd15_tile", "base": "sd-1", "type": "controlnet" }
-        },
-        "control_weight": { "name": "control_weight", "label": "", "value": 1 },
-        "begin_step_percent": { "name": "begin_step_percent", "label": "", "value": 0 },
-        "end_step_percent": { "name": "end_step_percent", "label": "Structural Control", "value": 1 },
-        "control_mode": { "name": "control_mode", "label": "", "value": "more_control" },
-        "resize_mode": {
|
|
||||||
"name": "resize_mode",
|
|
||||||
"label": "",
|
|
||||||
"value": "just_resize"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"isOpen": true,
|
|
||||||
"isIntermediate": true,
|
|
||||||
"useCache": true
|
|
||||||
},
|
|
||||||
"position": {
|
|
||||||
"x": -2481.9569385477016,
|
|
||||||
"y": -181.06590482739782
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "1011539e-85de-4e02-a003-0b22358491b8",
|
"id": "1011539e-85de-4e02-a003-0b22358491b8",
|
||||||
"type": "invocation",
|
"type": "invocation",
|
||||||
@ -563,52 +715,6 @@
|
|||||||
"y": -1006.415909408244
|
"y": -1006.415909408244
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "b76fe66f-7884-43ad-b72c-fadc81d7a73c",
|
|
||||||
"type": "invocation",
|
|
||||||
"data": {
|
|
||||||
"id": "b76fe66f-7884-43ad-b72c-fadc81d7a73c",
|
|
||||||
"version": "1.2.2",
|
|
||||||
"label": "",
|
|
||||||
"notes": "",
|
|
||||||
"type": "l2i",
|
|
||||||
"inputs": {
|
|
||||||
"board": {
|
|
||||||
"name": "board",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"metadata": {
|
|
||||||
"name": "metadata",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"latents": {
|
|
||||||
"name": "latents",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"vae": {
|
|
||||||
"name": "vae",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"tiled": {
|
|
||||||
"name": "tiled",
|
|
||||||
"label": "",
|
|
||||||
"value": false
|
|
||||||
},
|
|
||||||
"fp32": {
|
|
||||||
"name": "fp32",
|
|
||||||
"label": "",
|
|
||||||
"value": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"isOpen": true,
|
|
||||||
"isIntermediate": true,
|
|
||||||
"useCache": true
|
|
||||||
},
|
|
||||||
"position": {
|
|
||||||
"x": -1999.770193862987,
|
|
||||||
"y": -1075
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "ab6f5dda-4b60-4ddf-99f2-f61fb5937527",
|
"id": "ab6f5dda-4b60-4ddf-99f2-f61fb5937527",
|
||||||
"type": "invocation",
|
"type": "invocation",
|
||||||
@ -779,56 +885,6 @@
|
|||||||
"y": -78.2819050861178
|
"y": -78.2819050861178
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "287f134f-da8d-41d1-884e-5940e8f7b816",
|
|
||||||
"type": "invocation",
|
|
||||||
"data": {
|
|
||||||
"id": "287f134f-da8d-41d1-884e-5940e8f7b816",
|
|
||||||
"version": "1.2.2",
|
|
||||||
"label": "",
|
|
||||||
"notes": "",
|
|
||||||
"type": "ip_adapter",
|
|
||||||
"inputs": {
|
|
||||||
"image": {
|
|
||||||
"name": "image",
|
|
||||||
"label": ""
|
|
||||||
},
|
|
||||||
"ip_adapter_model": {
|
|
||||||
"name": "ip_adapter_model",
|
|
||||||
"label": "IP-Adapter Model (select ip_adapter_sd15)",
|
|
||||||
"value": {
|
|
||||||
"key": "1cc210bb-4d0a-4312-b36c-b5d46c43768e",
|
|
||||||
"hash": "blake3:3d669dffa7471b357b4df088b99ffb6bf4d4383d5e0ef1de5ec1c89728a3d5a5",
|
|
||||||
"name": "ip_adapter_sd15",
|
|
||||||
"base": "sd-1",
|
|
||||||
"type": "ip_adapter"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"weight": {
|
|
||||||
"name": "weight",
|
|
||||||
"label": "",
|
|
||||||
"value": 0.2
|
|
||||||
},
|
|
||||||
"begin_step_percent": {
|
|
||||||
"name": "begin_step_percent",
|
|
||||||
"label": "",
|
|
||||||
"value": 0
|
|
||||||
},
|
|
||||||
"end_step_percent": {
|
|
||||||
"name": "end_step_percent",
|
|
||||||
"label": "",
|
|
||||||
"value": 1
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"isOpen": true,
|
|
||||||
"isIntermediate": true,
|
|
||||||
"useCache": true
|
|
||||||
},
|
|
||||||
"position": {
|
|
||||||
"x": -2855.8555540799207,
|
|
||||||
"y": -183.58854843775742
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "1f86c8bf-06f9-4e28-abee-02f46f445ac4",
|
"id": "1f86c8bf-06f9-4e28-abee-02f46f445ac4",
|
||||||
"type": "invocation",
|
"type": "invocation",
|
||||||
@ -899,30 +955,6 @@
|
|||||||
"y": -41.810810454906914
|
"y": -41.810810454906914
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"id": "2ff466b8-5e2a-4d8f-923a-a3884c7ecbc5",
|
|
||||||
"type": "invocation",
|
|
||||||
"data": {
|
|
||||||
"id": "2ff466b8-5e2a-4d8f-923a-a3884c7ecbc5",
|
|
||||||
"version": "1.0.2",
|
|
||||||
"label": "",
|
|
||||||
"notes": "",
|
|
||||||
"type": "main_model_loader",
|
|
||||||
"inputs": {
|
|
||||||
"model": {
|
|
||||||
"name": "model",
|
|
||||||
"label": ""
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"isOpen": true,
|
|
||||||
"isIntermediate": true,
|
|
||||||
"useCache": true
|
|
||||||
},
|
|
||||||
"position": {
|
|
||||||
"x": -4514.466823162653,
|
|
||||||
"y": -1235.7908800002283
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"id": "f5d9bf3b-2646-4b17-9894-20fd2b4218ea",
|
"id": "f5d9bf3b-2646-4b17-9894-20fd2b4218ea",
|
||||||
"type": "invocation",
|
"type": "invocation",
|
||||||
|
@ -98,7 +98,7 @@ class UnetSkipConnectionBlock(nn.Module):
        """
        super(UnetSkipConnectionBlock, self).__init__()
        self.outermost = outermost
-        if type(norm_layer) == functools.partial:
+        if isinstance(norm_layer, functools.partial):
            use_bias = norm_layer.func == nn.InstanceNorm2d
        else:
            use_bias = norm_layer == nn.InstanceNorm2d
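A quick aside on the `type(...) ==` to `isinstance(...)` change above: `functools.partial` is an ordinary class, so both forms pass for exact instances, but `isinstance` also accepts subclasses and is the idiomatic check. A minimal sketch (the `norm_layer` name mirrors the hunk; the rest is illustrative):

```python
import functools
from torch import nn

norm_layer = functools.partial(nn.InstanceNorm2d, affine=False)

# Exact-type comparison: would break for subclasses of functools.partial.
assert type(norm_layer) == functools.partial

# isinstance covers subclasses too, which is why the diff prefers it.
if isinstance(norm_layer, functools.partial):
    use_bias = norm_layer.func == nn.InstanceNorm2d  # unwrap the partial to inspect the target
```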
@ -124,16 +124,14 @@ class IPAdapter(RawModel):
            self.device, dtype=self.dtype
        )

-    def to(
-        self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None, non_blocking: bool = False
-    ):
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None):
        if device is not None:
            self.device = device
        if dtype is not None:
            self.dtype = dtype

-        self._image_proj_model.to(device=self.device, dtype=self.dtype, non_blocking=non_blocking)
-        self.attn_weights.to(device=self.device, dtype=self.dtype, non_blocking=non_blocking)
+        self._image_proj_model.to(device=self.device, dtype=self.dtype)
+        self.attn_weights.to(device=self.device, dtype=self.dtype)

    def calc_size(self) -> int:
        # HACK(ryand): Fix this issue with circular imports.
@ -11,7 +11,6 @@ from typing_extensions import Self

 from invokeai.backend.model_manager import BaseModelType
 from invokeai.backend.raw_model import RawModel
-from invokeai.backend.util.devices import TorchDevice


 class LoRALayerBase:
@ -57,14 +56,9 @@ class LoRALayerBase:
            model_size += val.nelement() * val.element_size()
        return model_size

-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
        if self.bias is not None:
-            self.bias = self.bias.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            self.bias = self.bias.to(device=device, dtype=dtype)


    # TODO: find and debug lora/locon with bias
@ -106,19 +100,14 @@ class LoRALayer(LoRALayerBase):
            model_size += val.nelement() * val.element_size()
        return model_size

-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
-        super().to(device=device, dtype=dtype, non_blocking=non_blocking)
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
+        super().to(device=device, dtype=dtype)

-        self.up = self.up.to(device=device, dtype=dtype, non_blocking=non_blocking)
-        self.down = self.down.to(device=device, dtype=dtype, non_blocking=non_blocking)
+        self.up = self.up.to(device=device, dtype=dtype)
+        self.down = self.down.to(device=device, dtype=dtype)

        if self.mid is not None:
-            self.mid = self.mid.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            self.mid = self.mid.to(device=device, dtype=dtype)


class LoHALayer(LoRALayerBase):
@ -167,23 +156,18 @@ class LoHALayer(LoRALayerBase):
            model_size += val.nelement() * val.element_size()
        return model_size

-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
        super().to(device=device, dtype=dtype)

-        self.w1_a = self.w1_a.to(device=device, dtype=dtype, non_blocking=non_blocking)
-        self.w1_b = self.w1_b.to(device=device, dtype=dtype, non_blocking=non_blocking)
+        self.w1_a = self.w1_a.to(device=device, dtype=dtype)
+        self.w1_b = self.w1_b.to(device=device, dtype=dtype)
        if self.t1 is not None:
-            self.t1 = self.t1.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            self.t1 = self.t1.to(device=device, dtype=dtype)

-        self.w2_a = self.w2_a.to(device=device, dtype=dtype, non_blocking=non_blocking)
-        self.w2_b = self.w2_b.to(device=device, dtype=dtype, non_blocking=non_blocking)
+        self.w2_a = self.w2_a.to(device=device, dtype=dtype)
+        self.w2_b = self.w2_b.to(device=device, dtype=dtype)
        if self.t2 is not None:
-            self.t2 = self.t2.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            self.t2 = self.t2.to(device=device, dtype=dtype)


class LoKRLayer(LoRALayerBase):
@ -264,12 +248,7 @@ class LoKRLayer(LoRALayerBase):
            model_size += val.nelement() * val.element_size()
        return model_size

-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
        super().to(device=device, dtype=dtype)

        if self.w1 is not None:
@ -277,19 +256,19 @@ class LoKRLayer(LoRALayerBase):
        else:
            assert self.w1_a is not None
            assert self.w1_b is not None
-            self.w1_a = self.w1_a.to(device=device, dtype=dtype, non_blocking=non_blocking)
-            self.w1_b = self.w1_b.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            self.w1_a = self.w1_a.to(device=device, dtype=dtype)
+            self.w1_b = self.w1_b.to(device=device, dtype=dtype)

        if self.w2 is not None:
-            self.w2 = self.w2.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            self.w2 = self.w2.to(device=device, dtype=dtype)
        else:
            assert self.w2_a is not None
            assert self.w2_b is not None
-            self.w2_a = self.w2_a.to(device=device, dtype=dtype, non_blocking=non_blocking)
-            self.w2_b = self.w2_b.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            self.w2_a = self.w2_a.to(device=device, dtype=dtype)
+            self.w2_b = self.w2_b.to(device=device, dtype=dtype)

        if self.t2 is not None:
-            self.t2 = self.t2.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            self.t2 = self.t2.to(device=device, dtype=dtype)


class FullLayer(LoRALayerBase):
@ -319,15 +298,10 @@ class FullLayer(LoRALayerBase):
        model_size += self.weight.nelement() * self.weight.element_size()
        return model_size

-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
        super().to(device=device, dtype=dtype)

-        self.weight = self.weight.to(device=device, dtype=dtype, non_blocking=non_blocking)
+        self.weight = self.weight.to(device=device, dtype=dtype)


class IA3Layer(LoRALayerBase):
@ -359,16 +333,11 @@ class IA3Layer(LoRALayerBase):
        model_size += self.on_input.nelement() * self.on_input.element_size()
        return model_size

-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ):
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None):
        super().to(device=device, dtype=dtype)

-        self.weight = self.weight.to(device=device, dtype=dtype, non_blocking=non_blocking)
-        self.on_input = self.on_input.to(device=device, dtype=dtype, non_blocking=non_blocking)
+        self.weight = self.weight.to(device=device, dtype=dtype)
+        self.on_input = self.on_input.to(device=device, dtype=dtype)


AnyLoRALayer = Union[LoRALayer, LoHALayer, LoKRLayer, FullLayer, IA3Layer]
@ -390,15 +359,10 @@ class LoRAModelRaw(RawModel):  # (torch.nn.Module):
    def name(self) -> str:
        return self._name

-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
        # TODO: try revert if exception?
        for _key, layer in self.layers.items():
-            layer.to(device=device, dtype=dtype, non_blocking=non_blocking)
+            layer.to(device=device, dtype=dtype)

    def calc_size(self) -> int:
        model_size = 0
@ -521,7 +485,7 @@ class LoRAModelRaw(RawModel):  # (torch.nn.Module):
            # lower memory consumption by removing already parsed layer values
            state_dict[layer_key].clear()

-            layer.to(device=device, dtype=dtype, non_blocking=TorchDevice.get_non_blocking(device))
+            layer.to(device=device, dtype=dtype)
            model.layers[layer_key] = layer

        return model
@ -67,6 +67,7 @@ class ModelType(str, Enum):
    IPAdapter = "ip_adapter"
    CLIPVision = "clip_vision"
    T2IAdapter = "t2i_adapter"
+    SpandrelImageToImage = "spandrel_image_to_image"


class SubModelType(str, Enum):
@ -371,6 +372,17 @@ class T2IAdapterConfig(DiffusersConfigBase, ControlAdapterConfigBase):
        return Tag(f"{ModelType.T2IAdapter.value}.{ModelFormat.Diffusers.value}")


+class SpandrelImageToImageConfig(ModelConfigBase):
+    """Model config for Spandrel Image to Image models."""
+
+    type: Literal[ModelType.SpandrelImageToImage] = ModelType.SpandrelImageToImage
+    format: Literal[ModelFormat.Checkpoint] = ModelFormat.Checkpoint
+
+    @staticmethod
+    def get_tag() -> Tag:
+        return Tag(f"{ModelType.SpandrelImageToImage.value}.{ModelFormat.Checkpoint.value}")
+
+
def get_model_discriminator_value(v: Any) -> str:
    """
    Computes the discriminator value for a model config.
@ -407,6 +419,7 @@ AnyModelConfig = Annotated[
        Annotated[IPAdapterInvokeAIConfig, IPAdapterInvokeAIConfig.get_tag()],
        Annotated[IPAdapterCheckpointConfig, IPAdapterCheckpointConfig.get_tag()],
        Annotated[T2IAdapterConfig, T2IAdapterConfig.get_tag()],
+        Annotated[SpandrelImageToImageConfig, SpandrelImageToImageConfig.get_tag()],
        Annotated[CLIPVisionDiffusersConfig, CLIPVisionDiffusersConfig.get_tag()],
    ],
    Discriminator(get_model_discriminator_value),
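For readers unfamiliar with this pattern: Pydantic v2's `Tag`/`Discriminator` pair lets a union of config models be resolved by a computed key rather than a single literal field. A minimal, self-contained sketch of the same mechanism (the pet models and `kind` field are invented for illustration, not from this codebase):

```python
from typing import Annotated, Any, Literal, Union

from pydantic import BaseModel, Discriminator, Tag, TypeAdapter


class CatConfig(BaseModel):
    kind: Literal["cat"] = "cat"
    lives: int = 9


class DogConfig(BaseModel):
    kind: Literal["dog"] = "dog"
    good_boy: bool = True


def get_discriminator_value(v: Any) -> str:
    # Called for both raw dicts (during validation) and model instances.
    if isinstance(v, dict):
        return v["kind"]
    return v.kind


AnyPetConfig = Annotated[
    Union[
        Annotated[CatConfig, Tag("cat")],
        Annotated[DogConfig, Tag("dog")],
    ],
    Discriminator(get_discriminator_value),
]

pet = TypeAdapter(AnyPetConfig).validate_python({"kind": "dog"})
assert isinstance(pet, DogConfig)
```

Registering `SpandrelImageToImageConfig` in the union is therefore all that is needed for it to round-trip through the model manager's validation.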
@ -289,11 +289,9 @@ class ModelCache(ModelCacheBase[AnyModel]):
            else:
                new_dict: Dict[str, torch.Tensor] = {}
                for k, v in cache_entry.state_dict.items():
-                    new_dict[k] = v.to(
-                        target_device, copy=True, non_blocking=TorchDevice.get_non_blocking(target_device)
-                    )
+                    new_dict[k] = v.to(target_device, copy=True)
                cache_entry.model.load_state_dict(new_dict, assign=True)
-            cache_entry.model.to(target_device, non_blocking=TorchDevice.get_non_blocking(target_device))
+            cache_entry.model.to(target_device)
            cache_entry.device = target_device
        except Exception as e:  # blow away cache entry
            self._delete_cache_entry(cache_entry)
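The pattern in this hunk — copy each cached tensor to the target device, reattach with `load_state_dict(..., assign=True)`, then move the module — keeps the CPU-resident copy of the state dict intact so the model can be offloaded again cheaply. A minimal sketch of the same idea on a plain `nn.Module` (names are illustrative; `assign=True` requires PyTorch 2.1+):

```python
import torch
from torch import nn

model = nn.Linear(4, 4)
cpu_state_dict = dict(model.state_dict())  # cached, CPU-resident copy

target_device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# copy=True guarantees a fresh tensor even when source and target device match,
# so the cached CPU tensors are never aliased by the live model.
new_dict = {k: v.to(target_device, copy=True) for k, v in cpu_state_dict.items()}

# assign=True swaps in the new storages directly instead of copying values,
# avoiding a second device-side copy.
model.load_state_dict(new_dict, assign=True)
model.to(target_device)
```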
@ -0,0 +1,45 @@
+from pathlib import Path
+from typing import Optional
+
+import torch
+
+from invokeai.backend.model_manager.config import (
+    AnyModel,
+    AnyModelConfig,
+    BaseModelType,
+    ModelFormat,
+    ModelType,
+    SubModelType,
+)
+from invokeai.backend.model_manager.load.load_default import ModelLoader
+from invokeai.backend.model_manager.load.model_loader_registry import ModelLoaderRegistry
+from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
+
+
+@ModelLoaderRegistry.register(
+    base=BaseModelType.Any, type=ModelType.SpandrelImageToImage, format=ModelFormat.Checkpoint
+)
+class SpandrelImageToImageModelLoader(ModelLoader):
+    """Class for loading Spandrel Image-to-Image models (i.e. models wrapped by spandrel.ImageModelDescriptor)."""
+
+    def _load_model(
+        self,
+        config: AnyModelConfig,
+        submodel_type: Optional[SubModelType] = None,
+    ) -> AnyModel:
+        if submodel_type is not None:
+            raise ValueError("Unexpected submodel requested for Spandrel model.")
+
+        model_path = Path(config.path)
+        model = SpandrelImageToImageModel.load_from_file(model_path)
+
+        torch_dtype = self._torch_dtype
+        if not model.supports_dtype(torch_dtype):
+            self._logger.warning(
+                f"The configured dtype ('{self._torch_dtype}') is not supported by the {model.get_model_type_name()} "
+                "model. Falling back to 'float32'."
+            )
+            torch_dtype = torch.float32
+        model.to(dtype=torch_dtype)
+
+        return model
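The registry decorator above means no call site has to know about the new loader: the model manager looks a loader up by its `(base, type, format)` key and dispatches. A stripped-down sketch of that dispatch idea — this is a generic illustration, not InvokeAI's actual `ModelLoaderRegistry` API:

```python
from typing import Callable, Dict, Tuple, Type

_REGISTRY: Dict[Tuple[str, str, str], Type] = {}


def register(base: str, type: str, format: str) -> Callable[[Type], Type]:
    # Parameter names mirror the decorator in the diff above.
    def decorator(cls: Type) -> Type:
        _REGISTRY[(base, type, format)] = cls
        return cls

    return decorator


@register(base="any", type="spandrel_image_to_image", format="checkpoint")
class ExampleLoader:
    pass


# Dispatch: a loader class can now be chosen from config fields alone.
loader_cls = _REGISTRY[("any", "spandrel_image_to_image", "checkpoint")]
assert loader_cls is ExampleLoader
```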
@ -15,6 +15,7 @@ from invokeai.backend.ip_adapter.ip_adapter import IPAdapter
 from invokeai.backend.lora import LoRAModelRaw
 from invokeai.backend.model_manager.config import AnyModel
 from invokeai.backend.onnx.onnx_runtime import IAIOnnxRuntimeModel
+from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
 from invokeai.backend.textual_inversion import TextualInversionModelRaw


@ -33,7 +34,7 @@ def calc_model_size_by_data(logger: logging.Logger, model: AnyModel) -> int:
    elif isinstance(model, CLIPTokenizer):
        # TODO(ryand): Accurately calculate the tokenizer's size. It's small enough that it shouldn't matter for now.
        return 0
-    elif isinstance(model, (TextualInversionModelRaw, IPAdapter, LoRAModelRaw)):
+    elif isinstance(model, (TextualInversionModelRaw, IPAdapter, LoRAModelRaw, SpandrelImageToImageModel)):
        return model.calc_size()
    else:
        # TODO(ryand): Promote this from a log to an exception once we are confident that we are handling all of the
@ -4,6 +4,7 @@ from pathlib import Path
 from typing import Any, Dict, Literal, Optional, Union

 import safetensors.torch
+import spandrel
 import torch
 from picklescan.scanner import scan_file_path

@ -25,6 +26,7 @@ from invokeai.backend.model_manager.config import (
    SchedulerPredictionType,
 )
 from invokeai.backend.model_manager.util.model_util import lora_token_vector_length, read_checkpoint_meta
+from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel
 from invokeai.backend.util.silence_warnings import SilenceWarnings

 CkptType = Dict[str | int, Any]
@ -220,24 +222,46 @@ class ModelProbe(object):
        ckpt = ckpt.get("state_dict", ckpt)

        for key in [str(k) for k in ckpt.keys()]:
-            if any(key.startswith(v) for v in {"cond_stage_model.", "first_stage_model.", "model.diffusion_model."}):
+            if key.startswith(("cond_stage_model.", "first_stage_model.", "model.diffusion_model.")):
                return ModelType.Main
-            elif any(key.startswith(v) for v in {"encoder.conv_in", "decoder.conv_in"}):
+            elif key.startswith(("encoder.conv_in", "decoder.conv_in")):
                return ModelType.VAE
-            elif any(key.startswith(v) for v in {"lora_te_", "lora_unet_"}):
+            elif key.startswith(("lora_te_", "lora_unet_")):
                return ModelType.LoRA
-            elif any(key.endswith(v) for v in {"to_k_lora.up.weight", "to_q_lora.down.weight"}):
+            elif key.endswith(("to_k_lora.up.weight", "to_q_lora.down.weight")):
                return ModelType.LoRA
-            elif any(key.startswith(v) for v in {"controlnet", "control_model", "input_blocks"}):
+            elif key.startswith(("controlnet", "control_model", "input_blocks")):
                return ModelType.ControlNet
-            elif any(key.startswith(v) for v in {"image_proj.", "ip_adapter."}):
+            elif key.startswith(("image_proj.", "ip_adapter.")):
                return ModelType.IPAdapter
            elif key in {"emb_params", "string_to_param"}:
                return ModelType.TextualInversion
-            else:
-                # diffusers-ti
-                if len(ckpt) < 10 and all(isinstance(v, torch.Tensor) for v in ckpt.values()):
-                    return ModelType.TextualInversion
+
+        # diffusers-ti
+        if len(ckpt) < 10 and all(isinstance(v, torch.Tensor) for v in ckpt.values()):
+            return ModelType.TextualInversion
+
+        # Check if the model can be loaded as a SpandrelImageToImageModel.
+        # This check is intentionally performed last, as it can be expensive (it requires loading the model from disk).
+        try:
+            # It would be nice to avoid having to load the Spandrel model from disk here. A couple of options were
+            # explored to avoid this:
+            # 1. Call `SpandrelImageToImageModel.load_from_state_dict(ckpt)`, where `ckpt` is a state_dict on the meta
+            #    device. Unfortunately, some Spandrel models perform operations during initialization that are not
+            #    supported on meta tensors.
+            # 2. Spandrel has internal logic to determine a model's type from its state_dict before loading the model.
+            #    This logic is not exposed in spandrel's public API. We could copy the logic here, but then we have to
+            #    maintain it, and the risk of false positive detections is higher.
+            SpandrelImageToImageModel.load_from_file(model_path)
+            return ModelType.SpandrelImageToImage
+        except spandrel.UnsupportedModelError:
+            pass
+        except RuntimeError as e:
+            if "No such file or directory" in str(e):
+                # This error is expected if the model_path does not exist (which is the case in some unit tests).
+                pass
+            else:
+                raise e

        raise InvalidModelConfigException(f"Unable to determine model type for {model_path}")
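One small readability win in this hunk: `str.startswith` (and `str.endswith`) accept a tuple of prefixes directly, so the `any(...)` generator over a set was unnecessary. A quick illustration:

```python
key = "model.diffusion_model.input_blocks.0.0.weight"

# Before: an any() over a set of prefixes.
assert any(key.startswith(v) for v in {"cond_stage_model.", "model.diffusion_model."})

# After: startswith takes a tuple of prefixes natively (endswith works the same way).
assert key.startswith(("cond_stage_model.", "model.diffusion_model."))
```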
@ -569,6 +593,11 @@ class T2IAdapterCheckpointProbe(CheckpointProbeBase):
        raise NotImplementedError()


+class SpandrelImageToImageCheckpointProbe(CheckpointProbeBase):
+    def get_base_type(self) -> BaseModelType:
+        return BaseModelType.Any
+
+
########################################################
# classes for probing folders
#######################################################
@ -776,6 +805,11 @@ class CLIPVisionFolderProbe(FolderProbeBase):
        return BaseModelType.Any


+class SpandrelImageToImageFolderProbe(FolderProbeBase):
+    def get_base_type(self) -> BaseModelType:
+        raise NotImplementedError()
+
+
class T2IAdapterFolderProbe(FolderProbeBase):
    def get_base_type(self) -> BaseModelType:
        config_file = self.model_path / "config.json"
@ -805,6 +839,7 @@ ModelProbe.register_probe("diffusers", ModelType.ControlNet, ControlNetFolderPro
 ModelProbe.register_probe("diffusers", ModelType.IPAdapter, IPAdapterFolderProbe)
 ModelProbe.register_probe("diffusers", ModelType.CLIPVision, CLIPVisionFolderProbe)
 ModelProbe.register_probe("diffusers", ModelType.T2IAdapter, T2IAdapterFolderProbe)
+ModelProbe.register_probe("diffusers", ModelType.SpandrelImageToImage, SpandrelImageToImageFolderProbe)

 ModelProbe.register_probe("checkpoint", ModelType.Main, PipelineCheckpointProbe)
 ModelProbe.register_probe("checkpoint", ModelType.VAE, VaeCheckpointProbe)
@ -814,5 +849,6 @@ ModelProbe.register_probe("checkpoint", ModelType.ControlNet, ControlNetCheckpoi
 ModelProbe.register_probe("checkpoint", ModelType.IPAdapter, IPAdapterCheckpointProbe)
 ModelProbe.register_probe("checkpoint", ModelType.CLIPVision, CLIPVisionCheckpointProbe)
 ModelProbe.register_probe("checkpoint", ModelType.T2IAdapter, T2IAdapterCheckpointProbe)
+ModelProbe.register_probe("checkpoint", ModelType.SpandrelImageToImage, SpandrelImageToImageCheckpointProbe)

 ModelProbe.register_probe("onnx", ModelType.ONNX, ONNXFolderProbe)
@ -399,6 +399,43 @@ STARTER_MODELS: list[StarterModel] = [
        type=ModelType.T2IAdapter,
    ),
    # endregion
+    # region SpandrelImageToImage
+    StarterModel(
+        name="RealESRGAN_x4plus_anime_6B",
+        base=BaseModelType.Any,
+        source="https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.2.4/RealESRGAN_x4plus_anime_6B.pth",
+        description="A Real-ESRGAN 4x upscaling model (optimized for anime images).",
+        type=ModelType.SpandrelImageToImage,
+    ),
+    StarterModel(
+        name="RealESRGAN_x4plus",
+        base=BaseModelType.Any,
+        source="https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.0/RealESRGAN_x4plus.pth",
+        description="A Real-ESRGAN 4x upscaling model (general-purpose).",
+        type=ModelType.SpandrelImageToImage,
+    ),
+    StarterModel(
+        name="ESRGAN_SRx4_DF2KOST_official",
+        base=BaseModelType.Any,
+        source="https://github.com/xinntao/Real-ESRGAN/releases/download/v0.1.1/ESRGAN_SRx4_DF2KOST_official-ff704c30.pth",
+        description="The official ESRGAN 4x upscaling model.",
+        type=ModelType.SpandrelImageToImage,
+    ),
+    StarterModel(
+        name="RealESRGAN_x2plus",
+        base=BaseModelType.Any,
+        source="https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.1/RealESRGAN_x2plus.pth",
+        description="A Real-ESRGAN 2x upscaling model (general-purpose).",
+        type=ModelType.SpandrelImageToImage,
+    ),
+    StarterModel(
+        name="SwinIR - realSR_BSRGAN_DFOWMFC_s64w8_SwinIR-L_x4_GAN",
+        base=BaseModelType.Any,
+        source="https://github.com/JingyunLiang/SwinIR/releases/download/v0.0/003_realSR_BSRGAN_DFOWMFC_s64w8_SwinIR-L_x4_GAN-with-dict-keys-params-and-params_ema.pth",
+        description="A SwinIR 4x upscaling model.",
+        type=ModelType.SpandrelImageToImage,
+    ),
+    # endregion
]

assert len(STARTER_MODELS) == len({m.source for m in STARTER_MODELS}), "Duplicate starter models"
@ -158,15 +158,12 @@ class ModelPatcher:
                        # We intentionally move to the target device first, then cast. Experimentally, this was found to
                        # be significantly faster for 16-bit CPU tensors being moved to a CUDA device than doing the
                        # same thing in a single call to '.to(...)'.
-                        layer.to(device=device, non_blocking=TorchDevice.get_non_blocking(device))
-                        layer.to(dtype=torch.float32, non_blocking=TorchDevice.get_non_blocking(device))
+                        layer.to(device=device)
+                        layer.to(dtype=torch.float32)
                        # TODO(ryand): Using torch.autocast(...) over explicit casting may offer a speed benefit on CUDA
                        # devices here. Experimentally, it was found to be very slow on CPU. More investigation needed.
                        layer_weight = layer.get_weight(module.weight) * (lora_weight * layer_scale)
-                        layer.to(
-                            device=TorchDevice.CPU_DEVICE,
-                            non_blocking=TorchDevice.get_non_blocking(TorchDevice.CPU_DEVICE),
-                        )
+                        layer.to(device=TorchDevice.CPU_DEVICE)

                        assert isinstance(layer_weight, torch.Tensor)  # mypy thinks layer_weight is a float|Any ??!
                        if module.weight.shape != layer_weight.shape:
@ -175,7 +172,7 @@ class ModelPatcher:
                            layer_weight = layer_weight.reshape(module.weight.shape)

                        assert isinstance(layer_weight, torch.Tensor)  # mypy thinks layer_weight is a float|Any ??!
-                        module.weight += layer_weight.to(dtype=dtype, non_blocking=TorchDevice.get_non_blocking(device))
+                        module.weight += layer_weight.to(dtype=dtype)

                        yield  # wait for context manager exit

@ -183,9 +180,7 @@ class ModelPatcher:
            assert hasattr(model, "get_submodule")  # mypy not picking up fact that torch.nn.Module has get_submodule()
            with torch.no_grad():
                for module_key, weight in original_weights.items():
-                    model.get_submodule(module_key).weight.copy_(
-                        weight, non_blocking=TorchDevice.get_non_blocking(weight.device)
-                    )
+                    model.get_submodule(module_key).weight.copy_(weight)

    @classmethod
    @contextmanager
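The comment in this hunk is worth internalizing: for 16-bit CPU tensors headed to CUDA, moving first and casting second ships the smaller fp16 payload across PCIe and does the widening cast on the GPU, which the authors measured to be significantly faster than a fused call. A minimal sketch of the two orderings, assuming a CUDA device is available (both produce identical results):

```python
import torch

x = torch.randn(4096, 4096, dtype=torch.float16)  # 16-bit tensor on the CPU

if torch.cuda.is_available():
    # Fused: transfer and cast in a single call.
    y_fused = x.to(device="cuda", dtype=torch.float32)

    # Two-step: transfer the compact fp16 buffer first, then cast on-device.
    y_split = x.to(device="cuda")
    y_split = y_split.to(dtype=torch.float32)

    assert torch.equal(y_fused, y_split)
```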
@ -190,12 +190,7 @@ class IAIOnnxRuntimeModel(RawModel):
        return self.session.run(None, inputs)

    # compatability with RawModel ABC
-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
        pass

    # compatability with diffusers load code
@ -1,15 +1,3 @@
-"""Base class for 'Raw' models.
-
-The RawModel class is the base class of LoRAModelRaw and TextualInversionModelRaw,
-and is used for type checking of calls to the model patcher. Its main purpose
-is to avoid a circular import issues when lora.py tries to import BaseModelType
-from invokeai.backend.model_manager.config, and the latter tries to import LoRAModelRaw
-from lora.py.
-
-The term 'raw' was introduced to describe a wrapper around a torch.nn.Module
-that adds additional methods and attributes.
-"""
-
 from abc import ABC, abstractmethod
 from typing import Optional

@ -17,13 +5,18 @@ import torch


 class RawModel(ABC):
-    """Abstract base class for 'Raw' model wrappers."""
+    """Base class for 'Raw' models.
+
+    The RawModel class is the base class of LoRAModelRaw, TextualInversionModelRaw, etc.
+    and is used for type checking of calls to the model patcher. Its main purpose
+    is to avoid a circular import issues when lora.py tries to import BaseModelType
+    from invokeai.backend.model_manager.config, and the latter tries to import LoRAModelRaw
+    from lora.py.
+
+    The term 'raw' was introduced to describe a wrapper around a torch.nn.Module
+    that adds additional methods and attributes.
+    """

    @abstractmethod
-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
        pass
invokeai/backend/spandrel_image_to_image_model.py (new file, 139 lines)
@ -0,0 +1,139 @@
+from pathlib import Path
+from typing import Any, Optional
+
+import numpy as np
+import torch
+from PIL import Image
+from spandrel import ImageModelDescriptor, ModelLoader
+
+from invokeai.backend.raw_model import RawModel
+
+
+class SpandrelImageToImageModel(RawModel):
+    """A wrapper for a Spandrel Image-to-Image model.
+
+    The main reason for having a wrapper class is to integrate with the type handling of RawModel.
+    """
+
+    def __init__(self, spandrel_model: ImageModelDescriptor[Any]):
+        self._spandrel_model = spandrel_model
+
+    @staticmethod
+    def pil_to_tensor(image: Image.Image) -> torch.Tensor:
+        """Convert PIL Image to the torch.Tensor format expected by SpandrelImageToImageModel.run().
+
+        Args:
+            image (Image.Image): A PIL Image with shape (H, W, C) and values in the range [0, 255].
+
+        Returns:
+            torch.Tensor: A torch.Tensor with shape (N, C, H, W) and values in the range [0, 1].
+        """
+        image_np = np.array(image)
+        # (H, W, C) -> (C, H, W)
+        image_np = np.transpose(image_np, (2, 0, 1))
+        image_np = image_np / 255
+        image_tensor = torch.from_numpy(image_np).float()
+        # (C, H, W) -> (N, C, H, W)
+        image_tensor = image_tensor.unsqueeze(0)
+        return image_tensor
+
+    @staticmethod
+    def tensor_to_pil(tensor: torch.Tensor) -> Image.Image:
+        """Convert a torch.Tensor produced by SpandrelImageToImageModel.run() to a PIL Image.
+
+        Args:
+            tensor (torch.Tensor): A torch.Tensor with shape (N, C, H, W) and values in the range [0, 1].
+
+        Returns:
+            Image.Image: A PIL Image with shape (H, W, C) and values in the range [0, 255].
+        """
+        # (N, C, H, W) -> (C, H, W)
+        tensor = tensor.squeeze(0)
+        # (C, H, W) -> (H, W, C)
+        tensor = tensor.permute(1, 2, 0)
+        tensor = tensor.clamp(0, 1)
+        tensor = (tensor * 255).cpu().detach().numpy().astype(np.uint8)
+        image = Image.fromarray(tensor)
+        return image
+
+    def run(self, image_tensor: torch.Tensor) -> torch.Tensor:
+        """Run the image-to-image model.
+
+        Args:
+            image_tensor (torch.Tensor): A torch.Tensor with shape (N, C, H, W) and values in the range [0, 1].
+        """
+        return self._spandrel_model(image_tensor)
+
+    @classmethod
+    def load_from_file(cls, file_path: str | Path):
+        model = ModelLoader().load_from_file(file_path)
+        if not isinstance(model, ImageModelDescriptor):
+            raise ValueError(
+                f"Loaded a spandrel model of type '{type(model)}'. Only image-to-image models are supported "
+                "('ImageModelDescriptor')."
+            )
+
+        return cls(spandrel_model=model)
+
+    @classmethod
+    def load_from_state_dict(cls, state_dict: dict[str, torch.Tensor]):
+        model = ModelLoader().load_from_state_dict(state_dict)
+        if not isinstance(model, ImageModelDescriptor):
+            raise ValueError(
+                f"Loaded a spandrel model of type '{type(model)}'. Only image-to-image models are supported "
+                "('ImageModelDescriptor')."
+            )
+
+        return cls(spandrel_model=model)
+
+    def supports_dtype(self, dtype: torch.dtype) -> bool:
+        """Check if the model supports the given dtype."""
+        if dtype == torch.float16:
+            return self._spandrel_model.supports_half
+        elif dtype == torch.bfloat16:
+            return self._spandrel_model.supports_bfloat16
+        elif dtype == torch.float32:
+            # All models support float32.
+            return True
+        else:
+            raise ValueError(f"Unexpected dtype '{dtype}'.")
+
+    def get_model_type_name(self) -> str:
+        """The model type name. Intended for logging / debugging purposes. Do not rely on this field remaining
+        consistent over time.
+        """
+        return str(type(self._spandrel_model.model))
+
+    def to(
+        self,
+        device: Optional[torch.device] = None,
+        dtype: Optional[torch.dtype] = None,
+        non_blocking: bool = False,
+    ) -> None:
+        """Note: Some models have limited dtype support. Call supports_dtype(...) to check if the dtype is supported.
+        Note: The non_blocking parameter is currently ignored."""
+        # TODO(ryand): spandrel.ImageModelDescriptor.to(...) does not support non_blocking. We will have to access the
+        # model directly if we want to apply this optimization.
+        self._spandrel_model.to(device=device, dtype=dtype)
+
+    @property
+    def device(self) -> torch.device:
+        """The device of the underlying model."""
+        return self._spandrel_model.device
+
+    @property
+    def dtype(self) -> torch.dtype:
+        """The dtype of the underlying model."""
+        return self._spandrel_model.dtype
+
+    @property
+    def scale(self) -> int:
+        """The scale of the model (e.g. 1x, 2x, 4x, etc.)."""
+        return self._spandrel_model.scale
+
+    def calc_size(self) -> int:
+        """Get size of the model in memory in bytes."""
+        # HACK(ryand): Fix this issue with circular imports.
+        from invokeai.backend.model_manager.load.model_util import calc_module_size
+
+        return calc_module_size(self._spandrel_model.model)
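Putting the wrapper's pieces together, a typical upscale call would look roughly like the sketch below. The file paths are placeholders; everything else follows the methods defined in the new file above:

```python
import torch
from PIL import Image

from invokeai.backend.spandrel_image_to_image_model import SpandrelImageToImageModel

# Load any spandrel-supported checkpoint, e.g. a Real-ESRGAN .pth file (path is illustrative).
model = SpandrelImageToImageModel.load_from_file("models/RealESRGAN_x4plus.pth")
model.to(device=torch.device("cuda" if torch.cuda.is_available() else "cpu"))

image = Image.open("input.png").convert("RGB")
with torch.no_grad():
    tensor = SpandrelImageToImageModel.pil_to_tensor(image).to(device=model.device, dtype=model.dtype)
    output = model.run(tensor)

upscaled = SpandrelImageToImageModel.tensor_to_pil(output)
upscaled.save("output.png")  # model.scale reports the upscaling factor (e.g. 4 for a 4x model)
```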
@ -65,17 +65,12 @@ class TextualInversionModelRaw(RawModel):

        return result

-    def to(
-        self,
-        device: Optional[torch.device] = None,
-        dtype: Optional[torch.dtype] = None,
-        non_blocking: bool = False,
-    ) -> None:
+    def to(self, device: Optional[torch.device] = None, dtype: Optional[torch.dtype] = None) -> None:
        if not torch.cuda.is_available():
            return
        for emb in [self.embedding, self.embedding_2]:
            if emb is not None:
-                emb.to(device=device, dtype=dtype, non_blocking=non_blocking)
+                emb.to(device=device, dtype=dtype)

    def calc_size(self) -> int:
        """Get the size of this model in bytes."""
@ -112,15 +112,3 @@ class TorchDevice:
    @classmethod
    def _to_dtype(cls, precision_name: TorchPrecisionNames) -> torch.dtype:
        return NAME_TO_PRECISION[precision_name]
-
-    @staticmethod
-    def get_non_blocking(to_device: torch.device) -> bool:
-        """Return the non_blocking flag to be used when moving a tensor to a given device.
-        MPS may have unexpected errors with non-blocking operations - we should not use non-blocking when moving _to_ MPS.
-        When moving _from_ MPS, we can use non-blocking operations.
-
-        See:
-        - https://github.com/pytorch/pytorch/issues/107455
-        - https://discuss.pytorch.org/t/should-we-set-non-blocking-to-true/38234/28
-        """
-        return False if to_device.type == "mps" else True
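This commit removes `non_blocking` transfers across the backend, and the deleted helper hints at why: `non_blocking=True` returns before the copy completes, which is only safe when the source is pinned host memory and the consumer synchronizes on the same stream, and the linked issues report problems on MPS. A hedged sketch of the discipline that `non_blocking` normally demands, and that the codebase no longer needs to worry about:

```python
import torch

if torch.cuda.is_available():
    # non_blocking only helps (and is only safe) for pinned host memory -> CUDA copies.
    src = torch.randn(1024, 1024).pin_memory()  # page-locked host buffer
    dst = src.to("cuda", non_blocking=True)     # may return before the DMA finishes
    torch.cuda.synchronize()                    # must sync before trusting dst's contents

# The plain blocking form now used everywhere in this codebase is always correct, on every backend:
#     tensor.to(device=device, dtype=dtype)
```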
@ -962,8 +962,8 @@
    "selectedForAutoAdd": "Selezionato per l'aggiunta automatica",
    "addSharedBoard": "Aggiungi una Bacheca Condivisa",
    "boards": "Bacheche",
-    "private": "Privata",
-    "shared": "Condivisa",
+    "private": "Bacheche private",
+    "shared": "Bacheche condivise",
    "addPrivateBoard": "Aggiungi una Bacheca Privata"
  },
  "controlnet": {
@ -1028,7 +1028,7 @@
    "minConfidence": "Confidenza minima",
    "scribble": "Scarabocchio",
    "amult": "Angolo di illuminazione",
-    "coarse": "Approssimativo",
+    "coarse": "Grossolano",
    "resizeSimple": "Ridimensiona (semplice)",
    "large": "Grande",
    "small": "Piccolo",
@ -1353,7 +1353,7 @@
    "lora": {
      "heading": "LoRA",
      "paragraphs": [
-        "Modelli leggeri utilizzati insieme ai modelli base."
+        "Modelli concettuali utilizzati insieme ai modelli di base."
      ]
    },
    "controlNet": {
@ -136,7 +136,12 @@ export const addImageDeletionListeners = (startAppListening: AppStartListening)
      if (data) {
        const deletedImageIndex = data.items.findIndex((i) => i.image_name === imageDTO.image_name);
        const nextImage = data.items[deletedImageIndex + 1] ?? data.items[0] ?? null;
-        dispatch(imageSelected(nextImage));
+        if (nextImage?.image_name === imageDTO.image_name) {
+          // If the next image is the same as the deleted one, it means it was the last image, reset selection
+          dispatch(imageSelected(null));
+        } else {
+          dispatch(imageSelected(nextImage));
+        }
      }
    }

@ -176,6 +181,8 @@ export const addImageDeletionListeners = (startAppListening: AppStartListening)
      const queryArgs = selectListImagesQueryArgs(state);
      const { data } = imagesApi.endpoints.listImages.select(queryArgs)(state);
      if (data) {
+        // When we delete multiple images, we clear the selection. Then, the the next time we load images, we will
+        // select the first one. This is handled below in the listener for `imagesApi.endpoints.listImages.matchFulfilled`.
        dispatch(imageSelected(null));
      }
    }
@ -1,4 +1,4 @@
|
|||||||
import { Flex, Text } from '@invoke-ai/ui-library';
|
import { Box, Flex, Text } from '@invoke-ai/ui-library';
|
||||||
import { EMPTY_ARRAY } from 'app/store/constants';
|
import { EMPTY_ARRAY } from 'app/store/constants';
|
||||||
import { useAppSelector } from 'app/store/storeHooks';
|
import { useAppSelector } from 'app/store/storeHooks';
|
||||||
import { overlayScrollbarsParams } from 'common/components/OverlayScrollbars/constants';
|
import { overlayScrollbarsParams } from 'common/components/OverlayScrollbars/constants';
|
||||||
@ -40,9 +40,41 @@ const BoardsList = () => {
|
|||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
<Flex flexDir="column" gap={2} borderRadius="base" maxHeight="100%">
|
<Box position="relative" w="full" h="full">
|
||||||
<OverlayScrollbarsComponent defer style={overlayScrollbarsStyles} options={overlayScrollbarsParams.options}>
|
<Box position="absolute" top={0} right={0} bottom={0} left={0}>
|
||||||
{allowPrivateBoards && (
|
<OverlayScrollbarsComponent defer style={overlayScrollbarsStyles} options={overlayScrollbarsParams.options}>
|
||||||
|
{allowPrivateBoards && (
|
||||||
|
<Flex direction="column" gap={1}>
|
||||||
|
<Flex
|
||||||
|
position="sticky"
|
||||||
|
w="full"
|
||||||
|
justifyContent="space-between"
|
||||||
|
alignItems="center"
|
||||||
|
ps={2}
|
||||||
|
pb={1}
|
||||||
|
pt={2}
|
||||||
|
zIndex={1}
|
||||||
|
top={0}
|
||||||
|
bg="base.900"
|
||||||
|
>
|
||||||
|
<Text fontSize="md" fontWeight="semibold" userSelect="none">
|
||||||
|
{t('boards.private')}
|
||||||
|
</Text>
|
||||||
|
<AddBoardButton isPrivateBoard={true} />
|
||||||
|
</Flex>
|
||||||
|
<Flex direction="column" gap={1}>
|
||||||
|
<NoBoardBoard isSelected={selectedBoardId === 'none'} />
|
||||||
|
{filteredPrivateBoards.map((board) => (
|
||||||
|
<GalleryBoard
|
||||||
|
board={board}
|
||||||
|
isSelected={selectedBoardId === board.board_id}
|
||||||
|
setBoardToDelete={setBoardToDelete}
|
||||||
|
key={board.board_id}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</Flex>
|
||||||
|
</Flex>
|
||||||
|
)}
|
||||||
<Flex direction="column" gap={1}>
|
<Flex direction="column" gap={1}>
|
||||||
<Flex
|
<Flex
|
||||||
position="sticky"
|
position="sticky"
|
||||||
@ -50,19 +82,20 @@ const BoardsList = () => {
|
|||||||
justifyContent="space-between"
|
justifyContent="space-between"
|
||||||
alignItems="center"
|
alignItems="center"
|
||||||
ps={2}
|
ps={2}
|
||||||
py={1}
|
pb={1}
|
||||||
|
pt={2}
|
||||||
zIndex={1}
|
zIndex={1}
|
||||||
top={0}
|
top={0}
|
||||||
bg="base.900"
|
bg="base.900"
|
||||||
>
|
>
|
||||||
<Text fontSize="md" fontWeight="semibold" userSelect="none">
|
<Text fontSize="md" fontWeight="semibold" userSelect="none">
|
||||||
{t('boards.private')}
|
{allowPrivateBoards ? t('boards.shared') : t('boards.boards')}
|
||||||
</Text>
|
</Text>
|
||||||
<AddBoardButton isPrivateBoard={true} />
|
<AddBoardButton isPrivateBoard={false} />
|
||||||
</Flex>
|
</Flex>
|
||||||
<Flex direction="column" gap={1}>
|
<Flex direction="column" gap={1}>
|
||||||
<NoBoardBoard isSelected={selectedBoardId === 'none'} />
|
{!allowPrivateBoards && <NoBoardBoard isSelected={selectedBoardId === 'none'} />}
|
||||||
{filteredPrivateBoards.map((board) => (
|
{filteredSharedBoards.map((board) => (
|
||||||
<GalleryBoard
|
<GalleryBoard
|
||||||
board={board}
|
board={board}
|
||||||
isSelected={selectedBoardId === board.board_id}
|
isSelected={selectedBoardId === board.board_id}
|
||||||
@ -72,38 +105,9 @@ const BoardsList = () => {
|
|||||||
))}
|
))}
|
||||||
</Flex>
|
</Flex>
|
||||||
</Flex>
|
</Flex>
|
||||||
)}
|
</OverlayScrollbarsComponent>
|
||||||
<Flex direction="column" gap={1} pb={2}>
|
</Box>
|
||||||
<Flex
|
</Box>
|
||||||
position="sticky"
|
|
||||||
w="full"
|
|
||||||
justifyContent="space-between"
|
|
||||||
alignItems="center"
|
|
||||||
ps={2}
|
|
||||||
py={1}
|
|
||||||
zIndex={1}
|
|
||||||
top={0}
|
|
||||||
bg="base.900"
|
|
||||||
>
|
|
||||||
<Text fontSize="md" fontWeight="semibold" userSelect="none">
|
|
||||||
{allowPrivateBoards ? t('boards.shared') : t('boards.boards')}
|
|
||||||
</Text>
|
|
||||||
<AddBoardButton isPrivateBoard={false} />
|
|
||||||
</Flex>
|
|
||||||
<Flex direction="column" gap={1}>
|
|
||||||
{!allowPrivateBoards && <NoBoardBoard isSelected={selectedBoardId === 'none'} />}
|
|
||||||
{filteredSharedBoards.map((board) => (
|
|
||||||
<GalleryBoard
|
|
||||||
board={board}
|
|
||||||
isSelected={selectedBoardId === board.board_id}
|
|
||||||
setBoardToDelete={setBoardToDelete}
|
|
||||||
key={board.board_id}
|
|
||||||
/>
|
|
||||||
))}
|
|
||||||
</Flex>
|
|
||||||
</Flex>
|
|
||||||
</OverlayScrollbarsComponent>
|
|
||||||
</Flex>
|
|
||||||
<DeleteBoardModal boardToDelete={boardToDelete} setBoardToDelete={setBoardToDelete} />
|
<DeleteBoardModal boardToDelete={boardToDelete} setBoardToDelete={setBoardToDelete} />
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
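The BoardsList change above replaces a max-height Flex with a position: relative wrapper plus an absolutely positioned inner Box, so the OverlayScrollbarsComponent always fills the panel exactly instead of growing with its content. A plain-CSS sketch of that wrapper pattern (the component and style names here are illustrative, not from the codebase):

import type { CSSProperties, PropsWithChildren } from 'react';

const outer: CSSProperties = { position: 'relative', width: '100%', height: '100%' };
// Pinning all four edges makes the inner element exactly as large as the wrapper,
// independent of how tall the scrollable content is.
const inner: CSSProperties = { position: 'absolute', top: 0, right: 0, bottom: 0, left: 0, overflowY: 'auto' };

export const FillParentScrollArea = ({ children }: PropsWithChildren) => (
  <div style={outer}>
    <div style={inner}>{children}</div>
  </div>
);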
@@ -16,6 +16,7 @@ import { GalleryHeader } from 'features/gallery/components/GalleryHeader';
 import { galleryViewChanged } from 'features/gallery/store/gallerySlice';
 import ResizeHandle from 'features/ui/components/tabs/ResizeHandle';
 import { usePanel, type UsePanelOptions } from 'features/ui/hooks/usePanel';
+import type { CSSProperties } from 'react';
 import { memo, useCallback, useMemo, useRef } from 'react';
 import { useTranslation } from 'react-i18next';
 import { PiMagnifyingGlassBold } from 'react-icons/pi';
@@ -29,13 +30,15 @@ import GalleryImageGrid from './ImageGrid/GalleryImageGrid';
 import { GalleryPagination } from './ImageGrid/GalleryPagination';
 import { GallerySearch } from './ImageGrid/GallerySearch';

-const baseStyles: ChakraProps['sx'] = {
+const COLLAPSE_STYLES: CSSProperties = { flexShrink: 0, minHeight: 0 };
+
+const BASE_STYLES: ChakraProps['sx'] = {
   fontWeight: 'semibold',
   fontSize: 'sm',
   color: 'base.300',
 };

-const selectedStyles: ChakraProps['sx'] = {
+const SELECTED_STYLES: ChakraProps['sx'] = {
   borderColor: 'base.800',
   borderBottomColor: 'base.900',
   color: 'invokeBlue.300',
@@ -110,11 +113,13 @@ const ImageGalleryContent = () => {
         onExpand={boardsListPanel.onExpand}
         collapsible
       >
-        <Collapse in={boardSearchDisclosure.isOpen}>
-          <BoardsSearch />
-        </Collapse>
-        <Divider pt={2} />
-        <BoardsList />
+        <Flex flexDir="column" w="full" h="full">
+          <Collapse in={boardSearchDisclosure.isOpen} style={COLLAPSE_STYLES}>
+            <BoardsSearch />
+          </Collapse>
+          <Divider pt={2} />
+          <BoardsList />
+        </Flex>
       </Panel>
       <ResizeHandle
         id="gallery-panel-handle"
@@ -125,10 +130,10 @@ const ImageGalleryContent = () => {
         <Flex flexDirection="column" alignItems="center" justifyContent="space-between" h="full" w="full">
           <Tabs index={galleryView === 'images' ? 0 : 1} variant="enclosed" display="flex" flexDir="column" w="full">
             <TabList gap={2} fontSize="sm" borderColor="base.800">
-              <Tab sx={baseStyles} _selected={selectedStyles} onClick={handleClickImages} data-testid="images-tab">
+              <Tab sx={BASE_STYLES} _selected={SELECTED_STYLES} onClick={handleClickImages} data-testid="images-tab">
                 {t('parameters.images')}
               </Tab>
-              <Tab sx={baseStyles} _selected={selectedStyles} onClick={handleClickAssets} data-testid="assets-tab">
+              <Tab sx={BASE_STYLES} _selected={SELECTED_STYLES} onClick={handleClickAssets} data-testid="assets-tab">
                 {t('gallery.assets')}
               </Tab>
               <Spacer />
@@ -157,7 +162,7 @@ const ImageGalleryContent = () => {
           </TabList>
         </Tabs>
         <Box w="full">
-          <Collapse in={searchDisclosure.isOpen}>
+          <Collapse in={searchDisclosure.isOpen} style={COLLAPSE_STYLES}>
             <Box w="full" pt={2}>
               <GallerySearch />
             </Box>
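The new COLLAPSE_STYLES constant ({ flexShrink: 0, minHeight: 0 }) is applied to each Collapse so that, inside the new column Flex, the animated search field plausibly keeps its measured height against sibling flex items (flexShrink: 0) while still being able to animate all the way down to zero (minHeight: 0). A reduced plain-CSS sketch of the layout idea (all names here are stand-ins):

import type { CSSProperties, ReactNode } from 'react';

const column: CSSProperties = { display: 'flex', flexDirection: 'column', height: '100%' };
// flexShrink: 0 keeps siblings from squeezing the collapsible row;
// minHeight: 0 lets its animated height actually reach zero.
const collapseStyles: CSSProperties = { flexShrink: 0, minHeight: 0 };

export const PanelSketch = ({ search, list }: { search: ReactNode; list: ReactNode }) => (
  <div style={column}>
    <div style={collapseStyles}>{search}</div>
    <div style={{ flex: 1, minHeight: 0 }}>{list}</div>
  </div>
);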
@@ -11,6 +11,7 @@ import {
   useLoRAModels,
   useMainModels,
   useRefinerModels,
+  useSpandrelImageToImageModels,
   useT2IAdapterModels,
   useVAEModels,
 } from 'services/api/hooks/modelsByType';
@@ -71,6 +72,13 @@ const ModelList = () => {
     [vaeModels, searchTerm, filteredModelType]
   );

+  const [spandrelImageToImageModels, { isLoading: isLoadingSpandrelImageToImageModels }] =
+    useSpandrelImageToImageModels();
+  const filteredSpandrelImageToImageModels = useMemo(
+    () => modelsFilter(spandrelImageToImageModels, searchTerm, filteredModelType),
+    [spandrelImageToImageModels, searchTerm, filteredModelType]
+  );
+
   const totalFilteredModels = useMemo(() => {
     return (
       filteredMainModels.length +
@@ -80,7 +88,8 @@ const ModelList = () => {
       filteredControlNetModels.length +
       filteredT2IAdapterModels.length +
       filteredIPAdapterModels.length +
-      filteredVAEModels.length
+      filteredVAEModels.length +
+      filteredSpandrelImageToImageModels.length
     );
   }, [
     filteredControlNetModels.length,
@@ -91,6 +100,7 @@ const ModelList = () => {
     filteredRefinerModels.length,
     filteredT2IAdapterModels.length,
     filteredVAEModels.length,
+    filteredSpandrelImageToImageModels.length,
   ]);

   return (
@@ -143,6 +153,17 @@ const ModelList = () => {
         {!isLoadingT2IAdapterModels && filteredT2IAdapterModels.length > 0 && (
           <ModelListWrapper title={t('common.t2iAdapter')} modelList={filteredT2IAdapterModels} key="t2i-adapters" />
         )}
+        {/* Spandrel Image to Image List */}
+        {isLoadingSpandrelImageToImageModels && (
+          <FetchingModelsLoader loadingMessage="Loading Image-to-Image Models..." />
+        )}
+        {!isLoadingSpandrelImageToImageModels && filteredSpandrelImageToImageModels.length > 0 && (
+          <ModelListWrapper
+            title="Image-to-Image"
+            modelList={filteredSpandrelImageToImageModels}
+            key="spandrel-image-to-image"
+          />
+        )}
         {totalFilteredModels === 0 && (
           <Flex w="full" h="full" alignItems="center" justifyContent="center">
             <Text>{t('modelManager.noMatchingModels')}</Text>
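Each model type in ModelList follows the same recipe: fetch via its hook, filter with modelsFilter behind useMemo, add the filtered count to totalFilteredModels, and render either a loader or a ModelListWrapper. The real modelsFilter is not shown in this diff; a sketch along those lines, with assumed types:

// Assumed shape for illustration only; the real modelsFilter lives in ModelList.tsx.
type ModelConfigLike = { name: string; type: string };

const modelsFilterSketch = <T extends ModelConfigLike>(
  models: T[],
  searchTerm: string,
  filteredModelType: string | null
): T[] =>
  models.filter(
    (model) =>
      model.name.toLowerCase().includes(searchTerm.toLowerCase()) &&
      (filteredModelType === null || model.type === filteredModelType)
  );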
@@ -21,6 +21,7 @@ export const ModelTypeFilter = () => {
       t2i_adapter: t('common.t2iAdapter'),
       ip_adapter: t('common.ipAdapter'),
       clip_vision: 'Clip Vision',
+      spandrel_image_to_image: 'Image-to-Image',
     }),
     [t]
   );
@@ -32,6 +32,8 @@ import {
   isSDXLMainModelFieldInputTemplate,
   isSDXLRefinerModelFieldInputInstance,
   isSDXLRefinerModelFieldInputTemplate,
+  isSpandrelImageToImageModelFieldInputInstance,
+  isSpandrelImageToImageModelFieldInputTemplate,
   isStringFieldInputInstance,
   isStringFieldInputTemplate,
   isT2IAdapterModelFieldInputInstance,
@@ -54,6 +56,7 @@ import NumberFieldInputComponent from './inputs/NumberFieldInputComponent';
 import RefinerModelFieldInputComponent from './inputs/RefinerModelFieldInputComponent';
 import SchedulerFieldInputComponent from './inputs/SchedulerFieldInputComponent';
 import SDXLMainModelFieldInputComponent from './inputs/SDXLMainModelFieldInputComponent';
+import SpandrelImageToImageModelFieldInputComponent from './inputs/SpandrelImageToImageModelFieldInputComponent';
 import StringFieldInputComponent from './inputs/StringFieldInputComponent';
 import T2IAdapterModelFieldInputComponent from './inputs/T2IAdapterModelFieldInputComponent';
 import VAEModelFieldInputComponent from './inputs/VAEModelFieldInputComponent';
@@ -125,6 +128,20 @@ const InputFieldRenderer = ({ nodeId, fieldName }: InputFieldProps) => {
   if (isT2IAdapterModelFieldInputInstance(fieldInstance) && isT2IAdapterModelFieldInputTemplate(fieldTemplate)) {
     return <T2IAdapterModelFieldInputComponent nodeId={nodeId} field={fieldInstance} fieldTemplate={fieldTemplate} />;
   }
+
+  if (
+    isSpandrelImageToImageModelFieldInputInstance(fieldInstance) &&
+    isSpandrelImageToImageModelFieldInputTemplate(fieldTemplate)
+  ) {
+    return (
+      <SpandrelImageToImageModelFieldInputComponent
+        nodeId={nodeId}
+        field={fieldInstance}
+        fieldTemplate={fieldTemplate}
+      />
+    );
+  }

   if (isColorFieldInputInstance(fieldInstance) && isColorFieldInputTemplate(fieldTemplate)) {
     return <ColorFieldInputComponent nodeId={nodeId} field={fieldInstance} fieldTemplate={fieldTemplate} />;
   }
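The renderer gains one more guarded branch: checking the field instance and the field template together lets TypeScript narrow both before the Spandrel component receives them. A reduced, self-contained illustration of the pattern (types and guards are stand-ins, not the real field machinery):

type FieldInstance = { kind: string; value?: string };
type FieldTemplate = { kind: string; default?: string };

// Type predicates: the boolean result doubles as a compile-time narrowing hint.
const isSpandrelInstance = (i: FieldInstance): i is FieldInstance & { kind: 'spandrel' } => i.kind === 'spandrel';
const isSpandrelTemplate = (t: FieldTemplate): t is FieldTemplate & { kind: 'spandrel' } => t.kind === 'spandrel';

function renderField(instance: FieldInstance, template: FieldTemplate): string {
  if (isSpandrelInstance(instance) && isSpandrelTemplate(template)) {
    // Both values are narrowed here, so the branch body needs no casts.
    return instance.value ?? template.default ?? '';
  }
  return '';
}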
@@ -0,0 +1,55 @@
+import { Combobox, FormControl, Tooltip } from '@invoke-ai/ui-library';
+import { useAppDispatch } from 'app/store/storeHooks';
+import { useGroupedModelCombobox } from 'common/hooks/useGroupedModelCombobox';
+import { fieldSpandrelImageToImageModelValueChanged } from 'features/nodes/store/nodesSlice';
+import type {
+  SpandrelImageToImageModelFieldInputInstance,
+  SpandrelImageToImageModelFieldInputTemplate,
+} from 'features/nodes/types/field';
+import { memo, useCallback } from 'react';
+import { useSpandrelImageToImageModels } from 'services/api/hooks/modelsByType';
+import type { SpandrelImageToImageModelConfig } from 'services/api/types';
+
+import type { FieldComponentProps } from './types';
+
+const SpandrelImageToImageModelFieldInputComponent = (
+  props: FieldComponentProps<SpandrelImageToImageModelFieldInputInstance, SpandrelImageToImageModelFieldInputTemplate>
+) => {
+  const { nodeId, field } = props;
+  const dispatch = useAppDispatch();
+
+  const [modelConfigs, { isLoading }] = useSpandrelImageToImageModels();
+
+  const _onChange = useCallback(
+    (value: SpandrelImageToImageModelConfig | null) => {
+      if (!value) {
+        return;
+      }
+      dispatch(
+        fieldSpandrelImageToImageModelValueChanged({
+          nodeId,
+          fieldName: field.name,
+          value,
+        })
+      );
+    },
+    [dispatch, field.name, nodeId]
+  );
+
+  const { options, value, onChange } = useGroupedModelCombobox({
+    modelConfigs,
+    onChange: _onChange,
+    selectedModel: field.value,
+    isLoading,
+  });
+
+  return (
+    <Tooltip label={value?.description}>
+      <FormControl className="nowheel nodrag" isInvalid={!value}>
+        <Combobox value={value} placeholder="Pick one" options={options} onChange={onChange} />
+      </FormControl>
+    </Tooltip>
+  );
+};
+
+export default memo(SpandrelImageToImageModelFieldInputComponent);
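The new input component leans on useGroupedModelCombobox to translate between model configs and combobox options; the component itself only dispatches validated changes and ignores null selections. A rough sketch of what such an adapter must do, stated as an assumption (the real hook also appears to group options, e.g. by base model):

type Option = { label: string; value: string };
type ConfigLike = { key: string; name: string };

// Maps configs to options, resolves the current selection, and translates
// option picks back into config objects for the caller's onChange.
function adaptToCombobox<T extends ConfigLike>(
  configs: T[],
  selectedKey: string | undefined,
  onChangeConfig: (config: T | null) => void
) {
  const options: Option[] = configs.map((c) => ({ label: c.name, value: c.key }));
  const value = options.find((o) => o.value === selectedKey) ?? null;
  const onChange = (o: Option | null) => onChangeConfig(configs.find((c) => c.key === o?.value) ?? null);
  return { options, value, onChange };
}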
@@ -19,6 +19,7 @@ import type {
   ModelIdentifierFieldValue,
   SchedulerFieldValue,
   SDXLRefinerModelFieldValue,
+  SpandrelImageToImageModelFieldValue,
   StatefulFieldValue,
   StringFieldValue,
   T2IAdapterModelFieldValue,
@@ -39,6 +40,7 @@ import {
   zModelIdentifierFieldValue,
   zSchedulerFieldValue,
   zSDXLRefinerModelFieldValue,
+  zSpandrelImageToImageModelFieldValue,
   zStatefulFieldValue,
   zStringFieldValue,
   zT2IAdapterModelFieldValue,
@@ -333,6 +335,12 @@ export const nodesSlice = createSlice({
     fieldT2IAdapterModelValueChanged: (state, action: FieldValueAction<T2IAdapterModelFieldValue>) => {
       fieldValueReducer(state, action, zT2IAdapterModelFieldValue);
     },
+    fieldSpandrelImageToImageModelValueChanged: (
+      state,
+      action: FieldValueAction<SpandrelImageToImageModelFieldValue>
+    ) => {
+      fieldValueReducer(state, action, zSpandrelImageToImageModelFieldValue);
+    },
     fieldEnumModelValueChanged: (state, action: FieldValueAction<EnumFieldValue>) => {
       fieldValueReducer(state, action, zEnumFieldValue);
     },
@@ -384,6 +392,7 @@ export const {
   fieldImageValueChanged,
   fieldIPAdapterModelValueChanged,
   fieldT2IAdapterModelValueChanged,
+  fieldSpandrelImageToImageModelValueChanged,
   fieldLabelChanged,
   fieldLoRAModelValueChanged,
   fieldModelIdentifierValueChanged,
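Every field reducer in the slice funnels through fieldValueReducer with a zod schema, so a malformed payload never lands in state. The helper itself is not shown in this diff; a minimal sketch of the validate-then-write idea (the state shape and helper name here are assumptions):

import { z } from 'zod';

type FieldValuePayload<T> = { nodeId: string; fieldName: string; value: T };
type SketchState = { fields: Record<string, unknown> };

function fieldValueReducerSketch<T>(state: SketchState, payload: FieldValuePayload<T>, schema: z.ZodType<T>): void {
  const parsed = schema.safeParse(payload.value);
  if (!parsed.success) {
    return; // drop invalid payloads instead of writing them into state
  }
  state.fields[`${payload.nodeId}.${payload.fieldName}`] = parsed.data;
}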
@@ -66,6 +66,7 @@ const zModelType = z.enum([
   'embedding',
   'onnx',
   'clip_vision',
+  'spandrel_image_to_image',
 ]);
 const zSubModelType = z.enum([
   'unet',
@@ -38,6 +38,7 @@ export const MODEL_TYPES = [
   'VAEField',
   'CLIPField',
   'T2IAdapterModelField',
+  'SpandrelImageToImageModelField',
 ];

 /**
@@ -62,6 +63,7 @@ export const FIELD_COLORS: { [key: string]: string } = {
   MainModelField: 'teal.500',
   SDXLMainModelField: 'teal.500',
   SDXLRefinerModelField: 'teal.500',
+  SpandrelImageToImageModelField: 'teal.500',
   StringField: 'yellow.500',
   T2IAdapterField: 'teal.500',
   T2IAdapterModelField: 'teal.500',
@@ -139,6 +139,10 @@ const zT2IAdapterModelFieldType = zFieldTypeBase.extend({
   name: z.literal('T2IAdapterModelField'),
   originalType: zStatelessFieldType.optional(),
 });
+const zSpandrelImageToImageModelFieldType = zFieldTypeBase.extend({
+  name: z.literal('SpandrelImageToImageModelField'),
+  originalType: zStatelessFieldType.optional(),
+});
 const zSchedulerFieldType = zFieldTypeBase.extend({
   name: z.literal('SchedulerField'),
   originalType: zStatelessFieldType.optional(),
@@ -160,6 +164,7 @@ const zStatefulFieldType = z.union([
   zControlNetModelFieldType,
   zIPAdapterModelFieldType,
   zT2IAdapterModelFieldType,
+  zSpandrelImageToImageModelFieldType,
   zColorFieldType,
   zSchedulerFieldType,
 ]);
@@ -581,6 +586,33 @@ export const isT2IAdapterModelFieldInputTemplate = (val: unknown): val is T2IAdapterModelFieldInputTemplate =>
   zT2IAdapterModelFieldInputTemplate.safeParse(val).success;
 // #endregion
+
+// #region SpandrelModelToModelField
+
+export const zSpandrelImageToImageModelFieldValue = zModelIdentifierField.optional();
+const zSpandrelImageToImageModelFieldInputInstance = zFieldInputInstanceBase.extend({
+  value: zSpandrelImageToImageModelFieldValue,
+});
+const zSpandrelImageToImageModelFieldInputTemplate = zFieldInputTemplateBase.extend({
+  type: zSpandrelImageToImageModelFieldType,
+  originalType: zFieldType.optional(),
+  default: zSpandrelImageToImageModelFieldValue,
+});
+const zSpandrelImageToImageModelFieldOutputTemplate = zFieldOutputTemplateBase.extend({
+  type: zSpandrelImageToImageModelFieldType,
+});
+export type SpandrelImageToImageModelFieldValue = z.infer<typeof zSpandrelImageToImageModelFieldValue>;
+export type SpandrelImageToImageModelFieldInputInstance = z.infer<typeof zSpandrelImageToImageModelFieldInputInstance>;
+export type SpandrelImageToImageModelFieldInputTemplate = z.infer<typeof zSpandrelImageToImageModelFieldInputTemplate>;
+export const isSpandrelImageToImageModelFieldInputInstance = (
+  val: unknown
+): val is SpandrelImageToImageModelFieldInputInstance =>
+  zSpandrelImageToImageModelFieldInputInstance.safeParse(val).success;
+export const isSpandrelImageToImageModelFieldInputTemplate = (
+  val: unknown
+): val is SpandrelImageToImageModelFieldInputTemplate =>
+  zSpandrelImageToImageModelFieldInputTemplate.safeParse(val).success;
+// #endregion

 // #region SchedulerField

 export const zSchedulerFieldValue = zSchedulerField.optional();
@@ -667,6 +699,7 @@ export const zStatefulFieldValue = z.union([
   zControlNetModelFieldValue,
   zIPAdapterModelFieldValue,
   zT2IAdapterModelFieldValue,
+  zSpandrelImageToImageModelFieldValue,
   zColorFieldValue,
   zSchedulerFieldValue,
 ]);
@@ -694,6 +727,7 @@ const zStatefulFieldInputInstance = z.union([
   zControlNetModelFieldInputInstance,
   zIPAdapterModelFieldInputInstance,
   zT2IAdapterModelFieldInputInstance,
+  zSpandrelImageToImageModelFieldInputInstance,
   zColorFieldInputInstance,
   zSchedulerFieldInputInstance,
 ]);
@@ -722,6 +756,7 @@ const zStatefulFieldInputTemplate = z.union([
   zControlNetModelFieldInputTemplate,
   zIPAdapterModelFieldInputTemplate,
   zT2IAdapterModelFieldInputTemplate,
+  zSpandrelImageToImageModelFieldInputTemplate,
   zColorFieldInputTemplate,
   zSchedulerFieldInputTemplate,
   zStatelessFieldInputTemplate,
@@ -751,6 +786,7 @@ const zStatefulFieldOutputTemplate = z.union([
   zControlNetModelFieldOutputTemplate,
   zIPAdapterModelFieldOutputTemplate,
   zT2IAdapterModelFieldOutputTemplate,
+  zSpandrelImageToImageModelFieldOutputTemplate,
   zColorFieldOutputTemplate,
   zSchedulerFieldOutputTemplate,
 ]);
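All of the new guards above repeat one idiom: zod's safeParse(...).success used as a TypeScript type predicate. A generic factory, hypothetical here, makes the idiom explicit:

import { z } from 'zod';

// safeParse never throws; its boolean success is safe to use as a type predicate.
const buildTypeGuard =
  <T>(schema: z.ZodType<T>) =>
  (val: unknown): val is T =>
    schema.safeParse(val).success;

// e.g., isSpandrelImageToImageModelFieldInputInstance could be written as
// buildTypeGuard(zSpandrelImageToImageModelFieldInputInstance).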
@@ -18,6 +18,7 @@ const FIELD_VALUE_FALLBACK_MAP: Record<StatefulFieldType['name'], FieldValue> =
   SDXLRefinerModelField: undefined,
   StringField: '',
   T2IAdapterModelField: undefined,
+  SpandrelImageToImageModelField: undefined,
   VAEModelField: undefined,
   ControlNetModelField: undefined,
 };
@@ -17,6 +17,7 @@ import type {
   SchedulerFieldInputTemplate,
   SDXLMainModelFieldInputTemplate,
   SDXLRefinerModelFieldInputTemplate,
+  SpandrelImageToImageModelFieldInputTemplate,
   StatefulFieldType,
   StatelessFieldInputTemplate,
   StringFieldInputTemplate,
@@ -263,6 +264,17 @@ const buildT2IAdapterModelFieldInputTemplate: FieldInputTemplateBuilder<T2IAdapterModelFieldInputTemplate> = (
   return template;
 };

+const buildSpandrelImageToImageModelFieldInputTemplate: FieldInputTemplateBuilder<
+  SpandrelImageToImageModelFieldInputTemplate
+> = ({ schemaObject, baseField, fieldType }) => {
+  const template: SpandrelImageToImageModelFieldInputTemplate = {
+    ...baseField,
+    type: fieldType,
+    default: schemaObject.default ?? undefined,
+  };
+
+  return template;
+};
 const buildBoardFieldInputTemplate: FieldInputTemplateBuilder<BoardFieldInputTemplate> = ({
   schemaObject,
   baseField,
@@ -377,6 +389,7 @@ export const TEMPLATE_BUILDER_MAP: Record<StatefulFieldType['name'], FieldInputTemplateBuilder> = {
   SDXLRefinerModelField: buildRefinerModelFieldInputTemplate,
   StringField: buildStringFieldInputTemplate,
   T2IAdapterModelField: buildT2IAdapterModelFieldInputTemplate,
+  SpandrelImageToImageModelField: buildSpandrelImageToImageModelFieldInputTemplate,
   VAEModelField: buildVAEModelFieldInputTemplate,
 } as const;
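TEMPLATE_BUILDER_MAP keys builders by the stateful field type's name, so supporting a new field type is one map entry plus one builder. A reduced sketch of the dispatch (types are stand-ins for the real FieldInputTemplateBuilder machinery):

type FieldTypeLike = { name: string };
type TemplateLike = { type: FieldTypeLike; default?: unknown };
type BuilderLike = (args: { schemaObject: { default?: unknown }; fieldType: FieldTypeLike }) => TemplateLike;

const BUILDERS: Record<string, BuilderLike> = {
  SpandrelImageToImageModelField: ({ schemaObject, fieldType }) => ({
    type: fieldType,
    default: schemaObject.default ?? undefined,
  }),
};

// Unknown names simply yield undefined, which a caller can treat as a stateless field.
const buildTemplate = (fieldType: FieldTypeLike, schemaObject: { default?: unknown }): TemplateLike | undefined =>
  BUILDERS[fieldType.name]?.({ schemaObject, fieldType });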
@@ -35,6 +35,7 @@ const MODEL_FIELD_TYPES = [
   'ControlNetModelField',
   'IPAdapterModelField',
   'T2IAdapterModelField',
+  'SpandrelImageToImageModelField',
 ];

 /**
@@ -11,6 +11,7 @@ import {
   isNonSDXLMainModelConfig,
   isRefinerMainModelModelConfig,
   isSDXLMainModelModelConfig,
+  isSpandrelImageToImageModelConfig,
   isT2IAdapterModelConfig,
   isTIModelConfig,
   isVAEModelConfig,
@@ -39,6 +40,7 @@ export const useLoRAModels = buildModelsHook(isLoRAModelConfig);
 export const useControlNetAndT2IAdapterModels = buildModelsHook(isControlNetOrT2IAdapterModelConfig);
 export const useControlNetModels = buildModelsHook(isControlNetModelConfig);
 export const useT2IAdapterModels = buildModelsHook(isT2IAdapterModelConfig);
+export const useSpandrelImageToImageModels = buildModelsHook(isSpandrelImageToImageModelConfig);
 export const useIPAdapterModels = buildModelsHook(isIPAdapterModelConfig);
 export const useEmbeddingModels = buildModelsHook(isTIModelConfig);
 export const useVAEModels = buildModelsHook(isVAEModelConfig);
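buildModelsHook itself is not in this diff; given how the hooks above are declared, it plausibly wraps the models query and filters the results with the supplied type guard. The filtering half of that idea, sketched under that assumption:

type AnyConfigLike = { type: string };

// A guard-driven filter: the predicate both selects and narrows the configs.
const filterModelsByType = <T extends AnyConfigLike>(
  configs: AnyConfigLike[],
  guard: (config: AnyConfigLike) => config is T
): T[] => configs.filter(guard);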
File diff suppressed because one or more lines are too long
@@ -51,6 +51,7 @@ export type VAEModelConfig = S['VAECheckpointConfig'] | S['VAEDiffusersConfig'];
 export type ControlNetModelConfig = S['ControlNetDiffusersConfig'] | S['ControlNetCheckpointConfig'];
 export type IPAdapterModelConfig = S['IPAdapterInvokeAIConfig'] | S['IPAdapterCheckpointConfig'];
 export type T2IAdapterModelConfig = S['T2IAdapterConfig'];
+export type SpandrelImageToImageModelConfig = S['SpandrelImageToImageConfig'];
 type TextualInversionModelConfig = S['TextualInversionFileConfig'] | S['TextualInversionFolderConfig'];
 type DiffusersModelConfig = S['MainDiffusersConfig'];
 type CheckpointModelConfig = S['MainCheckpointConfig'];
@@ -62,6 +63,7 @@ export type AnyModelConfig =
   | ControlNetModelConfig
   | IPAdapterModelConfig
   | T2IAdapterModelConfig
+  | SpandrelImageToImageModelConfig
   | TextualInversionModelConfig
   | MainModelConfig
   | CLIPVisionDiffusersConfig;
@@ -86,6 +88,12 @@ export const isT2IAdapterModelConfig = (config: AnyModelConfig): config is T2IAdapterModelConfig => {
   return config.type === 't2i_adapter';
 };

+export const isSpandrelImageToImageModelConfig = (
+  config: AnyModelConfig
+): config is SpandrelImageToImageModelConfig => {
+  return config.type === 'spandrel_image_to_image';
+};
+
 export const isControlAdapterModelConfig = (
   config: AnyModelConfig
 ): config is ControlNetModelConfig | T2IAdapterModelConfig | IPAdapterModelConfig => {
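Because isSpandrelImageToImageModelConfig is a type predicate, Array.prototype.filter narrows the element type for callers without casts. A self-contained illustration with stand-in types and hypothetical sample data:

type AnyModelConfigLike = { type: string; name: string };
type SpandrelConfigLike = AnyModelConfigLike & { type: 'spandrel_image_to_image' };

const isSpandrel = (c: AnyModelConfigLike): c is SpandrelConfigLike => c.type === 'spandrel_image_to_image';

const configs: AnyModelConfigLike[] = [
  { type: 'spandrel_image_to_image', name: 'example-upscaler' }, // hypothetical entries
  { type: 'vae', name: 'example-vae' },
];
const upscalers: SpandrelConfigLike[] = configs.filter(isSpandrel); // narrowed, no casts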
@@ -1 +1 @@
-__version__ = "4.2.6a1"
+__version__ = "4.2.6post1"
@@ -46,6 +46,7 @@ dependencies = [
   "opencv-python==4.9.0.80",
   "pytorch-lightning==2.1.3",
   "safetensors==0.4.3",
+  "spandrel==0.3.4",
   "timm==0.6.13", # needed to override timm latest in controlnet_aux, see https://github.com/isl-org/ZoeDepth/issues/26
   "torch==2.2.2",
   "torchmetrics==0.11.4",