mirror of
https://github.com/invoke-ai/InvokeAI
synced 2024-08-30 20:32:17 +00:00
feat: workflow library (#5148)
* chore: bump pydantic to 2.5.2 This release fixes pydantic/pydantic#8175 and allows us to use `JsonValue` * fix(ui): exclude public/en.json from prettier config * fix(workflow_records): fix SQLite workflow insertion to ignore duplicates * feat(backend): update workflows handling Update workflows handling for Workflow Library. **Updated Workflow Storage** "Embedded Workflows" are workflows associated with images, and are now only stored in the image files. "Library Workflows" are not associated with images, and are stored only in DB. This works out nicely. We have always saved workflows to files, but recently began saving them to the DB in addition to in image files. When that happened, we stopped reading workflows from files, so all the workflows that only existed in images were inaccessible. With this change, access to those workflows is restored, and no workflows are lost. **Updated Workflow Handling in Nodes** Prior to this change, workflows were embedded in images by passing the whole workflow JSON to a special workflow field on a node. In the node's `invoke()` function, the node was able to access this workflow and save it with the image. This (inaccurately) models workflows as a property of an image and is rather awkward technically. A workflow is now a property of a batch/session queue item. It is available in the InvocationContext and therefore available to all nodes during `invoke()`. **Database Migrations** Added a `SQLiteMigrator` class to handle database migrations. Migrations were needed to accommodate the DB-related changes in this PR. See the code for details. The `images`, `workflows` and `session_queue` tables required migrations for this PR, and are using the new migrator. Other tables/services are still creating tables themselves. A followup PR will adapt them to use the migrator. **Other/Support Changes** - Add a `has_workflow` column to `images` table to indicate that the image has an embedded workflow. 
- Add handling for retrieving the workflow from an image in python. The image file must be fetched, the workflow extracted, and then sent to client, avoiding needing the browser to parse the image file. With the `has_workflow` column, the UI knows if there is a workflow to be fetched, and only fetches when the user requests to load the workflow. - Add route to get the workflow from an image - Add CRUD service/routes for the library workflows - `workflow_images` table and services removed (no longer needed now that embedded workflows are not in the DB) * feat(ui): updated workflow handling (WIP) Clientside updates for the backend workflow changes. Includes roughed-out workflow library UI. * feat: revert SQLiteMigrator class Will pursue this in a separate PR. * feat(nodes): do not overwrite custom node module names Use a different, simpler method to detect if a node is custom. * feat(nodes): restore WithWorkflow as no-op class This class is deprecated and no longer needed. Set its workflow attr value to None (meaning it is now a no-op), and issue a warning when an invocation subclasses it. * fix(nodes): fix get_workflow from queue item dict func * feat(backend): add WorkflowRecordListItemDTO This is the id, name, description, created at and updated at workflow columns/attrs. 
Used to display lists of workflows. * chore(ui): typegen * feat(ui): add workflow loading, deleting to workflow library UI * feat(ui): workflow library pagination button styles * wip * feat: workflow library WIP - Save to library - Duplicate - Filter/sort - UI/queries * feat: workflow library - system graphs - wip * feat(backend): sync system workflows to db * fix: merge conflicts * feat: simplify default workflows - Rename "system" -> "default" - Simplify syncing logic - Update UI to match * feat(workflows): update default workflows - Update TextToImage_SD15 - Add TextToImage_SDXL - Add README * feat(ui): refine workflow list UI * fix(workflow_records): typo * fix(tests): fix tests * feat(ui): clean up workflow library hooks * fix(db): fix mis-ordered db cleanup step It was happening before pruning queue items - should happen afterwards, else you have to restart the app again to free disk space made available by the pruning. * feat(ui): tweak reset workflow editor translations * feat(ui): split out workflow redux state The `nodes` slice is a rather complicated slice. Removing `workflow` makes it a bit more reasonable. Also helps to flatten state out a bit. * docs: update default workflows README * fix: tidy up unused files, unrelated changes * fix(backend): revert unrelated service organisational changes * feat(backend): workflow_records.get_many arg "filter_text" -> "query" * feat(ui): use custom hook in current image buttons Already in use elsewhere, forgot to use it here. 
* fix(ui): remove commented out property * fix(ui): fix workflow loading - Different handling for loading from library vs external - Fix bug where only nodes and edges loaded * fix(ui): fix save/save-as workflow naming * fix(ui): fix circular dependency * fix(db): fix bug with releasing without lock in db.clean() * fix(db): remove extraneous lock * chore: bump ruff * fix(workflow_records): default `category` to `WorkflowCategory.User` This allows old workflows to validate when reading them from the db or image files. * hide workflow library buttons if feature is disabled --------- Co-authored-by: Mary Hipp <maryhipp@Marys-MacBook-Air.local>
This commit is contained in:
parent
9ba5752770
commit
c42d692ea6
@ -2,7 +2,6 @@
|
|||||||
|
|
||||||
from logging import Logger
|
from logging import Logger
|
||||||
|
|
||||||
from invokeai.app.services.workflow_image_records.workflow_image_records_sqlite import SqliteWorkflowImageRecordsStorage
|
|
||||||
from invokeai.backend.util.logging import InvokeAILogger
|
from invokeai.backend.util.logging import InvokeAILogger
|
||||||
from invokeai.version.invokeai_version import __version__
|
from invokeai.version.invokeai_version import __version__
|
||||||
|
|
||||||
@ -30,7 +29,7 @@ from ..services.session_processor.session_processor_default import DefaultSessio
|
|||||||
from ..services.session_queue.session_queue_sqlite import SqliteSessionQueue
|
from ..services.session_queue.session_queue_sqlite import SqliteSessionQueue
|
||||||
from ..services.shared.default_graphs import create_system_graphs
|
from ..services.shared.default_graphs import create_system_graphs
|
||||||
from ..services.shared.graph import GraphExecutionState, LibraryGraph
|
from ..services.shared.graph import GraphExecutionState, LibraryGraph
|
||||||
from ..services.shared.sqlite import SqliteDatabase
|
from ..services.shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
from ..services.urls.urls_default import LocalUrlService
|
from ..services.urls.urls_default import LocalUrlService
|
||||||
from ..services.workflow_records.workflow_records_sqlite import SqliteWorkflowRecordsStorage
|
from ..services.workflow_records.workflow_records_sqlite import SqliteWorkflowRecordsStorage
|
||||||
from .events import FastAPIEventService
|
from .events import FastAPIEventService
|
||||||
@ -94,7 +93,6 @@ class ApiDependencies:
|
|||||||
session_processor = DefaultSessionProcessor()
|
session_processor = DefaultSessionProcessor()
|
||||||
session_queue = SqliteSessionQueue(db=db)
|
session_queue = SqliteSessionQueue(db=db)
|
||||||
urls = LocalUrlService()
|
urls = LocalUrlService()
|
||||||
workflow_image_records = SqliteWorkflowImageRecordsStorage(db=db)
|
|
||||||
workflow_records = SqliteWorkflowRecordsStorage(db=db)
|
workflow_records = SqliteWorkflowRecordsStorage(db=db)
|
||||||
|
|
||||||
services = InvocationServices(
|
services = InvocationServices(
|
||||||
@ -121,14 +119,12 @@ class ApiDependencies:
|
|||||||
session_processor=session_processor,
|
session_processor=session_processor,
|
||||||
session_queue=session_queue,
|
session_queue=session_queue,
|
||||||
urls=urls,
|
urls=urls,
|
||||||
workflow_image_records=workflow_image_records,
|
|
||||||
workflow_records=workflow_records,
|
workflow_records=workflow_records,
|
||||||
)
|
)
|
||||||
|
|
||||||
create_system_graphs(services.graph_library)
|
create_system_graphs(services.graph_library)
|
||||||
|
|
||||||
ApiDependencies.invoker = Invoker(services)
|
ApiDependencies.invoker = Invoker(services)
|
||||||
|
|
||||||
db.clean()
|
db.clean()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
@ -8,10 +8,11 @@ from fastapi.routing import APIRouter
|
|||||||
from PIL import Image
|
from PIL import Image
|
||||||
from pydantic import BaseModel, Field, ValidationError
|
from pydantic import BaseModel, Field, ValidationError
|
||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import MetadataField, MetadataFieldValidator, WorkflowFieldValidator
|
from invokeai.app.invocations.baseinvocation import MetadataField, MetadataFieldValidator
|
||||||
from invokeai.app.services.image_records.image_records_common import ImageCategory, ImageRecordChanges, ResourceOrigin
|
from invokeai.app.services.image_records.image_records_common import ImageCategory, ImageRecordChanges, ResourceOrigin
|
||||||
from invokeai.app.services.images.images_common import ImageDTO, ImageUrlsDTO
|
from invokeai.app.services.images.images_common import ImageDTO, ImageUrlsDTO
|
||||||
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID, WorkflowWithoutIDValidator
|
||||||
|
|
||||||
from ..dependencies import ApiDependencies
|
from ..dependencies import ApiDependencies
|
||||||
|
|
||||||
@ -73,7 +74,7 @@ async def upload_image(
|
|||||||
workflow_raw = pil_image.info.get("invokeai_workflow", None)
|
workflow_raw = pil_image.info.get("invokeai_workflow", None)
|
||||||
if workflow_raw is not None:
|
if workflow_raw is not None:
|
||||||
try:
|
try:
|
||||||
workflow = WorkflowFieldValidator.validate_json(workflow_raw)
|
workflow = WorkflowWithoutIDValidator.validate_json(workflow_raw)
|
||||||
except ValidationError:
|
except ValidationError:
|
||||||
ApiDependencies.invoker.services.logger.warn("Failed to parse metadata for uploaded image")
|
ApiDependencies.invoker.services.logger.warn("Failed to parse metadata for uploaded image")
|
||||||
pass
|
pass
|
||||||
@ -184,6 +185,18 @@ async def get_image_metadata(
|
|||||||
raise HTTPException(status_code=404)
|
raise HTTPException(status_code=404)
|
||||||
|
|
||||||
|
|
||||||
|
@images_router.get(
|
||||||
|
"/i/{image_name}/workflow", operation_id="get_image_workflow", response_model=Optional[WorkflowWithoutID]
|
||||||
|
)
|
||||||
|
async def get_image_workflow(
|
||||||
|
image_name: str = Path(description="The name of image whose workflow to get"),
|
||||||
|
) -> Optional[WorkflowWithoutID]:
|
||||||
|
try:
|
||||||
|
return ApiDependencies.invoker.services.images.get_workflow(image_name)
|
||||||
|
except Exception:
|
||||||
|
raise HTTPException(status_code=404)
|
||||||
|
|
||||||
|
|
||||||
@images_router.api_route(
|
@images_router.api_route(
|
||||||
"/i/{image_name}/full",
|
"/i/{image_name}/full",
|
||||||
methods=["GET", "HEAD"],
|
methods=["GET", "HEAD"],
|
||||||
|
@ -1,7 +1,19 @@
|
|||||||
from fastapi import APIRouter, Path
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Body, HTTPException, Path, Query
|
||||||
|
|
||||||
from invokeai.app.api.dependencies import ApiDependencies
|
from invokeai.app.api.dependencies import ApiDependencies
|
||||||
from invokeai.app.invocations.baseinvocation import WorkflowField
|
from invokeai.app.services.shared.pagination import PaginatedResults
|
||||||
|
from invokeai.app.services.shared.sqlite.sqlite_common import SQLiteDirection
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import (
|
||||||
|
Workflow,
|
||||||
|
WorkflowCategory,
|
||||||
|
WorkflowNotFoundError,
|
||||||
|
WorkflowRecordDTO,
|
||||||
|
WorkflowRecordListItemDTO,
|
||||||
|
WorkflowRecordOrderBy,
|
||||||
|
WorkflowWithoutID,
|
||||||
|
)
|
||||||
|
|
||||||
workflows_router = APIRouter(prefix="/v1/workflows", tags=["workflows"])
|
workflows_router = APIRouter(prefix="/v1/workflows", tags=["workflows"])
|
||||||
|
|
||||||
@ -10,11 +22,76 @@ workflows_router = APIRouter(prefix="/v1/workflows", tags=["workflows"])
|
|||||||
"/i/{workflow_id}",
|
"/i/{workflow_id}",
|
||||||
operation_id="get_workflow",
|
operation_id="get_workflow",
|
||||||
responses={
|
responses={
|
||||||
200: {"model": WorkflowField},
|
200: {"model": WorkflowRecordDTO},
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
async def get_workflow(
|
async def get_workflow(
|
||||||
workflow_id: str = Path(description="The workflow to get"),
|
workflow_id: str = Path(description="The workflow to get"),
|
||||||
) -> WorkflowField:
|
) -> WorkflowRecordDTO:
|
||||||
"""Gets a workflow"""
|
"""Gets a workflow"""
|
||||||
return ApiDependencies.invoker.services.workflow_records.get(workflow_id)
|
try:
|
||||||
|
return ApiDependencies.invoker.services.workflow_records.get(workflow_id)
|
||||||
|
except WorkflowNotFoundError:
|
||||||
|
raise HTTPException(status_code=404, detail="Workflow not found")
|
||||||
|
|
||||||
|
|
||||||
|
@workflows_router.patch(
|
||||||
|
"/i/{workflow_id}",
|
||||||
|
operation_id="update_workflow",
|
||||||
|
responses={
|
||||||
|
200: {"model": WorkflowRecordDTO},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
async def update_workflow(
|
||||||
|
workflow: Workflow = Body(description="The updated workflow", embed=True),
|
||||||
|
) -> WorkflowRecordDTO:
|
||||||
|
"""Updates a workflow"""
|
||||||
|
return ApiDependencies.invoker.services.workflow_records.update(workflow=workflow)
|
||||||
|
|
||||||
|
|
||||||
|
@workflows_router.delete(
|
||||||
|
"/i/{workflow_id}",
|
||||||
|
operation_id="delete_workflow",
|
||||||
|
)
|
||||||
|
async def delete_workflow(
|
||||||
|
workflow_id: str = Path(description="The workflow to delete"),
|
||||||
|
) -> None:
|
||||||
|
"""Deletes a workflow"""
|
||||||
|
ApiDependencies.invoker.services.workflow_records.delete(workflow_id)
|
||||||
|
|
||||||
|
|
||||||
|
@workflows_router.post(
|
||||||
|
"/",
|
||||||
|
operation_id="create_workflow",
|
||||||
|
responses={
|
||||||
|
200: {"model": WorkflowRecordDTO},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
async def create_workflow(
|
||||||
|
workflow: WorkflowWithoutID = Body(description="The workflow to create", embed=True),
|
||||||
|
) -> WorkflowRecordDTO:
|
||||||
|
"""Creates a workflow"""
|
||||||
|
return ApiDependencies.invoker.services.workflow_records.create(workflow=workflow)
|
||||||
|
|
||||||
|
|
||||||
|
@workflows_router.get(
|
||||||
|
"/",
|
||||||
|
operation_id="list_workflows",
|
||||||
|
responses={
|
||||||
|
200: {"model": PaginatedResults[WorkflowRecordListItemDTO]},
|
||||||
|
},
|
||||||
|
)
|
||||||
|
async def list_workflows(
|
||||||
|
page: int = Query(default=0, description="The page to get"),
|
||||||
|
per_page: int = Query(default=10, description="The number of workflows per page"),
|
||||||
|
order_by: WorkflowRecordOrderBy = Query(
|
||||||
|
default=WorkflowRecordOrderBy.Name, description="The attribute to order by"
|
||||||
|
),
|
||||||
|
direction: SQLiteDirection = Query(default=SQLiteDirection.Ascending, description="The direction to order by"),
|
||||||
|
category: WorkflowCategory = Query(default=WorkflowCategory.User, description="The category of workflow to get"),
|
||||||
|
query: Optional[str] = Query(default=None, description="The text to query by (matches name and description)"),
|
||||||
|
) -> PaginatedResults[WorkflowRecordListItemDTO]:
|
||||||
|
"""Gets a page of workflows"""
|
||||||
|
return ApiDependencies.invoker.services.workflow_records.get_many(
|
||||||
|
page=page, per_page=per_page, order_by=order_by, direction=direction, query=query, category=category
|
||||||
|
)
|
||||||
|
@ -16,6 +16,7 @@ from pydantic.fields import FieldInfo, _Unset
|
|||||||
from pydantic_core import PydanticUndefined
|
from pydantic_core import PydanticUndefined
|
||||||
|
|
||||||
from invokeai.app.services.config.config_default import InvokeAIAppConfig
|
from invokeai.app.services.config.config_default import InvokeAIAppConfig
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID
|
||||||
from invokeai.app.shared.fields import FieldDescriptions
|
from invokeai.app.shared.fields import FieldDescriptions
|
||||||
from invokeai.app.util.metaenum import MetaEnum
|
from invokeai.app.util.metaenum import MetaEnum
|
||||||
from invokeai.app.util.misc import uuid_string
|
from invokeai.app.util.misc import uuid_string
|
||||||
@ -452,6 +453,7 @@ class InvocationContext:
|
|||||||
queue_id: str
|
queue_id: str
|
||||||
queue_item_id: int
|
queue_item_id: int
|
||||||
queue_batch_id: str
|
queue_batch_id: str
|
||||||
|
workflow: Optional[WorkflowWithoutID]
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
@ -460,12 +462,14 @@ class InvocationContext:
|
|||||||
queue_item_id: int,
|
queue_item_id: int,
|
||||||
queue_batch_id: str,
|
queue_batch_id: str,
|
||||||
graph_execution_state_id: str,
|
graph_execution_state_id: str,
|
||||||
|
workflow: Optional[WorkflowWithoutID],
|
||||||
):
|
):
|
||||||
self.services = services
|
self.services = services
|
||||||
self.graph_execution_state_id = graph_execution_state_id
|
self.graph_execution_state_id = graph_execution_state_id
|
||||||
self.queue_id = queue_id
|
self.queue_id = queue_id
|
||||||
self.queue_item_id = queue_item_id
|
self.queue_item_id = queue_item_id
|
||||||
self.queue_batch_id = queue_batch_id
|
self.queue_batch_id = queue_batch_id
|
||||||
|
self.workflow = workflow
|
||||||
|
|
||||||
|
|
||||||
class BaseInvocationOutput(BaseModel):
|
class BaseInvocationOutput(BaseModel):
|
||||||
@ -807,9 +811,9 @@ def invocation(
|
|||||||
cls.UIConfig.category = category
|
cls.UIConfig.category = category
|
||||||
|
|
||||||
# Grab the node pack's name from the module name, if it's a custom node
|
# Grab the node pack's name from the module name, if it's a custom node
|
||||||
module_name = cls.__module__.split(".")[0]
|
is_custom_node = cls.__module__.rsplit(".", 1)[0] == "invokeai.app.invocations"
|
||||||
if module_name.endswith(CUSTOM_NODE_PACK_SUFFIX):
|
if is_custom_node:
|
||||||
cls.UIConfig.node_pack = module_name.split(CUSTOM_NODE_PACK_SUFFIX)[0]
|
cls.UIConfig.node_pack = cls.__module__.split(".")[0]
|
||||||
else:
|
else:
|
||||||
cls.UIConfig.node_pack = None
|
cls.UIConfig.node_pack = None
|
||||||
|
|
||||||
@ -903,24 +907,6 @@ def invocation_output(
|
|||||||
return wrapper
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
class WorkflowField(RootModel):
|
|
||||||
"""
|
|
||||||
Pydantic model for workflows with custom root of type dict[str, Any].
|
|
||||||
Workflows are stored without a strict schema.
|
|
||||||
"""
|
|
||||||
|
|
||||||
root: dict[str, Any] = Field(description="The workflow")
|
|
||||||
|
|
||||||
|
|
||||||
WorkflowFieldValidator = TypeAdapter(WorkflowField)
|
|
||||||
|
|
||||||
|
|
||||||
class WithWorkflow(BaseModel):
|
|
||||||
workflow: Optional[WorkflowField] = Field(
|
|
||||||
default=None, description=FieldDescriptions.workflow, json_schema_extra={"field_kind": FieldKind.NodeAttribute}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class MetadataField(RootModel):
|
class MetadataField(RootModel):
|
||||||
"""
|
"""
|
||||||
Pydantic model for metadata with custom root of type dict[str, Any].
|
Pydantic model for metadata with custom root of type dict[str, Any].
|
||||||
@ -943,3 +929,13 @@ class WithMetadata(BaseModel):
|
|||||||
orig_required=False,
|
orig_required=False,
|
||||||
).model_dump(exclude_none=True),
|
).model_dump(exclude_none=True),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class WithWorkflow:
|
||||||
|
workflow = None
|
||||||
|
|
||||||
|
def __init_subclass__(cls) -> None:
|
||||||
|
logger.warn(
|
||||||
|
f"{cls.__module__.split('.')[0]}.{cls.__name__}: WithWorkflow is deprecated. Use `context.workflow` to access the workflow."
|
||||||
|
)
|
||||||
|
super().__init_subclass__()
|
||||||
|
@ -39,7 +39,6 @@ from .baseinvocation import (
|
|||||||
InvocationContext,
|
InvocationContext,
|
||||||
OutputField,
|
OutputField,
|
||||||
WithMetadata,
|
WithMetadata,
|
||||||
WithWorkflow,
|
|
||||||
invocation,
|
invocation,
|
||||||
invocation_output,
|
invocation_output,
|
||||||
)
|
)
|
||||||
@ -129,7 +128,7 @@ class ControlNetInvocation(BaseInvocation):
|
|||||||
|
|
||||||
|
|
||||||
# This invocation exists for other invocations to subclass it - do not register with @invocation!
|
# This invocation exists for other invocations to subclass it - do not register with @invocation!
|
||||||
class ImageProcessorInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
class ImageProcessorInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Base class for invocations that preprocess images for ControlNet"""
|
"""Base class for invocations that preprocess images for ControlNet"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to process")
|
image: ImageField = InputField(description="The image to process")
|
||||||
@ -153,7 +152,7 @@ class ImageProcessorInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
node_id=self.id,
|
node_id=self.id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
"""Builds an ImageOutput and its ImageField"""
|
"""Builds an ImageOutput and its ImageField"""
|
||||||
@ -173,7 +172,7 @@ class ImageProcessorInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
title="Canny Processor",
|
title="Canny Processor",
|
||||||
tags=["controlnet", "canny"],
|
tags=["controlnet", "canny"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class CannyImageProcessorInvocation(ImageProcessorInvocation):
|
class CannyImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Canny edge detection for ControlNet"""
|
"""Canny edge detection for ControlNet"""
|
||||||
@ -196,7 +195,7 @@ class CannyImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="HED (softedge) Processor",
|
title="HED (softedge) Processor",
|
||||||
tags=["controlnet", "hed", "softedge"],
|
tags=["controlnet", "hed", "softedge"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class HedImageProcessorInvocation(ImageProcessorInvocation):
|
class HedImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies HED edge detection to image"""
|
"""Applies HED edge detection to image"""
|
||||||
@ -225,7 +224,7 @@ class HedImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Lineart Processor",
|
title="Lineart Processor",
|
||||||
tags=["controlnet", "lineart"],
|
tags=["controlnet", "lineart"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class LineartImageProcessorInvocation(ImageProcessorInvocation):
|
class LineartImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies line art processing to image"""
|
"""Applies line art processing to image"""
|
||||||
@ -247,7 +246,7 @@ class LineartImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Lineart Anime Processor",
|
title="Lineart Anime Processor",
|
||||||
tags=["controlnet", "lineart", "anime"],
|
tags=["controlnet", "lineart", "anime"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class LineartAnimeImageProcessorInvocation(ImageProcessorInvocation):
|
class LineartAnimeImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies line art anime processing to image"""
|
"""Applies line art anime processing to image"""
|
||||||
@ -270,7 +269,7 @@ class LineartAnimeImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Openpose Processor",
|
title="Openpose Processor",
|
||||||
tags=["controlnet", "openpose", "pose"],
|
tags=["controlnet", "openpose", "pose"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class OpenposeImageProcessorInvocation(ImageProcessorInvocation):
|
class OpenposeImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies Openpose processing to image"""
|
"""Applies Openpose processing to image"""
|
||||||
@ -295,7 +294,7 @@ class OpenposeImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Midas Depth Processor",
|
title="Midas Depth Processor",
|
||||||
tags=["controlnet", "midas"],
|
tags=["controlnet", "midas"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class MidasDepthImageProcessorInvocation(ImageProcessorInvocation):
|
class MidasDepthImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies Midas depth processing to image"""
|
"""Applies Midas depth processing to image"""
|
||||||
@ -322,7 +321,7 @@ class MidasDepthImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Normal BAE Processor",
|
title="Normal BAE Processor",
|
||||||
tags=["controlnet"],
|
tags=["controlnet"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class NormalbaeImageProcessorInvocation(ImageProcessorInvocation):
|
class NormalbaeImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies NormalBae processing to image"""
|
"""Applies NormalBae processing to image"""
|
||||||
@ -339,7 +338,7 @@ class NormalbaeImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
|
|
||||||
|
|
||||||
@invocation(
|
@invocation(
|
||||||
"mlsd_image_processor", title="MLSD Processor", tags=["controlnet", "mlsd"], category="controlnet", version="1.1.0"
|
"mlsd_image_processor", title="MLSD Processor", tags=["controlnet", "mlsd"], category="controlnet", version="1.2.0"
|
||||||
)
|
)
|
||||||
class MlsdImageProcessorInvocation(ImageProcessorInvocation):
|
class MlsdImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies MLSD processing to image"""
|
"""Applies MLSD processing to image"""
|
||||||
@ -362,7 +361,7 @@ class MlsdImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
|
|
||||||
|
|
||||||
@invocation(
|
@invocation(
|
||||||
"pidi_image_processor", title="PIDI Processor", tags=["controlnet", "pidi"], category="controlnet", version="1.1.0"
|
"pidi_image_processor", title="PIDI Processor", tags=["controlnet", "pidi"], category="controlnet", version="1.2.0"
|
||||||
)
|
)
|
||||||
class PidiImageProcessorInvocation(ImageProcessorInvocation):
|
class PidiImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies PIDI processing to image"""
|
"""Applies PIDI processing to image"""
|
||||||
@ -389,7 +388,7 @@ class PidiImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Content Shuffle Processor",
|
title="Content Shuffle Processor",
|
||||||
tags=["controlnet", "contentshuffle"],
|
tags=["controlnet", "contentshuffle"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class ContentShuffleImageProcessorInvocation(ImageProcessorInvocation):
|
class ContentShuffleImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies content shuffle processing to image"""
|
"""Applies content shuffle processing to image"""
|
||||||
@ -419,7 +418,7 @@ class ContentShuffleImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Zoe (Depth) Processor",
|
title="Zoe (Depth) Processor",
|
||||||
tags=["controlnet", "zoe", "depth"],
|
tags=["controlnet", "zoe", "depth"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class ZoeDepthImageProcessorInvocation(ImageProcessorInvocation):
|
class ZoeDepthImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies Zoe depth processing to image"""
|
"""Applies Zoe depth processing to image"""
|
||||||
@ -435,7 +434,7 @@ class ZoeDepthImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Mediapipe Face Processor",
|
title="Mediapipe Face Processor",
|
||||||
tags=["controlnet", "mediapipe", "face"],
|
tags=["controlnet", "mediapipe", "face"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class MediapipeFaceProcessorInvocation(ImageProcessorInvocation):
|
class MediapipeFaceProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies mediapipe face processing to image"""
|
"""Applies mediapipe face processing to image"""
|
||||||
@ -458,7 +457,7 @@ class MediapipeFaceProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Leres (Depth) Processor",
|
title="Leres (Depth) Processor",
|
||||||
tags=["controlnet", "leres", "depth"],
|
tags=["controlnet", "leres", "depth"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class LeresImageProcessorInvocation(ImageProcessorInvocation):
|
class LeresImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies leres processing to image"""
|
"""Applies leres processing to image"""
|
||||||
@ -487,7 +486,7 @@ class LeresImageProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Tile Resample Processor",
|
title="Tile Resample Processor",
|
||||||
tags=["controlnet", "tile"],
|
tags=["controlnet", "tile"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class TileResamplerProcessorInvocation(ImageProcessorInvocation):
|
class TileResamplerProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Tile resampler processor"""
|
"""Tile resampler processor"""
|
||||||
@ -527,7 +526,7 @@ class TileResamplerProcessorInvocation(ImageProcessorInvocation):
|
|||||||
title="Segment Anything Processor",
|
title="Segment Anything Processor",
|
||||||
tags=["controlnet", "segmentanything"],
|
tags=["controlnet", "segmentanything"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class SegmentAnythingProcessorInvocation(ImageProcessorInvocation):
|
class SegmentAnythingProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Applies segment anything processing to image"""
|
"""Applies segment anything processing to image"""
|
||||||
@ -569,7 +568,7 @@ class SamDetectorReproducibleColors(SamDetector):
|
|||||||
title="Color Map Processor",
|
title="Color Map Processor",
|
||||||
tags=["controlnet"],
|
tags=["controlnet"],
|
||||||
category="controlnet",
|
category="controlnet",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class ColorMapImageProcessorInvocation(ImageProcessorInvocation):
|
class ColorMapImageProcessorInvocation(ImageProcessorInvocation):
|
||||||
"""Generates a color map from the provided image"""
|
"""Generates a color map from the provided image"""
|
||||||
|
@ -6,7 +6,6 @@ import sys
|
|||||||
from importlib.util import module_from_spec, spec_from_file_location
|
from importlib.util import module_from_spec, spec_from_file_location
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import CUSTOM_NODE_PACK_SUFFIX
|
|
||||||
from invokeai.backend.util.logging import InvokeAILogger
|
from invokeai.backend.util.logging import InvokeAILogger
|
||||||
|
|
||||||
logger = InvokeAILogger.get_logger()
|
logger = InvokeAILogger.get_logger()
|
||||||
@ -34,7 +33,7 @@ for d in Path(__file__).parent.iterdir():
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
# load the module, appending adding a suffix to identify it as a custom node pack
|
# load the module, appending adding a suffix to identify it as a custom node pack
|
||||||
spec = spec_from_file_location(f"{module_name}{CUSTOM_NODE_PACK_SUFFIX}", init.absolute())
|
spec = spec_from_file_location(module_name, init.absolute())
|
||||||
|
|
||||||
if spec is None or spec.loader is None:
|
if spec is None or spec.loader is None:
|
||||||
logger.warn(f"Could not load {init}")
|
logger.warn(f"Could not load {init}")
|
||||||
|
@ -8,11 +8,11 @@ from PIL import Image, ImageOps
|
|||||||
from invokeai.app.invocations.primitives import ImageField, ImageOutput
|
from invokeai.app.invocations.primitives import ImageField, ImageOutput
|
||||||
from invokeai.app.services.image_records.image_records_common import ImageCategory, ResourceOrigin
|
from invokeai.app.services.image_records.image_records_common import ImageCategory, ResourceOrigin
|
||||||
|
|
||||||
from .baseinvocation import BaseInvocation, InputField, InvocationContext, WithMetadata, WithWorkflow, invocation
|
from .baseinvocation import BaseInvocation, InputField, InvocationContext, WithMetadata, invocation
|
||||||
|
|
||||||
|
|
||||||
@invocation("cv_inpaint", title="OpenCV Inpaint", tags=["opencv", "inpaint"], category="inpaint", version="1.1.0")
|
@invocation("cv_inpaint", title="OpenCV Inpaint", tags=["opencv", "inpaint"], category="inpaint", version="1.2.0")
|
||||||
class CvInpaintInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
class CvInpaintInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Simple inpaint using opencv."""
|
"""Simple inpaint using opencv."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to inpaint")
|
image: ImageField = InputField(description="The image to inpaint")
|
||||||
@ -41,7 +41,7 @@ class CvInpaintInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
node_id=self.id,
|
node_id=self.id,
|
||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
|
@ -17,7 +17,6 @@ from invokeai.app.invocations.baseinvocation import (
|
|||||||
InvocationContext,
|
InvocationContext,
|
||||||
OutputField,
|
OutputField,
|
||||||
WithMetadata,
|
WithMetadata,
|
||||||
WithWorkflow,
|
|
||||||
invocation,
|
invocation,
|
||||||
invocation_output,
|
invocation_output,
|
||||||
)
|
)
|
||||||
@ -438,8 +437,8 @@ def get_faces_list(
|
|||||||
return all_faces
|
return all_faces
|
||||||
|
|
||||||
|
|
||||||
@invocation("face_off", title="FaceOff", tags=["image", "faceoff", "face", "mask"], category="image", version="1.1.0")
|
@invocation("face_off", title="FaceOff", tags=["image", "faceoff", "face", "mask"], category="image", version="1.2.0")
|
||||||
class FaceOffInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class FaceOffInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Bound, extract, and mask a face from an image using MediaPipe detection"""
|
"""Bound, extract, and mask a face from an image using MediaPipe detection"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="Image for face detection")
|
image: ImageField = InputField(description="Image for face detection")
|
||||||
@ -508,7 +507,7 @@ class FaceOffInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
node_id=self.id,
|
node_id=self.id,
|
||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
mask_dto = context.services.images.create(
|
mask_dto = context.services.images.create(
|
||||||
@ -532,8 +531,8 @@ class FaceOffInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
return output
|
return output
|
||||||
|
|
||||||
|
|
||||||
@invocation("face_mask_detection", title="FaceMask", tags=["image", "face", "mask"], category="image", version="1.1.0")
|
@invocation("face_mask_detection", title="FaceMask", tags=["image", "face", "mask"], category="image", version="1.2.0")
|
||||||
class FaceMaskInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class FaceMaskInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Face mask creation using mediapipe face detection"""
|
"""Face mask creation using mediapipe face detection"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="Image to face detect")
|
image: ImageField = InputField(description="Image to face detect")
|
||||||
@ -627,7 +626,7 @@ class FaceMaskInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
node_id=self.id,
|
node_id=self.id,
|
||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
mask_dto = context.services.images.create(
|
mask_dto = context.services.images.create(
|
||||||
@ -650,9 +649,9 @@ class FaceMaskInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
|
|
||||||
|
|
||||||
@invocation(
|
@invocation(
|
||||||
"face_identifier", title="FaceIdentifier", tags=["image", "face", "identifier"], category="image", version="1.1.0"
|
"face_identifier", title="FaceIdentifier", tags=["image", "face", "identifier"], category="image", version="1.2.0"
|
||||||
)
|
)
|
||||||
class FaceIdentifierInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class FaceIdentifierInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Outputs an image with detected face IDs printed on each face. For use with other FaceTools."""
|
"""Outputs an image with detected face IDs printed on each face. For use with other FaceTools."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="Image to face detect")
|
image: ImageField = InputField(description="Image to face detect")
|
||||||
@ -716,7 +715,7 @@ class FaceIdentifierInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
node_id=self.id,
|
node_id=self.id,
|
||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
|
@ -13,7 +13,7 @@ from invokeai.app.shared.fields import FieldDescriptions
|
|||||||
from invokeai.backend.image_util.invisible_watermark import InvisibleWatermark
|
from invokeai.backend.image_util.invisible_watermark import InvisibleWatermark
|
||||||
from invokeai.backend.image_util.safety_checker import SafetyChecker
|
from invokeai.backend.image_util.safety_checker import SafetyChecker
|
||||||
|
|
||||||
from .baseinvocation import BaseInvocation, Input, InputField, InvocationContext, WithMetadata, WithWorkflow, invocation
|
from .baseinvocation import BaseInvocation, Input, InputField, InvocationContext, WithMetadata, invocation
|
||||||
|
|
||||||
|
|
||||||
@invocation("show_image", title="Show Image", tags=["image"], category="image", version="1.0.0")
|
@invocation("show_image", title="Show Image", tags=["image"], category="image", version="1.0.0")
|
||||||
@ -36,8 +36,14 @@ class ShowImageInvocation(BaseInvocation):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("blank_image", title="Blank Image", tags=["image"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class BlankImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
"blank_image",
|
||||||
|
title="Blank Image",
|
||||||
|
tags=["image"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class BlankImageInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Creates a blank image and forwards it to the pipeline"""
|
"""Creates a blank image and forwards it to the pipeline"""
|
||||||
|
|
||||||
width: int = InputField(default=512, description="The width of the image")
|
width: int = InputField(default=512, description="The width of the image")
|
||||||
@ -56,7 +62,7 @@ class BlankImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -66,8 +72,14 @@ class BlankImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_crop", title="Crop Image", tags=["image", "crop"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageCropInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"img_crop",
|
||||||
|
title="Crop Image",
|
||||||
|
tags=["image", "crop"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageCropInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Crops an image to a specified box. The box can be outside of the image."""
|
"""Crops an image to a specified box. The box can be outside of the image."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to crop")
|
image: ImageField = InputField(description="The image to crop")
|
||||||
@ -90,7 +102,7 @@ class ImageCropInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -101,11 +113,11 @@ class ImageCropInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
|
|
||||||
|
|
||||||
@invocation(
|
@invocation(
|
||||||
invocation_type="img_pad_crop",
|
"img_paste",
|
||||||
title="Center Pad or Crop Image",
|
title="Paste Image",
|
||||||
|
tags=["image", "paste"],
|
||||||
category="image",
|
category="image",
|
||||||
tags=["image", "pad", "crop"],
|
version="1.2.0",
|
||||||
version="1.0.0",
|
|
||||||
)
|
)
|
||||||
class CenterPadCropInvocation(BaseInvocation):
|
class CenterPadCropInvocation(BaseInvocation):
|
||||||
"""Pad or crop an image's sides from the center by specified pixels. Positive values are outside of the image."""
|
"""Pad or crop an image's sides from the center by specified pixels. Positive values are outside of the image."""
|
||||||
@ -155,8 +167,14 @@ class CenterPadCropInvocation(BaseInvocation):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_paste", title="Paste Image", tags=["image", "paste"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImagePasteInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
invocation_type="img_pad_crop",
|
||||||
|
title="Center Pad or Crop Image",
|
||||||
|
category="image",
|
||||||
|
tags=["image", "pad", "crop"],
|
||||||
|
version="1.0.0",
|
||||||
|
)
|
||||||
|
class ImagePasteInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Pastes an image into another image."""
|
"""Pastes an image into another image."""
|
||||||
|
|
||||||
base_image: ImageField = InputField(description="The base image")
|
base_image: ImageField = InputField(description="The base image")
|
||||||
@ -199,7 +217,7 @@ class ImagePasteInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -209,8 +227,14 @@ class ImagePasteInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("tomask", title="Mask from Alpha", tags=["image", "mask"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class MaskFromAlphaInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"tomask",
|
||||||
|
title="Mask from Alpha",
|
||||||
|
tags=["image", "mask"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class MaskFromAlphaInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Extracts the alpha channel of an image as a mask."""
|
"""Extracts the alpha channel of an image as a mask."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to create the mask from")
|
image: ImageField = InputField(description="The image to create the mask from")
|
||||||
@ -231,7 +255,7 @@ class MaskFromAlphaInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -241,8 +265,14 @@ class MaskFromAlphaInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_mul", title="Multiply Images", tags=["image", "multiply"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageMultiplyInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"img_mul",
|
||||||
|
title="Multiply Images",
|
||||||
|
tags=["image", "multiply"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageMultiplyInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Multiplies two images together using `PIL.ImageChops.multiply()`."""
|
"""Multiplies two images together using `PIL.ImageChops.multiply()`."""
|
||||||
|
|
||||||
image1: ImageField = InputField(description="The first image to multiply")
|
image1: ImageField = InputField(description="The first image to multiply")
|
||||||
@ -262,7 +292,7 @@ class ImageMultiplyInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -275,8 +305,14 @@ class ImageMultiplyInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
IMAGE_CHANNELS = Literal["A", "R", "G", "B"]
|
IMAGE_CHANNELS = Literal["A", "R", "G", "B"]
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_chan", title="Extract Image Channel", tags=["image", "channel"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageChannelInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"img_chan",
|
||||||
|
title="Extract Image Channel",
|
||||||
|
tags=["image", "channel"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageChannelInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Gets a channel from an image."""
|
"""Gets a channel from an image."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to get the channel from")
|
image: ImageField = InputField(description="The image to get the channel from")
|
||||||
@ -295,7 +331,7 @@ class ImageChannelInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -308,8 +344,14 @@ class ImageChannelInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
IMAGE_MODES = Literal["L", "RGB", "RGBA", "CMYK", "YCbCr", "LAB", "HSV", "I", "F"]
|
IMAGE_MODES = Literal["L", "RGB", "RGBA", "CMYK", "YCbCr", "LAB", "HSV", "I", "F"]
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_conv", title="Convert Image Mode", tags=["image", "convert"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageConvertInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"img_conv",
|
||||||
|
title="Convert Image Mode",
|
||||||
|
tags=["image", "convert"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageConvertInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Converts an image to a different mode."""
|
"""Converts an image to a different mode."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to convert")
|
image: ImageField = InputField(description="The image to convert")
|
||||||
@ -328,7 +370,7 @@ class ImageConvertInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -338,8 +380,14 @@ class ImageConvertInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_blur", title="Blur Image", tags=["image", "blur"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageBlurInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"img_blur",
|
||||||
|
title="Blur Image",
|
||||||
|
tags=["image", "blur"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageBlurInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Blurs an image"""
|
"""Blurs an image"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to blur")
|
image: ImageField = InputField(description="The image to blur")
|
||||||
@ -363,7 +411,7 @@ class ImageBlurInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -393,8 +441,14 @@ PIL_RESAMPLING_MAP = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_resize", title="Resize Image", tags=["image", "resize"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageResizeInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
"img_resize",
|
||||||
|
title="Resize Image",
|
||||||
|
tags=["image", "resize"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageResizeInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Resizes an image to specific dimensions"""
|
"""Resizes an image to specific dimensions"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to resize")
|
image: ImageField = InputField(description="The image to resize")
|
||||||
@ -420,7 +474,7 @@ class ImageResizeInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -430,8 +484,14 @@ class ImageResizeInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_scale", title="Scale Image", tags=["image", "scale"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageScaleInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
"img_scale",
|
||||||
|
title="Scale Image",
|
||||||
|
tags=["image", "scale"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageScaleInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Scales an image by a factor"""
|
"""Scales an image by a factor"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to scale")
|
image: ImageField = InputField(description="The image to scale")
|
||||||
@ -462,7 +522,7 @@ class ImageScaleInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -472,8 +532,14 @@ class ImageScaleInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_lerp", title="Lerp Image", tags=["image", "lerp"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageLerpInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"img_lerp",
|
||||||
|
title="Lerp Image",
|
||||||
|
tags=["image", "lerp"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageLerpInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Linear interpolation of all pixels of an image"""
|
"""Linear interpolation of all pixels of an image"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to lerp")
|
image: ImageField = InputField(description="The image to lerp")
|
||||||
@ -496,7 +562,7 @@ class ImageLerpInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -506,8 +572,14 @@ class ImageLerpInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_ilerp", title="Inverse Lerp Image", tags=["image", "ilerp"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageInverseLerpInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"img_ilerp",
|
||||||
|
title="Inverse Lerp Image",
|
||||||
|
tags=["image", "ilerp"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageInverseLerpInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Inverse linear interpolation of all pixels of an image"""
|
"""Inverse linear interpolation of all pixels of an image"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to lerp")
|
image: ImageField = InputField(description="The image to lerp")
|
||||||
@ -530,7 +602,7 @@ class ImageInverseLerpInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -540,8 +612,14 @@ class ImageInverseLerpInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_nsfw", title="Blur NSFW Image", tags=["image", "nsfw"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageNSFWBlurInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
"img_nsfw",
|
||||||
|
title="Blur NSFW Image",
|
||||||
|
tags=["image", "nsfw"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageNSFWBlurInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Add blur to NSFW-flagged images"""
|
"""Add blur to NSFW-flagged images"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to check")
|
image: ImageField = InputField(description="The image to check")
|
||||||
@ -566,7 +644,7 @@ class ImageNSFWBlurInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -587,9 +665,9 @@ class ImageNSFWBlurInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
title="Add Invisible Watermark",
|
title="Add Invisible Watermark",
|
||||||
tags=["image", "watermark"],
|
tags=["image", "watermark"],
|
||||||
category="image",
|
category="image",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class ImageWatermarkInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
class ImageWatermarkInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Add an invisible watermark to an image"""
|
"""Add an invisible watermark to an image"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to check")
|
image: ImageField = InputField(description="The image to check")
|
||||||
@ -606,7 +684,7 @@ class ImageWatermarkInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -616,8 +694,14 @@ class ImageWatermarkInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("mask_edge", title="Mask Edge", tags=["image", "mask", "inpaint"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class MaskEdgeInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"mask_edge",
|
||||||
|
title="Mask Edge",
|
||||||
|
tags=["image", "mask", "inpaint"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class MaskEdgeInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Applies an edge mask to an image"""
|
"""Applies an edge mask to an image"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to apply the mask to")
|
image: ImageField = InputField(description="The image to apply the mask to")
|
||||||
@ -652,7 +736,7 @@ class MaskEdgeInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -667,9 +751,9 @@ class MaskEdgeInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
title="Combine Masks",
|
title="Combine Masks",
|
||||||
tags=["image", "mask", "multiply"],
|
tags=["image", "mask", "multiply"],
|
||||||
category="image",
|
category="image",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class MaskCombineInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class MaskCombineInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Combine two masks together by multiplying them using `PIL.ImageChops.multiply()`."""
|
"""Combine two masks together by multiplying them using `PIL.ImageChops.multiply()`."""
|
||||||
|
|
||||||
mask1: ImageField = InputField(description="The first mask to combine")
|
mask1: ImageField = InputField(description="The first mask to combine")
|
||||||
@ -689,7 +773,7 @@ class MaskCombineInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -699,8 +783,14 @@ class MaskCombineInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("color_correct", title="Color Correct", tags=["image", "color"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ColorCorrectInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"color_correct",
|
||||||
|
title="Color Correct",
|
||||||
|
tags=["image", "color"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ColorCorrectInvocation(BaseInvocation, WithMetadata):
|
||||||
"""
|
"""
|
||||||
Shifts the colors of a target image to match the reference image, optionally
|
Shifts the colors of a target image to match the reference image, optionally
|
||||||
using a mask to only color-correct certain regions of the target image.
|
using a mask to only color-correct certain regions of the target image.
|
||||||
@ -800,7 +890,7 @@ class ColorCorrectInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -810,8 +900,14 @@ class ColorCorrectInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("img_hue_adjust", title="Adjust Image Hue", tags=["image", "hue"], category="image", version="1.1.0")
|
@invocation(
|
||||||
class ImageHueAdjustmentInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
"img_hue_adjust",
|
||||||
|
title="Adjust Image Hue",
|
||||||
|
tags=["image", "hue"],
|
||||||
|
category="image",
|
||||||
|
version="1.2.0",
|
||||||
|
)
|
||||||
|
class ImageHueAdjustmentInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Adjusts the Hue of an image."""
|
"""Adjusts the Hue of an image."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to adjust")
|
image: ImageField = InputField(description="The image to adjust")
|
||||||
@ -840,7 +936,7 @@ class ImageHueAdjustmentInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -913,9 +1009,9 @@ CHANNEL_FORMATS = {
|
|||||||
"value",
|
"value",
|
||||||
],
|
],
|
||||||
category="image",
|
category="image",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class ImageChannelOffsetInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class ImageChannelOffsetInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Add or subtract a value from a specific color channel of an image."""
|
"""Add or subtract a value from a specific color channel of an image."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to adjust")
|
image: ImageField = InputField(description="The image to adjust")
|
||||||
@ -950,7 +1046,7 @@ class ImageChannelOffsetInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -984,9 +1080,9 @@ class ImageChannelOffsetInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
"value",
|
"value",
|
||||||
],
|
],
|
||||||
category="image",
|
category="image",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class ImageChannelMultiplyInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class ImageChannelMultiplyInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Scale a specific color channel of an image."""
|
"""Scale a specific color channel of an image."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to adjust")
|
image: ImageField = InputField(description="The image to adjust")
|
||||||
@ -1025,7 +1121,7 @@ class ImageChannelMultiplyInvocation(BaseInvocation, WithWorkflow, WithMetadata)
|
|||||||
node_id=self.id,
|
node_id=self.id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -1043,10 +1139,10 @@ class ImageChannelMultiplyInvocation(BaseInvocation, WithWorkflow, WithMetadata)
|
|||||||
title="Save Image",
|
title="Save Image",
|
||||||
tags=["primitives", "image"],
|
tags=["primitives", "image"],
|
||||||
category="primitives",
|
category="primitives",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
use_cache=False,
|
use_cache=False,
|
||||||
)
|
)
|
||||||
class SaveImageInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class SaveImageInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Saves an image. Unlike an image primitive, this invocation stores a copy of the image."""
|
"""Saves an image. Unlike an image primitive, this invocation stores a copy of the image."""
|
||||||
|
|
||||||
image: ImageField = InputField(description=FieldDescriptions.image)
|
image: ImageField = InputField(description=FieldDescriptions.image)
|
||||||
@ -1064,7 +1160,7 @@ class SaveImageInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -1082,7 +1178,7 @@ class SaveImageInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
version="1.0.1",
|
version="1.0.1",
|
||||||
use_cache=False,
|
use_cache=False,
|
||||||
)
|
)
|
||||||
class LinearUIOutputInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class LinearUIOutputInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Handles Linear UI Image Outputting tasks."""
|
"""Handles Linear UI Image Outputting tasks."""
|
||||||
|
|
||||||
image: ImageField = InputField(description=FieldDescriptions.image)
|
image: ImageField = InputField(description=FieldDescriptions.image)
|
||||||
|
@ -13,7 +13,7 @@ from invokeai.backend.image_util.cv2_inpaint import cv2_inpaint
|
|||||||
from invokeai.backend.image_util.lama import LaMA
|
from invokeai.backend.image_util.lama import LaMA
|
||||||
from invokeai.backend.image_util.patchmatch import PatchMatch
|
from invokeai.backend.image_util.patchmatch import PatchMatch
|
||||||
|
|
||||||
from .baseinvocation import BaseInvocation, InputField, InvocationContext, WithMetadata, WithWorkflow, invocation
|
from .baseinvocation import BaseInvocation, InputField, InvocationContext, WithMetadata, invocation
|
||||||
from .image import PIL_RESAMPLING_MAP, PIL_RESAMPLING_MODES
|
from .image import PIL_RESAMPLING_MAP, PIL_RESAMPLING_MODES
|
||||||
|
|
||||||
|
|
||||||
@ -118,8 +118,8 @@ def tile_fill_missing(im: Image.Image, tile_size: int = 16, seed: Optional[int]
|
|||||||
return si
|
return si
|
||||||
|
|
||||||
|
|
||||||
@invocation("infill_rgba", title="Solid Color Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.0")
|
@invocation("infill_rgba", title="Solid Color Infill", tags=["image", "inpaint"], category="inpaint", version="1.2.0")
|
||||||
class InfillColorInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class InfillColorInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Infills transparent areas of an image with a solid color"""
|
"""Infills transparent areas of an image with a solid color"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to infill")
|
image: ImageField = InputField(description="The image to infill")
|
||||||
@ -144,7 +144,7 @@ class InfillColorInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -154,8 +154,8 @@ class InfillColorInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("infill_tile", title="Tile Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.1")
|
@invocation("infill_tile", title="Tile Infill", tags=["image", "inpaint"], category="inpaint", version="1.2.1")
|
||||||
class InfillTileInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class InfillTileInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Infills transparent areas of an image with tiles of the image"""
|
"""Infills transparent areas of an image with tiles of the image"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to infill")
|
image: ImageField = InputField(description="The image to infill")
|
||||||
@ -181,7 +181,7 @@ class InfillTileInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -192,9 +192,9 @@ class InfillTileInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
|
|
||||||
|
|
||||||
@invocation(
|
@invocation(
|
||||||
"infill_patchmatch", title="PatchMatch Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.0"
|
"infill_patchmatch", title="PatchMatch Infill", tags=["image", "inpaint"], category="inpaint", version="1.2.0"
|
||||||
)
|
)
|
||||||
class InfillPatchMatchInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class InfillPatchMatchInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Infills transparent areas of an image using the PatchMatch algorithm"""
|
"""Infills transparent areas of an image using the PatchMatch algorithm"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to infill")
|
image: ImageField = InputField(description="The image to infill")
|
||||||
@ -235,7 +235,7 @@ class InfillPatchMatchInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -245,8 +245,8 @@ class InfillPatchMatchInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("infill_lama", title="LaMa Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.0")
|
@invocation("infill_lama", title="LaMa Infill", tags=["image", "inpaint"], category="inpaint", version="1.2.0")
|
||||||
class LaMaInfillInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class LaMaInfillInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Infills transparent areas of an image using the LaMa model"""
|
"""Infills transparent areas of an image using the LaMa model"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to infill")
|
image: ImageField = InputField(description="The image to infill")
|
||||||
@ -264,7 +264,7 @@ class LaMaInfillInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
@ -274,8 +274,8 @@ class LaMaInfillInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("infill_cv2", title="CV2 Infill", tags=["image", "inpaint"], category="inpaint", version="1.1.0")
|
@invocation("infill_cv2", title="CV2 Infill", tags=["image", "inpaint"], category="inpaint", version="1.2.0")
|
||||||
class CV2InfillInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class CV2InfillInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Infills transparent areas of an image using OpenCV Inpainting"""
|
"""Infills transparent areas of an image using OpenCV Inpainting"""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The image to infill")
|
image: ImageField = InputField(description="The image to infill")
|
||||||
@ -293,7 +293,7 @@ class CV2InfillInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
|
@ -64,7 +64,6 @@ from .baseinvocation import (
|
|||||||
OutputField,
|
OutputField,
|
||||||
UIType,
|
UIType,
|
||||||
WithMetadata,
|
WithMetadata,
|
||||||
WithWorkflow,
|
|
||||||
invocation,
|
invocation,
|
||||||
invocation_output,
|
invocation_output,
|
||||||
)
|
)
|
||||||
@ -802,9 +801,9 @@ class DenoiseLatentsInvocation(BaseInvocation):
|
|||||||
title="Latents to Image",
|
title="Latents to Image",
|
||||||
tags=["latents", "image", "vae", "l2i"],
|
tags=["latents", "image", "vae", "l2i"],
|
||||||
category="latents",
|
category="latents",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class LatentsToImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
class LatentsToImageInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Generates an image from latents."""
|
"""Generates an image from latents."""
|
||||||
|
|
||||||
latents: LatentsField = InputField(
|
latents: LatentsField = InputField(
|
||||||
@ -886,7 +885,7 @@ class LatentsToImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
|
@ -31,7 +31,6 @@ from .baseinvocation import (
|
|||||||
UIComponent,
|
UIComponent,
|
||||||
UIType,
|
UIType,
|
||||||
WithMetadata,
|
WithMetadata,
|
||||||
WithWorkflow,
|
|
||||||
invocation,
|
invocation,
|
||||||
invocation_output,
|
invocation_output,
|
||||||
)
|
)
|
||||||
@ -326,9 +325,9 @@ class ONNXTextToLatentsInvocation(BaseInvocation):
|
|||||||
title="ONNX Latents to Image",
|
title="ONNX Latents to Image",
|
||||||
tags=["latents", "image", "vae", "onnx"],
|
tags=["latents", "image", "vae", "onnx"],
|
||||||
category="image",
|
category="image",
|
||||||
version="1.1.0",
|
version="1.2.0",
|
||||||
)
|
)
|
||||||
class ONNXLatentsToImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
class ONNXLatentsToImageInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Generates an image from latents."""
|
"""Generates an image from latents."""
|
||||||
|
|
||||||
latents: LatentsField = InputField(
|
latents: LatentsField = InputField(
|
||||||
@ -378,7 +377,7 @@ class ONNXLatentsToImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
|
@ -9,7 +9,6 @@ from invokeai.app.invocations.baseinvocation import (
|
|||||||
InvocationContext,
|
InvocationContext,
|
||||||
OutputField,
|
OutputField,
|
||||||
WithMetadata,
|
WithMetadata,
|
||||||
WithWorkflow,
|
|
||||||
invocation,
|
invocation,
|
||||||
invocation_output,
|
invocation_output,
|
||||||
)
|
)
|
||||||
@ -122,8 +121,8 @@ class PairTileImageInvocation(BaseInvocation):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@invocation("merge_tiles_to_image", title="Merge Tiles to Image", tags=["tiles"], category="tiles", version="1.0.0")
|
@invocation("merge_tiles_to_image", title="Merge Tiles to Image", tags=["tiles"], category="tiles", version="1.1.0")
|
||||||
class MergeTilesToImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
class MergeTilesToImageInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Merge multiple tile images into a single image."""
|
"""Merge multiple tile images into a single image."""
|
||||||
|
|
||||||
# Inputs
|
# Inputs
|
||||||
@ -172,7 +171,7 @@ class MergeTilesToImageInvocation(BaseInvocation, WithMetadata, WithWorkflow):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
image=ImageField(image_name=image_dto.image_name),
|
image=ImageField(image_name=image_dto.image_name),
|
||||||
|
@ -14,7 +14,7 @@ from invokeai.app.services.image_records.image_records_common import ImageCatego
|
|||||||
from invokeai.backend.image_util.realesrgan.realesrgan import RealESRGAN
|
from invokeai.backend.image_util.realesrgan.realesrgan import RealESRGAN
|
||||||
from invokeai.backend.util.devices import choose_torch_device
|
from invokeai.backend.util.devices import choose_torch_device
|
||||||
|
|
||||||
from .baseinvocation import BaseInvocation, InputField, InvocationContext, WithMetadata, WithWorkflow, invocation
|
from .baseinvocation import BaseInvocation, InputField, InvocationContext, WithMetadata, invocation
|
||||||
|
|
||||||
# TODO: Populate this from disk?
|
# TODO: Populate this from disk?
|
||||||
# TODO: Use model manager to load?
|
# TODO: Use model manager to load?
|
||||||
@ -29,8 +29,8 @@ if choose_torch_device() == torch.device("mps"):
|
|||||||
from torch import mps
|
from torch import mps
|
||||||
|
|
||||||
|
|
||||||
@invocation("esrgan", title="Upscale (RealESRGAN)", tags=["esrgan", "upscale"], category="esrgan", version="1.2.0")
|
@invocation("esrgan", title="Upscale (RealESRGAN)", tags=["esrgan", "upscale"], category="esrgan", version="1.3.0")
|
||||||
class ESRGANInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
class ESRGANInvocation(BaseInvocation, WithMetadata):
|
||||||
"""Upscales an image using RealESRGAN."""
|
"""Upscales an image using RealESRGAN."""
|
||||||
|
|
||||||
image: ImageField = InputField(description="The input image")
|
image: ImageField = InputField(description="The input image")
|
||||||
@ -118,7 +118,7 @@ class ESRGANInvocation(BaseInvocation, WithWorkflow, WithMetadata):
|
|||||||
session_id=context.graph_execution_state_id,
|
session_id=context.graph_execution_state_id,
|
||||||
is_intermediate=self.is_intermediate,
|
is_intermediate=self.is_intermediate,
|
||||||
metadata=self.metadata,
|
metadata=self.metadata,
|
||||||
workflow=self.workflow,
|
workflow=context.workflow,
|
||||||
)
|
)
|
||||||
|
|
||||||
return ImageOutput(
|
return ImageOutput(
|
||||||
|
@ -4,7 +4,7 @@ from typing import Optional, cast
|
|||||||
|
|
||||||
from invokeai.app.services.image_records.image_records_common import ImageRecord, deserialize_image_record
|
from invokeai.app.services.image_records.image_records_common import ImageRecord, deserialize_image_record
|
||||||
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
||||||
from invokeai.app.services.shared.sqlite import SqliteDatabase
|
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
|
|
||||||
from .board_image_records_base import BoardImageRecordStorageBase
|
from .board_image_records_base import BoardImageRecordStorageBase
|
||||||
|
|
||||||
|
@ -3,7 +3,7 @@ import threading
|
|||||||
from typing import Union, cast
|
from typing import Union, cast
|
||||||
|
|
||||||
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
||||||
from invokeai.app.services.shared.sqlite import SqliteDatabase
|
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
from invokeai.app.util.misc import uuid_string
|
from invokeai.app.util.misc import uuid_string
|
||||||
|
|
||||||
from .board_records_base import BoardRecordStorageBase
|
from .board_records_base import BoardRecordStorageBase
|
||||||
|
@ -4,7 +4,8 @@ from typing import Optional
|
|||||||
|
|
||||||
from PIL.Image import Image as PILImageType
|
from PIL.Image import Image as PILImageType
|
||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import MetadataField, WorkflowField
|
from invokeai.app.invocations.baseinvocation import MetadataField
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID
|
||||||
|
|
||||||
|
|
||||||
class ImageFileStorageBase(ABC):
|
class ImageFileStorageBase(ABC):
|
||||||
@ -33,7 +34,7 @@ class ImageFileStorageBase(ABC):
|
|||||||
image: PILImageType,
|
image: PILImageType,
|
||||||
image_name: str,
|
image_name: str,
|
||||||
metadata: Optional[MetadataField] = None,
|
metadata: Optional[MetadataField] = None,
|
||||||
workflow: Optional[WorkflowField] = None,
|
workflow: Optional[WorkflowWithoutID] = None,
|
||||||
thumbnail_size: int = 256,
|
thumbnail_size: int = 256,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Saves an image and a 256x256 WEBP thumbnail. Returns a tuple of the image name, thumbnail name, and created timestamp."""
|
"""Saves an image and a 256x256 WEBP thumbnail. Returns a tuple of the image name, thumbnail name, and created timestamp."""
|
||||||
@ -43,3 +44,8 @@ class ImageFileStorageBase(ABC):
|
|||||||
def delete(self, image_name: str) -> None:
|
def delete(self, image_name: str) -> None:
|
||||||
"""Deletes an image and its thumbnail (if one exists)."""
|
"""Deletes an image and its thumbnail (if one exists)."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_workflow(self, image_name: str) -> Optional[WorkflowWithoutID]:
|
||||||
|
"""Gets the workflow of an image."""
|
||||||
|
pass
|
||||||
|
@ -7,8 +7,9 @@ from PIL import Image, PngImagePlugin
|
|||||||
from PIL.Image import Image as PILImageType
|
from PIL.Image import Image as PILImageType
|
||||||
from send2trash import send2trash
|
from send2trash import send2trash
|
||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import MetadataField, WorkflowField
|
from invokeai.app.invocations.baseinvocation import MetadataField
|
||||||
from invokeai.app.services.invoker import Invoker
|
from invokeai.app.services.invoker import Invoker
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID
|
||||||
from invokeai.app.util.thumbnails import get_thumbnail_name, make_thumbnail
|
from invokeai.app.util.thumbnails import get_thumbnail_name, make_thumbnail
|
||||||
|
|
||||||
from .image_files_base import ImageFileStorageBase
|
from .image_files_base import ImageFileStorageBase
|
||||||
@ -56,7 +57,7 @@ class DiskImageFileStorage(ImageFileStorageBase):
|
|||||||
image: PILImageType,
|
image: PILImageType,
|
||||||
image_name: str,
|
image_name: str,
|
||||||
metadata: Optional[MetadataField] = None,
|
metadata: Optional[MetadataField] = None,
|
||||||
workflow: Optional[WorkflowField] = None,
|
workflow: Optional[WorkflowWithoutID] = None,
|
||||||
thumbnail_size: int = 256,
|
thumbnail_size: int = 256,
|
||||||
) -> None:
|
) -> None:
|
||||||
try:
|
try:
|
||||||
@ -64,12 +65,19 @@ class DiskImageFileStorage(ImageFileStorageBase):
|
|||||||
image_path = self.get_path(image_name)
|
image_path = self.get_path(image_name)
|
||||||
|
|
||||||
pnginfo = PngImagePlugin.PngInfo()
|
pnginfo = PngImagePlugin.PngInfo()
|
||||||
|
info_dict = {}
|
||||||
|
|
||||||
if metadata is not None:
|
if metadata is not None:
|
||||||
pnginfo.add_text("invokeai_metadata", metadata.model_dump_json())
|
metadata_json = metadata.model_dump_json()
|
||||||
|
info_dict["invokeai_metadata"] = metadata_json
|
||||||
|
pnginfo.add_text("invokeai_metadata", metadata_json)
|
||||||
if workflow is not None:
|
if workflow is not None:
|
||||||
pnginfo.add_text("invokeai_workflow", workflow.model_dump_json())
|
workflow_json = workflow.model_dump_json()
|
||||||
|
info_dict["invokeai_workflow"] = workflow_json
|
||||||
|
pnginfo.add_text("invokeai_workflow", workflow_json)
|
||||||
|
|
||||||
|
# When saving the image, the image object's info field is not populated. We need to set it
|
||||||
|
image.info = info_dict
|
||||||
image.save(
|
image.save(
|
||||||
image_path,
|
image_path,
|
||||||
"PNG",
|
"PNG",
|
||||||
@ -121,6 +129,13 @@ class DiskImageFileStorage(ImageFileStorageBase):
|
|||||||
path = path if isinstance(path, Path) else Path(path)
|
path = path if isinstance(path, Path) else Path(path)
|
||||||
return path.exists()
|
return path.exists()
|
||||||
|
|
||||||
|
def get_workflow(self, image_name: str) -> WorkflowWithoutID | None:
|
||||||
|
image = self.get(image_name)
|
||||||
|
workflow = image.info.get("invokeai_workflow", None)
|
||||||
|
if workflow is not None:
|
||||||
|
return WorkflowWithoutID.model_validate_json(workflow)
|
||||||
|
return None
|
||||||
|
|
||||||
def __validate_storage_folders(self) -> None:
|
def __validate_storage_folders(self) -> None:
|
||||||
"""Checks if the required output folders exist and create them if they don't"""
|
"""Checks if the required output folders exist and create them if they don't"""
|
||||||
folders: list[Path] = [self.__output_folder, self.__thumbnails_folder]
|
folders: list[Path] = [self.__output_folder, self.__thumbnails_folder]
|
||||||
|
@ -75,6 +75,7 @@ class ImageRecordStorageBase(ABC):
|
|||||||
image_category: ImageCategory,
|
image_category: ImageCategory,
|
||||||
width: int,
|
width: int,
|
||||||
height: int,
|
height: int,
|
||||||
|
has_workflow: bool,
|
||||||
is_intermediate: Optional[bool] = False,
|
is_intermediate: Optional[bool] = False,
|
||||||
starred: Optional[bool] = False,
|
starred: Optional[bool] = False,
|
||||||
session_id: Optional[str] = None,
|
session_id: Optional[str] = None,
|
||||||
|
@ -100,6 +100,7 @@ IMAGE_DTO_COLS = ", ".join(
|
|||||||
"height",
|
"height",
|
||||||
"session_id",
|
"session_id",
|
||||||
"node_id",
|
"node_id",
|
||||||
|
"has_workflow",
|
||||||
"is_intermediate",
|
"is_intermediate",
|
||||||
"created_at",
|
"created_at",
|
||||||
"updated_at",
|
"updated_at",
|
||||||
@ -145,6 +146,7 @@ class ImageRecord(BaseModelExcludeNull):
|
|||||||
"""The node ID that generated this image, if it is a generated image."""
|
"""The node ID that generated this image, if it is a generated image."""
|
||||||
starred: bool = Field(description="Whether this image is starred.")
|
starred: bool = Field(description="Whether this image is starred.")
|
||||||
"""Whether this image is starred."""
|
"""Whether this image is starred."""
|
||||||
|
has_workflow: bool = Field(description="Whether this image has a workflow.")
|
||||||
|
|
||||||
|
|
||||||
class ImageRecordChanges(BaseModelExcludeNull, extra="allow"):
|
class ImageRecordChanges(BaseModelExcludeNull, extra="allow"):
|
||||||
@ -188,6 +190,7 @@ def deserialize_image_record(image_dict: dict) -> ImageRecord:
|
|||||||
deleted_at = image_dict.get("deleted_at", get_iso_timestamp())
|
deleted_at = image_dict.get("deleted_at", get_iso_timestamp())
|
||||||
is_intermediate = image_dict.get("is_intermediate", False)
|
is_intermediate = image_dict.get("is_intermediate", False)
|
||||||
starred = image_dict.get("starred", False)
|
starred = image_dict.get("starred", False)
|
||||||
|
has_workflow = image_dict.get("has_workflow", False)
|
||||||
|
|
||||||
return ImageRecord(
|
return ImageRecord(
|
||||||
image_name=image_name,
|
image_name=image_name,
|
||||||
@ -202,4 +205,5 @@ def deserialize_image_record(image_dict: dict) -> ImageRecord:
|
|||||||
deleted_at=deleted_at,
|
deleted_at=deleted_at,
|
||||||
is_intermediate=is_intermediate,
|
is_intermediate=is_intermediate,
|
||||||
starred=starred,
|
starred=starred,
|
||||||
|
has_workflow=has_workflow,
|
||||||
)
|
)
|
||||||
|
@ -5,7 +5,7 @@ from typing import Optional, Union, cast
|
|||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import MetadataField, MetadataFieldValidator
|
from invokeai.app.invocations.baseinvocation import MetadataField, MetadataFieldValidator
|
||||||
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
||||||
from invokeai.app.services.shared.sqlite import SqliteDatabase
|
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
|
|
||||||
from .image_records_base import ImageRecordStorageBase
|
from .image_records_base import ImageRecordStorageBase
|
||||||
from .image_records_common import (
|
from .image_records_common import (
|
||||||
@ -117,6 +117,16 @@ class SqliteImageRecordStorage(ImageRecordStorageBase):
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
self._cursor.execute("PRAGMA table_info(images)")
|
||||||
|
columns = [column[1] for column in self._cursor.fetchall()]
|
||||||
|
if "has_workflow" not in columns:
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
ALTER TABLE images
|
||||||
|
ADD COLUMN has_workflow BOOLEAN DEFAULT FALSE;
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
def get(self, image_name: str) -> ImageRecord:
|
def get(self, image_name: str) -> ImageRecord:
|
||||||
try:
|
try:
|
||||||
self._lock.acquire()
|
self._lock.acquire()
|
||||||
@ -408,6 +418,7 @@ class SqliteImageRecordStorage(ImageRecordStorageBase):
|
|||||||
image_category: ImageCategory,
|
image_category: ImageCategory,
|
||||||
width: int,
|
width: int,
|
||||||
height: int,
|
height: int,
|
||||||
|
has_workflow: bool,
|
||||||
is_intermediate: Optional[bool] = False,
|
is_intermediate: Optional[bool] = False,
|
||||||
starred: Optional[bool] = False,
|
starred: Optional[bool] = False,
|
||||||
session_id: Optional[str] = None,
|
session_id: Optional[str] = None,
|
||||||
@ -429,9 +440,10 @@ class SqliteImageRecordStorage(ImageRecordStorageBase):
|
|||||||
session_id,
|
session_id,
|
||||||
metadata,
|
metadata,
|
||||||
is_intermediate,
|
is_intermediate,
|
||||||
starred
|
starred,
|
||||||
|
has_workflow
|
||||||
)
|
)
|
||||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?);
|
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);
|
||||||
""",
|
""",
|
||||||
(
|
(
|
||||||
image_name,
|
image_name,
|
||||||
@ -444,6 +456,7 @@ class SqliteImageRecordStorage(ImageRecordStorageBase):
|
|||||||
metadata_json,
|
metadata_json,
|
||||||
is_intermediate,
|
is_intermediate,
|
||||||
starred,
|
starred,
|
||||||
|
has_workflow,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
self._conn.commit()
|
self._conn.commit()
|
||||||
|
@ -3,7 +3,7 @@ from typing import Callable, Optional
|
|||||||
|
|
||||||
from PIL.Image import Image as PILImageType
|
from PIL.Image import Image as PILImageType
|
||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import MetadataField, WorkflowField
|
from invokeai.app.invocations.baseinvocation import MetadataField
|
||||||
from invokeai.app.services.image_records.image_records_common import (
|
from invokeai.app.services.image_records.image_records_common import (
|
||||||
ImageCategory,
|
ImageCategory,
|
||||||
ImageRecord,
|
ImageRecord,
|
||||||
@ -12,6 +12,7 @@ from invokeai.app.services.image_records.image_records_common import (
|
|||||||
)
|
)
|
||||||
from invokeai.app.services.images.images_common import ImageDTO
|
from invokeai.app.services.images.images_common import ImageDTO
|
||||||
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID
|
||||||
|
|
||||||
|
|
||||||
class ImageServiceABC(ABC):
|
class ImageServiceABC(ABC):
|
||||||
@ -51,7 +52,7 @@ class ImageServiceABC(ABC):
|
|||||||
board_id: Optional[str] = None,
|
board_id: Optional[str] = None,
|
||||||
is_intermediate: Optional[bool] = False,
|
is_intermediate: Optional[bool] = False,
|
||||||
metadata: Optional[MetadataField] = None,
|
metadata: Optional[MetadataField] = None,
|
||||||
workflow: Optional[WorkflowField] = None,
|
workflow: Optional[WorkflowWithoutID] = None,
|
||||||
) -> ImageDTO:
|
) -> ImageDTO:
|
||||||
"""Creates an image, storing the file and its metadata."""
|
"""Creates an image, storing the file and its metadata."""
|
||||||
pass
|
pass
|
||||||
@ -85,6 +86,11 @@ class ImageServiceABC(ABC):
|
|||||||
"""Gets an image's metadata."""
|
"""Gets an image's metadata."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_workflow(self, image_name: str) -> Optional[WorkflowWithoutID]:
|
||||||
|
"""Gets an image's workflow."""
|
||||||
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_path(self, image_name: str, thumbnail: bool = False) -> str:
|
def get_path(self, image_name: str, thumbnail: bool = False) -> str:
|
||||||
"""Gets an image's path."""
|
"""Gets an image's path."""
|
||||||
|
@ -24,11 +24,6 @@ class ImageDTO(ImageRecord, ImageUrlsDTO):
|
|||||||
default=None, description="The id of the board the image belongs to, if one exists."
|
default=None, description="The id of the board the image belongs to, if one exists."
|
||||||
)
|
)
|
||||||
"""The id of the board the image belongs to, if one exists."""
|
"""The id of the board the image belongs to, if one exists."""
|
||||||
workflow_id: Optional[str] = Field(
|
|
||||||
default=None,
|
|
||||||
description="The workflow that generated this image.",
|
|
||||||
)
|
|
||||||
"""The workflow that generated this image."""
|
|
||||||
|
|
||||||
|
|
||||||
def image_record_to_dto(
|
def image_record_to_dto(
|
||||||
@ -36,7 +31,6 @@ def image_record_to_dto(
|
|||||||
image_url: str,
|
image_url: str,
|
||||||
thumbnail_url: str,
|
thumbnail_url: str,
|
||||||
board_id: Optional[str],
|
board_id: Optional[str],
|
||||||
workflow_id: Optional[str],
|
|
||||||
) -> ImageDTO:
|
) -> ImageDTO:
|
||||||
"""Converts an image record to an image DTO."""
|
"""Converts an image record to an image DTO."""
|
||||||
return ImageDTO(
|
return ImageDTO(
|
||||||
@ -44,5 +38,4 @@ def image_record_to_dto(
|
|||||||
image_url=image_url,
|
image_url=image_url,
|
||||||
thumbnail_url=thumbnail_url,
|
thumbnail_url=thumbnail_url,
|
||||||
board_id=board_id,
|
board_id=board_id,
|
||||||
workflow_id=workflow_id,
|
|
||||||
)
|
)
|
||||||
|
@ -2,9 +2,10 @@ from typing import Optional
|
|||||||
|
|
||||||
from PIL.Image import Image as PILImageType
|
from PIL.Image import Image as PILImageType
|
||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import MetadataField, WorkflowField
|
from invokeai.app.invocations.baseinvocation import MetadataField
|
||||||
from invokeai.app.services.invoker import Invoker
|
from invokeai.app.services.invoker import Invoker
|
||||||
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
from invokeai.app.services.shared.pagination import OffsetPaginatedResults
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID
|
||||||
|
|
||||||
from ..image_files.image_files_common import (
|
from ..image_files.image_files_common import (
|
||||||
ImageFileDeleteException,
|
ImageFileDeleteException,
|
||||||
@ -42,7 +43,7 @@ class ImageService(ImageServiceABC):
|
|||||||
board_id: Optional[str] = None,
|
board_id: Optional[str] = None,
|
||||||
is_intermediate: Optional[bool] = False,
|
is_intermediate: Optional[bool] = False,
|
||||||
metadata: Optional[MetadataField] = None,
|
metadata: Optional[MetadataField] = None,
|
||||||
workflow: Optional[WorkflowField] = None,
|
workflow: Optional[WorkflowWithoutID] = None,
|
||||||
) -> ImageDTO:
|
) -> ImageDTO:
|
||||||
if image_origin not in ResourceOrigin:
|
if image_origin not in ResourceOrigin:
|
||||||
raise InvalidOriginException
|
raise InvalidOriginException
|
||||||
@ -55,12 +56,6 @@ class ImageService(ImageServiceABC):
|
|||||||
(width, height) = image.size
|
(width, height) = image.size
|
||||||
|
|
||||||
try:
|
try:
|
||||||
if workflow is not None:
|
|
||||||
created_workflow = self.__invoker.services.workflow_records.create(workflow)
|
|
||||||
workflow_id = created_workflow.model_dump()["id"]
|
|
||||||
else:
|
|
||||||
workflow_id = None
|
|
||||||
|
|
||||||
# TODO: Consider using a transaction here to ensure consistency between storage and database
|
# TODO: Consider using a transaction here to ensure consistency between storage and database
|
||||||
self.__invoker.services.image_records.save(
|
self.__invoker.services.image_records.save(
|
||||||
# Non-nullable fields
|
# Non-nullable fields
|
||||||
@ -69,6 +64,7 @@ class ImageService(ImageServiceABC):
|
|||||||
image_category=image_category,
|
image_category=image_category,
|
||||||
width=width,
|
width=width,
|
||||||
height=height,
|
height=height,
|
||||||
|
has_workflow=workflow is not None,
|
||||||
# Meta fields
|
# Meta fields
|
||||||
is_intermediate=is_intermediate,
|
is_intermediate=is_intermediate,
|
||||||
# Nullable fields
|
# Nullable fields
|
||||||
@ -78,8 +74,6 @@ class ImageService(ImageServiceABC):
|
|||||||
)
|
)
|
||||||
if board_id is not None:
|
if board_id is not None:
|
||||||
self.__invoker.services.board_image_records.add_image_to_board(board_id=board_id, image_name=image_name)
|
self.__invoker.services.board_image_records.add_image_to_board(board_id=board_id, image_name=image_name)
|
||||||
if workflow_id is not None:
|
|
||||||
self.__invoker.services.workflow_image_records.create(workflow_id=workflow_id, image_name=image_name)
|
|
||||||
self.__invoker.services.image_files.save(
|
self.__invoker.services.image_files.save(
|
||||||
image_name=image_name, image=image, metadata=metadata, workflow=workflow
|
image_name=image_name, image=image, metadata=metadata, workflow=workflow
|
||||||
)
|
)
|
||||||
@ -143,7 +137,6 @@ class ImageService(ImageServiceABC):
|
|||||||
image_url=self.__invoker.services.urls.get_image_url(image_name),
|
image_url=self.__invoker.services.urls.get_image_url(image_name),
|
||||||
thumbnail_url=self.__invoker.services.urls.get_image_url(image_name, True),
|
thumbnail_url=self.__invoker.services.urls.get_image_url(image_name, True),
|
||||||
board_id=self.__invoker.services.board_image_records.get_board_for_image(image_name),
|
board_id=self.__invoker.services.board_image_records.get_board_for_image(image_name),
|
||||||
workflow_id=self.__invoker.services.workflow_image_records.get_workflow_for_image(image_name),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return image_dto
|
return image_dto
|
||||||
@ -164,18 +157,15 @@ class ImageService(ImageServiceABC):
|
|||||||
self.__invoker.services.logger.error("Problem getting image DTO")
|
self.__invoker.services.logger.error("Problem getting image DTO")
|
||||||
raise e
|
raise e
|
||||||
|
|
||||||
def get_workflow(self, image_name: str) -> Optional[WorkflowField]:
|
def get_workflow(self, image_name: str) -> Optional[WorkflowWithoutID]:
|
||||||
try:
|
try:
|
||||||
workflow_id = self.__invoker.services.workflow_image_records.get_workflow_for_image(image_name)
|
return self.__invoker.services.image_files.get_workflow(image_name)
|
||||||
if workflow_id is None:
|
except ImageFileNotFoundException:
|
||||||
return None
|
self.__invoker.services.logger.error("Image file not found")
|
||||||
return self.__invoker.services.workflow_records.get(workflow_id)
|
raise
|
||||||
except ImageRecordNotFoundException:
|
except Exception:
|
||||||
self.__invoker.services.logger.error("Image record not found")
|
self.__invoker.services.logger.error("Problem getting image workflow")
|
||||||
raise
|
raise
|
||||||
except Exception as e:
|
|
||||||
self.__invoker.services.logger.error("Problem getting image DTO")
|
|
||||||
raise e
|
|
||||||
|
|
||||||
def get_path(self, image_name: str, thumbnail: bool = False) -> str:
|
def get_path(self, image_name: str, thumbnail: bool = False) -> str:
|
||||||
try:
|
try:
|
||||||
@ -223,7 +213,6 @@ class ImageService(ImageServiceABC):
|
|||||||
image_url=self.__invoker.services.urls.get_image_url(r.image_name),
|
image_url=self.__invoker.services.urls.get_image_url(r.image_name),
|
||||||
thumbnail_url=self.__invoker.services.urls.get_image_url(r.image_name, True),
|
thumbnail_url=self.__invoker.services.urls.get_image_url(r.image_name, True),
|
||||||
board_id=self.__invoker.services.board_image_records.get_board_for_image(r.image_name),
|
board_id=self.__invoker.services.board_image_records.get_board_for_image(r.image_name),
|
||||||
workflow_id=self.__invoker.services.workflow_image_records.get_workflow_for_image(r.image_name),
|
|
||||||
)
|
)
|
||||||
for r in results.items
|
for r in results.items
|
||||||
]
|
]
|
||||||
|
@ -108,6 +108,7 @@ class DefaultInvocationProcessor(InvocationProcessorABC):
|
|||||||
queue_item_id=queue_item.session_queue_item_id,
|
queue_item_id=queue_item.session_queue_item_id,
|
||||||
queue_id=queue_item.session_queue_id,
|
queue_id=queue_item.session_queue_id,
|
||||||
queue_batch_id=queue_item.session_queue_batch_id,
|
queue_batch_id=queue_item.session_queue_batch_id,
|
||||||
|
workflow=queue_item.workflow,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -178,6 +179,7 @@ class DefaultInvocationProcessor(InvocationProcessorABC):
|
|||||||
session_queue_item_id=queue_item.session_queue_item_id,
|
session_queue_item_id=queue_item.session_queue_item_id,
|
||||||
session_queue_id=queue_item.session_queue_id,
|
session_queue_id=queue_item.session_queue_id,
|
||||||
graph_execution_state=graph_execution_state,
|
graph_execution_state=graph_execution_state,
|
||||||
|
workflow=queue_item.workflow,
|
||||||
invoke_all=True,
|
invoke_all=True,
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
|
@ -1,9 +1,12 @@
|
|||||||
# Copyright (c) 2022 Kyle Schouviller (https://github.com/kyle0654)
|
# Copyright (c) 2022 Kyle Schouviller (https://github.com/kyle0654)
|
||||||
|
|
||||||
import time
|
import time
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID
|
||||||
|
|
||||||
|
|
||||||
class InvocationQueueItem(BaseModel):
|
class InvocationQueueItem(BaseModel):
|
||||||
graph_execution_state_id: str = Field(description="The ID of the graph execution state")
|
graph_execution_state_id: str = Field(description="The ID of the graph execution state")
|
||||||
@ -15,5 +18,6 @@ class InvocationQueueItem(BaseModel):
|
|||||||
session_queue_batch_id: str = Field(
|
session_queue_batch_id: str = Field(
|
||||||
description="The ID of the session batch from which this invocation queue item came"
|
description="The ID of the session batch from which this invocation queue item came"
|
||||||
)
|
)
|
||||||
|
workflow: Optional[WorkflowWithoutID] = Field(description="The workflow associated with this queue item")
|
||||||
invoke_all: bool = Field(default=False)
|
invoke_all: bool = Field(default=False)
|
||||||
timestamp: float = Field(default_factory=time.time)
|
timestamp: float = Field(default_factory=time.time)
|
||||||
|
@ -28,7 +28,6 @@ if TYPE_CHECKING:
|
|||||||
from .session_queue.session_queue_base import SessionQueueBase
|
from .session_queue.session_queue_base import SessionQueueBase
|
||||||
from .shared.graph import GraphExecutionState, LibraryGraph
|
from .shared.graph import GraphExecutionState, LibraryGraph
|
||||||
from .urls.urls_base import UrlServiceBase
|
from .urls.urls_base import UrlServiceBase
|
||||||
from .workflow_image_records.workflow_image_records_base import WorkflowImageRecordsStorageBase
|
|
||||||
from .workflow_records.workflow_records_base import WorkflowRecordsStorageBase
|
from .workflow_records.workflow_records_base import WorkflowRecordsStorageBase
|
||||||
|
|
||||||
|
|
||||||
@ -59,7 +58,6 @@ class InvocationServices:
|
|||||||
invocation_cache: "InvocationCacheBase"
|
invocation_cache: "InvocationCacheBase"
|
||||||
names: "NameServiceBase"
|
names: "NameServiceBase"
|
||||||
urls: "UrlServiceBase"
|
urls: "UrlServiceBase"
|
||||||
workflow_image_records: "WorkflowImageRecordsStorageBase"
|
|
||||||
workflow_records: "WorkflowRecordsStorageBase"
|
workflow_records: "WorkflowRecordsStorageBase"
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
@ -87,7 +85,6 @@ class InvocationServices:
|
|||||||
invocation_cache: "InvocationCacheBase",
|
invocation_cache: "InvocationCacheBase",
|
||||||
names: "NameServiceBase",
|
names: "NameServiceBase",
|
||||||
urls: "UrlServiceBase",
|
urls: "UrlServiceBase",
|
||||||
workflow_image_records: "WorkflowImageRecordsStorageBase",
|
|
||||||
workflow_records: "WorkflowRecordsStorageBase",
|
workflow_records: "WorkflowRecordsStorageBase",
|
||||||
):
|
):
|
||||||
self.board_images = board_images
|
self.board_images = board_images
|
||||||
@ -113,5 +110,4 @@ class InvocationServices:
|
|||||||
self.invocation_cache = invocation_cache
|
self.invocation_cache = invocation_cache
|
||||||
self.names = names
|
self.names = names
|
||||||
self.urls = urls
|
self.urls = urls
|
||||||
self.workflow_image_records = workflow_image_records
|
|
||||||
self.workflow_records = workflow_records
|
self.workflow_records = workflow_records
|
||||||
|
@ -2,6 +2,8 @@
|
|||||||
|
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID
|
||||||
|
|
||||||
from .invocation_queue.invocation_queue_common import InvocationQueueItem
|
from .invocation_queue.invocation_queue_common import InvocationQueueItem
|
||||||
from .invocation_services import InvocationServices
|
from .invocation_services import InvocationServices
|
||||||
from .shared.graph import Graph, GraphExecutionState
|
from .shared.graph import Graph, GraphExecutionState
|
||||||
@ -22,6 +24,7 @@ class Invoker:
|
|||||||
session_queue_item_id: int,
|
session_queue_item_id: int,
|
||||||
session_queue_batch_id: str,
|
session_queue_batch_id: str,
|
||||||
graph_execution_state: GraphExecutionState,
|
graph_execution_state: GraphExecutionState,
|
||||||
|
workflow: Optional[WorkflowWithoutID] = None,
|
||||||
invoke_all: bool = False,
|
invoke_all: bool = False,
|
||||||
) -> Optional[str]:
|
) -> Optional[str]:
|
||||||
"""Determines the next node to invoke and enqueues it, preparing if needed.
|
"""Determines the next node to invoke and enqueues it, preparing if needed.
|
||||||
@ -43,6 +46,7 @@ class Invoker:
|
|||||||
session_queue_batch_id=session_queue_batch_id,
|
session_queue_batch_id=session_queue_batch_id,
|
||||||
graph_execution_state_id=graph_execution_state.id,
|
graph_execution_state_id=graph_execution_state.id,
|
||||||
invocation_id=invocation.id,
|
invocation_id=invocation.id,
|
||||||
|
workflow=workflow,
|
||||||
invoke_all=invoke_all,
|
invoke_all=invoke_all,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -5,7 +5,7 @@ from typing import Generic, Optional, TypeVar, get_args
|
|||||||
from pydantic import BaseModel, TypeAdapter
|
from pydantic import BaseModel, TypeAdapter
|
||||||
|
|
||||||
from invokeai.app.services.shared.pagination import PaginatedResults
|
from invokeai.app.services.shared.pagination import PaginatedResults
|
||||||
from invokeai.app.services.shared.sqlite import SqliteDatabase
|
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
|
|
||||||
from .item_storage_base import ItemStorageABC
|
from .item_storage_base import ItemStorageABC
|
||||||
|
|
||||||
|
@ -52,7 +52,7 @@ from invokeai.backend.model_manager.config import (
|
|||||||
ModelType,
|
ModelType,
|
||||||
)
|
)
|
||||||
|
|
||||||
from ..shared.sqlite import SqliteDatabase
|
from ..shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
from .model_records_base import (
|
from .model_records_base import (
|
||||||
CONFIG_FILE_VERSION,
|
CONFIG_FILE_VERSION,
|
||||||
DuplicateModelException,
|
DuplicateModelException,
|
||||||
|
@ -114,6 +114,7 @@ class DefaultSessionProcessor(SessionProcessorBase):
|
|||||||
session_queue_id=queue_item.queue_id,
|
session_queue_id=queue_item.queue_id,
|
||||||
session_queue_item_id=queue_item.item_id,
|
session_queue_item_id=queue_item.item_id,
|
||||||
graph_execution_state=queue_item.session,
|
graph_execution_state=queue_item.session,
|
||||||
|
workflow=queue_item.workflow,
|
||||||
invoke_all=True,
|
invoke_all=True,
|
||||||
)
|
)
|
||||||
queue_item = None
|
queue_item = None
|
||||||
|
@ -8,6 +8,10 @@ from pydantic_core import to_jsonable_python
|
|||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import BaseInvocation
|
from invokeai.app.invocations.baseinvocation import BaseInvocation
|
||||||
from invokeai.app.services.shared.graph import Graph, GraphExecutionState, NodeNotFoundError
|
from invokeai.app.services.shared.graph import Graph, GraphExecutionState, NodeNotFoundError
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import (
|
||||||
|
WorkflowWithoutID,
|
||||||
|
WorkflowWithoutIDValidator,
|
||||||
|
)
|
||||||
from invokeai.app.util.misc import uuid_string
|
from invokeai.app.util.misc import uuid_string
|
||||||
|
|
||||||
# region Errors
|
# region Errors
|
||||||
@ -66,6 +70,9 @@ class Batch(BaseModel):
|
|||||||
batch_id: str = Field(default_factory=uuid_string, description="The ID of the batch")
|
batch_id: str = Field(default_factory=uuid_string, description="The ID of the batch")
|
||||||
data: Optional[BatchDataCollection] = Field(default=None, description="The batch data collection.")
|
data: Optional[BatchDataCollection] = Field(default=None, description="The batch data collection.")
|
||||||
graph: Graph = Field(description="The graph to initialize the session with")
|
graph: Graph = Field(description="The graph to initialize the session with")
|
||||||
|
workflow: Optional[WorkflowWithoutID] = Field(
|
||||||
|
default=None, description="The workflow to initialize the session with"
|
||||||
|
)
|
||||||
runs: int = Field(
|
runs: int = Field(
|
||||||
default=1, ge=1, description="Int stating how many times to iterate through all possible batch indices"
|
default=1, ge=1, description="Int stating how many times to iterate through all possible batch indices"
|
||||||
)
|
)
|
||||||
@ -164,6 +171,14 @@ def get_session(queue_item_dict: dict) -> GraphExecutionState:
|
|||||||
return session
|
return session
|
||||||
|
|
||||||
|
|
||||||
|
def get_workflow(queue_item_dict: dict) -> Optional[WorkflowWithoutID]:
|
||||||
|
workflow_raw = queue_item_dict.get("workflow", None)
|
||||||
|
if workflow_raw is not None:
|
||||||
|
workflow = WorkflowWithoutIDValidator.validate_json(workflow_raw, strict=False)
|
||||||
|
return workflow
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
class SessionQueueItemWithoutGraph(BaseModel):
|
class SessionQueueItemWithoutGraph(BaseModel):
|
||||||
"""Session queue item without the full graph. Used for serialization."""
|
"""Session queue item without the full graph. Used for serialization."""
|
||||||
|
|
||||||
@ -213,12 +228,16 @@ class SessionQueueItemDTO(SessionQueueItemWithoutGraph):
|
|||||||
|
|
||||||
class SessionQueueItem(SessionQueueItemWithoutGraph):
|
class SessionQueueItem(SessionQueueItemWithoutGraph):
|
||||||
session: GraphExecutionState = Field(description="The fully-populated session to be executed")
|
session: GraphExecutionState = Field(description="The fully-populated session to be executed")
|
||||||
|
workflow: Optional[WorkflowWithoutID] = Field(
|
||||||
|
default=None, description="The workflow associated with this queue item"
|
||||||
|
)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def queue_item_from_dict(cls, queue_item_dict: dict) -> "SessionQueueItem":
|
def queue_item_from_dict(cls, queue_item_dict: dict) -> "SessionQueueItem":
|
||||||
# must parse these manually
|
# must parse these manually
|
||||||
queue_item_dict["field_values"] = get_field_values(queue_item_dict)
|
queue_item_dict["field_values"] = get_field_values(queue_item_dict)
|
||||||
queue_item_dict["session"] = get_session(queue_item_dict)
|
queue_item_dict["session"] = get_session(queue_item_dict)
|
||||||
|
queue_item_dict["workflow"] = get_workflow(queue_item_dict)
|
||||||
return SessionQueueItem(**queue_item_dict)
|
return SessionQueueItem(**queue_item_dict)
|
||||||
|
|
||||||
model_config = ConfigDict(
|
model_config = ConfigDict(
|
||||||
@ -334,7 +353,7 @@ def populate_graph(graph: Graph, node_field_values: Iterable[NodeFieldValue]) ->
|
|||||||
|
|
||||||
def create_session_nfv_tuples(
|
def create_session_nfv_tuples(
|
||||||
batch: Batch, maximum: int
|
batch: Batch, maximum: int
|
||||||
) -> Generator[tuple[GraphExecutionState, list[NodeFieldValue]], None, None]:
|
) -> Generator[tuple[GraphExecutionState, list[NodeFieldValue], Optional[WorkflowWithoutID]], None, None]:
|
||||||
"""
|
"""
|
||||||
Create all graph permutations from the given batch data and graph. Yields tuples
|
Create all graph permutations from the given batch data and graph. Yields tuples
|
||||||
of the form (graph, batch_data_items) where batch_data_items is the list of BatchDataItems
|
of the form (graph, batch_data_items) where batch_data_items is the list of BatchDataItems
|
||||||
@ -365,7 +384,7 @@ def create_session_nfv_tuples(
|
|||||||
return
|
return
|
||||||
flat_node_field_values = list(chain.from_iterable(d))
|
flat_node_field_values = list(chain.from_iterable(d))
|
||||||
graph = populate_graph(batch.graph, flat_node_field_values)
|
graph = populate_graph(batch.graph, flat_node_field_values)
|
||||||
yield (GraphExecutionState(graph=graph), flat_node_field_values)
|
yield (GraphExecutionState(graph=graph), flat_node_field_values, batch.workflow)
|
||||||
count += 1
|
count += 1
|
||||||
|
|
||||||
|
|
||||||
@ -391,12 +410,14 @@ def calc_session_count(batch: Batch) -> int:
|
|||||||
class SessionQueueValueToInsert(NamedTuple):
|
class SessionQueueValueToInsert(NamedTuple):
|
||||||
"""A tuple of values to insert into the session_queue table"""
|
"""A tuple of values to insert into the session_queue table"""
|
||||||
|
|
||||||
|
# Careful with the ordering of this - it must match the insert statement
|
||||||
queue_id: str # queue_id
|
queue_id: str # queue_id
|
||||||
session: str # session json
|
session: str # session json
|
||||||
session_id: str # session_id
|
session_id: str # session_id
|
||||||
batch_id: str # batch_id
|
batch_id: str # batch_id
|
||||||
field_values: Optional[str] # field_values json
|
field_values: Optional[str] # field_values json
|
||||||
priority: int # priority
|
priority: int # priority
|
||||||
|
workflow: Optional[str] # workflow json
|
||||||
|
|
||||||
|
|
||||||
ValuesToInsert: TypeAlias = list[SessionQueueValueToInsert]
|
ValuesToInsert: TypeAlias = list[SessionQueueValueToInsert]
|
||||||
@ -404,7 +425,7 @@ ValuesToInsert: TypeAlias = list[SessionQueueValueToInsert]
|
|||||||
|
|
||||||
def prepare_values_to_insert(queue_id: str, batch: Batch, priority: int, max_new_queue_items: int) -> ValuesToInsert:
|
def prepare_values_to_insert(queue_id: str, batch: Batch, priority: int, max_new_queue_items: int) -> ValuesToInsert:
|
||||||
values_to_insert: ValuesToInsert = []
|
values_to_insert: ValuesToInsert = []
|
||||||
for session, field_values in create_session_nfv_tuples(batch, max_new_queue_items):
|
for session, field_values, workflow in create_session_nfv_tuples(batch, max_new_queue_items):
|
||||||
# sessions must have unique id
|
# sessions must have unique id
|
||||||
session.id = uuid_string()
|
session.id = uuid_string()
|
||||||
values_to_insert.append(
|
values_to_insert.append(
|
||||||
@ -416,6 +437,7 @@ def prepare_values_to_insert(queue_id: str, batch: Batch, priority: int, max_new
|
|||||||
# must use pydantic_encoder bc field_values is a list of models
|
# must use pydantic_encoder bc field_values is a list of models
|
||||||
json.dumps(field_values, default=to_jsonable_python) if field_values else None, # field_values (json)
|
json.dumps(field_values, default=to_jsonable_python) if field_values else None, # field_values (json)
|
||||||
priority, # priority
|
priority, # priority
|
||||||
|
json.dumps(workflow, default=to_jsonable_python) if workflow else None, # workflow (json)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
return values_to_insert
|
return values_to_insert
|
||||||
|
@ -28,7 +28,7 @@ from invokeai.app.services.session_queue.session_queue_common import (
|
|||||||
prepare_values_to_insert,
|
prepare_values_to_insert,
|
||||||
)
|
)
|
||||||
from invokeai.app.services.shared.pagination import CursorPaginatedResults
|
from invokeai.app.services.shared.pagination import CursorPaginatedResults
|
||||||
from invokeai.app.services.shared.sqlite import SqliteDatabase
|
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
|
|
||||||
|
|
||||||
class SqliteSessionQueue(SessionQueueBase):
|
class SqliteSessionQueue(SessionQueueBase):
|
||||||
@ -199,6 +199,15 @@ class SqliteSessionQueue(SessionQueueBase):
|
|||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
self.__cursor.execute("PRAGMA table_info(session_queue)")
|
||||||
|
columns = [column[1] for column in self.__cursor.fetchall()]
|
||||||
|
if "workflow" not in columns:
|
||||||
|
self.__cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
ALTER TABLE session_queue ADD COLUMN workflow TEXT;
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
self.__conn.commit()
|
self.__conn.commit()
|
||||||
except Exception:
|
except Exception:
|
||||||
self.__conn.rollback()
|
self.__conn.rollback()
|
||||||
@ -281,8 +290,8 @@ class SqliteSessionQueue(SessionQueueBase):
|
|||||||
|
|
||||||
self.__cursor.executemany(
|
self.__cursor.executemany(
|
||||||
"""--sql
|
"""--sql
|
||||||
INSERT INTO session_queue (queue_id, session, session_id, batch_id, field_values, priority)
|
INSERT INTO session_queue (queue_id, session, session_id, batch_id, field_values, priority, workflow)
|
||||||
VALUES (?, ?, ?, ?, ?, ?)
|
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||||
""",
|
""",
|
||||||
values_to_insert,
|
values_to_insert,
|
||||||
)
|
)
|
||||||
|
10
invokeai/app/services/shared/sqlite/sqlite_common.py
Normal file
10
invokeai/app/services/shared/sqlite/sqlite_common.py
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
from invokeai.app.util.metaenum import MetaEnum
|
||||||
|
|
||||||
|
sqlite_memory = ":memory:"
|
||||||
|
|
||||||
|
|
||||||
|
class SQLiteDirection(str, Enum, metaclass=MetaEnum):
|
||||||
|
Ascending = "ASC"
|
||||||
|
Descending = "DESC"
|
@ -4,8 +4,7 @@ from logging import Logger
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
from invokeai.app.services.config import InvokeAIAppConfig
|
from invokeai.app.services.config import InvokeAIAppConfig
|
||||||
|
from invokeai.app.services.shared.sqlite.sqlite_common import sqlite_memory
|
||||||
sqlite_memory = ":memory:"
|
|
||||||
|
|
||||||
|
|
||||||
class SqliteDatabase:
|
class SqliteDatabase:
|
||||||
@ -32,19 +31,17 @@ class SqliteDatabase:
|
|||||||
self.conn.execute("PRAGMA foreign_keys = ON;")
|
self.conn.execute("PRAGMA foreign_keys = ON;")
|
||||||
|
|
||||||
def clean(self) -> None:
|
def clean(self) -> None:
|
||||||
try:
|
with self.lock:
|
||||||
if self.db_path == sqlite_memory:
|
try:
|
||||||
return
|
if self.db_path == sqlite_memory:
|
||||||
initial_db_size = Path(self.db_path).stat().st_size
|
return
|
||||||
self.lock.acquire()
|
initial_db_size = Path(self.db_path).stat().st_size
|
||||||
self.conn.execute("VACUUM;")
|
self.conn.execute("VACUUM;")
|
||||||
self.conn.commit()
|
self.conn.commit()
|
||||||
final_db_size = Path(self.db_path).stat().st_size
|
final_db_size = Path(self.db_path).stat().st_size
|
||||||
freed_space_in_mb = round((initial_db_size - final_db_size) / 1024 / 1024, 2)
|
freed_space_in_mb = round((initial_db_size - final_db_size) / 1024 / 1024, 2)
|
||||||
if freed_space_in_mb > 0:
|
if freed_space_in_mb > 0:
|
||||||
self._logger.info(f"Cleaned database (freed {freed_space_in_mb}MB)")
|
self._logger.info(f"Cleaned database (freed {freed_space_in_mb}MB)")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self._logger.error(f"Error cleaning database: {e}")
|
self._logger.error(f"Error cleaning database: {e}")
|
||||||
raise e
|
raise
|
||||||
finally:
|
|
||||||
self.lock.release()
|
|
@ -1,23 +0,0 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
|
|
||||||
class WorkflowImageRecordsStorageBase(ABC):
|
|
||||||
"""Abstract base class for the one-to-many workflow-image relationship record storage."""
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def create(
|
|
||||||
self,
|
|
||||||
workflow_id: str,
|
|
||||||
image_name: str,
|
|
||||||
) -> None:
|
|
||||||
"""Creates a workflow-image record."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def get_workflow_for_image(
|
|
||||||
self,
|
|
||||||
image_name: str,
|
|
||||||
) -> Optional[str]:
|
|
||||||
"""Gets an image's workflow id, if it has one."""
|
|
||||||
pass
|
|
@ -1,122 +0,0 @@
|
|||||||
import sqlite3
|
|
||||||
import threading
|
|
||||||
from typing import Optional, cast
|
|
||||||
|
|
||||||
from invokeai.app.services.shared.sqlite import SqliteDatabase
|
|
||||||
from invokeai.app.services.workflow_image_records.workflow_image_records_base import WorkflowImageRecordsStorageBase
|
|
||||||
|
|
||||||
|
|
||||||
class SqliteWorkflowImageRecordsStorage(WorkflowImageRecordsStorageBase):
|
|
||||||
"""SQLite implementation of WorkflowImageRecordsStorageBase."""
|
|
||||||
|
|
||||||
_conn: sqlite3.Connection
|
|
||||||
_cursor: sqlite3.Cursor
|
|
||||||
_lock: threading.RLock
|
|
||||||
|
|
||||||
def __init__(self, db: SqliteDatabase) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self._lock = db.lock
|
|
||||||
self._conn = db.conn
|
|
||||||
self._cursor = self._conn.cursor()
|
|
||||||
|
|
||||||
try:
|
|
||||||
self._lock.acquire()
|
|
||||||
self._create_tables()
|
|
||||||
self._conn.commit()
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
def _create_tables(self) -> None:
|
|
||||||
# Create the `workflow_images` junction table.
|
|
||||||
self._cursor.execute(
|
|
||||||
"""--sql
|
|
||||||
CREATE TABLE IF NOT EXISTS workflow_images (
|
|
||||||
workflow_id TEXT NOT NULL,
|
|
||||||
image_name TEXT NOT NULL,
|
|
||||||
created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
|
|
||||||
-- updated via trigger
|
|
||||||
updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
|
|
||||||
-- Soft delete, currently unused
|
|
||||||
deleted_at DATETIME,
|
|
||||||
-- enforce one-to-many relationship between workflows and images using PK
|
|
||||||
-- (we can extend this to many-to-many later)
|
|
||||||
PRIMARY KEY (image_name),
|
|
||||||
FOREIGN KEY (workflow_id) REFERENCES workflows (workflow_id) ON DELETE CASCADE,
|
|
||||||
FOREIGN KEY (image_name) REFERENCES images (image_name) ON DELETE CASCADE
|
|
||||||
);
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add index for workflow id
|
|
||||||
self._cursor.execute(
|
|
||||||
"""--sql
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_workflow_images_workflow_id ON workflow_images (workflow_id);
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add index for workflow id, sorted by created_at
|
|
||||||
self._cursor.execute(
|
|
||||||
"""--sql
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_workflow_images_workflow_id_created_at ON workflow_images (workflow_id, created_at);
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add trigger for `updated_at`.
|
|
||||||
self._cursor.execute(
|
|
||||||
"""--sql
|
|
||||||
CREATE TRIGGER IF NOT EXISTS tg_workflow_images_updated_at
|
|
||||||
AFTER UPDATE
|
|
||||||
ON workflow_images FOR EACH ROW
|
|
||||||
BEGIN
|
|
||||||
UPDATE workflow_images SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')
|
|
||||||
WHERE workflow_id = old.workflow_id AND image_name = old.image_name;
|
|
||||||
END;
|
|
||||||
"""
|
|
||||||
)
|
|
||||||
|
|
||||||
def create(
|
|
||||||
self,
|
|
||||||
workflow_id: str,
|
|
||||||
image_name: str,
|
|
||||||
) -> None:
|
|
||||||
"""Creates a workflow-image record."""
|
|
||||||
try:
|
|
||||||
self._lock.acquire()
|
|
||||||
self._cursor.execute(
|
|
||||||
"""--sql
|
|
||||||
INSERT INTO workflow_images (workflow_id, image_name)
|
|
||||||
VALUES (?, ?);
|
|
||||||
""",
|
|
||||||
(workflow_id, image_name),
|
|
||||||
)
|
|
||||||
self._conn.commit()
|
|
||||||
except sqlite3.Error as e:
|
|
||||||
self._conn.rollback()
|
|
||||||
raise e
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
def get_workflow_for_image(
|
|
||||||
self,
|
|
||||||
image_name: str,
|
|
||||||
) -> Optional[str]:
|
|
||||||
"""Gets an image's workflow id, if it has one."""
|
|
||||||
try:
|
|
||||||
self._lock.acquire()
|
|
||||||
self._cursor.execute(
|
|
||||||
"""--sql
|
|
||||||
SELECT workflow_id
|
|
||||||
FROM workflow_images
|
|
||||||
WHERE image_name = ?;
|
|
||||||
""",
|
|
||||||
(image_name,),
|
|
||||||
)
|
|
||||||
result = self._cursor.fetchone()
|
|
||||||
if result is None:
|
|
||||||
return None
|
|
||||||
return cast(str, result[0])
|
|
||||||
except sqlite3.Error as e:
|
|
||||||
self._conn.rollback()
|
|
||||||
raise e
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
@ -0,0 +1,17 @@
|
|||||||
|
# Default Workflows
|
||||||
|
|
||||||
|
Workflows placed in this directory will be synced to the `workflow_library` as
|
||||||
|
_default workflows_ on app startup.
|
||||||
|
|
||||||
|
- Default workflows are not editable by users. If they are loaded and saved,
|
||||||
|
they will save as a copy of the default workflow.
|
||||||
|
- Default workflows must have the `meta.category` property set to `"default"`.
|
||||||
|
An exception will be raised during sync if this is not set correctly.
|
||||||
|
- Default workflows appear on the "Default Workflows" tab of the Workflow
|
||||||
|
Library.
|
||||||
|
|
||||||
|
After adding or updating default workflows, you **must** start the app up and
|
||||||
|
load them to ensure:
|
||||||
|
|
||||||
|
- The workflow loads without warning or errors
|
||||||
|
- The workflow runs successfully
|
@ -0,0 +1,798 @@
|
|||||||
|
{
|
||||||
|
"name": "Text to Image - SD1.5",
|
||||||
|
"author": "InvokeAI",
|
||||||
|
"description": "Sample text to image workflow for Stable Diffusion 1.5/2",
|
||||||
|
"version": "1.1.0",
|
||||||
|
"contact": "invoke@invoke.ai",
|
||||||
|
"tags": "text2image, SD1.5, SD2, default",
|
||||||
|
"notes": "",
|
||||||
|
"exposedFields": [
|
||||||
|
{
|
||||||
|
"nodeId": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
|
||||||
|
"fieldName": "model"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"nodeId": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
|
||||||
|
"fieldName": "prompt"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"nodeId": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
|
||||||
|
"fieldName": "prompt"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"nodeId": "55705012-79b9-4aac-9f26-c0b10309785b",
|
||||||
|
"fieldName": "width"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"nodeId": "55705012-79b9-4aac-9f26-c0b10309785b",
|
||||||
|
"fieldName": "height"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"meta": {
|
||||||
|
"category": "default",
|
||||||
|
"version": "2.0.0"
|
||||||
|
},
|
||||||
|
"nodes": [
|
||||||
|
{
|
||||||
|
"id": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
|
||||||
|
"type": "invocation",
|
||||||
|
"data": {
|
||||||
|
"id": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
|
||||||
|
"type": "compel",
|
||||||
|
"label": "Negative Compel Prompt",
|
||||||
|
"isOpen": true,
|
||||||
|
"notes": "",
|
||||||
|
"isIntermediate": true,
|
||||||
|
"useCache": true,
|
||||||
|
"version": "1.0.0",
|
||||||
|
"nodePack": "invokeai",
|
||||||
|
"inputs": {
|
||||||
|
"prompt": {
|
||||||
|
"id": "7739aff6-26cb-4016-8897-5a1fb2305e4e",
|
||||||
|
"name": "prompt",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "Negative Prompt",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "StringField"
|
||||||
|
},
|
||||||
|
"value": ""
|
||||||
|
},
|
||||||
|
"clip": {
|
||||||
|
"id": "48d23dce-a6ae-472a-9f8c-22a714ea5ce0",
|
||||||
|
"name": "clip",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "ClipField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"conditioning": {
|
||||||
|
"id": "37cf3a9d-f6b7-4b64-8ff6-2558c5ecc447",
|
||||||
|
"name": "conditioning",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "ConditioningField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": 320,
|
||||||
|
"height": 259,
|
||||||
|
"position": {
|
||||||
|
"x": 1000,
|
||||||
|
"y": 350
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "55705012-79b9-4aac-9f26-c0b10309785b",
|
||||||
|
"type": "invocation",
|
||||||
|
"data": {
|
||||||
|
"id": "55705012-79b9-4aac-9f26-c0b10309785b",
|
||||||
|
"type": "noise",
|
||||||
|
"label": "",
|
||||||
|
"isOpen": true,
|
||||||
|
"notes": "",
|
||||||
|
"isIntermediate": true,
|
||||||
|
"useCache": true,
|
||||||
|
"version": "1.0.1",
|
||||||
|
"nodePack": "invokeai",
|
||||||
|
"inputs": {
|
||||||
|
"seed": {
|
||||||
|
"id": "6431737c-918a-425d-a3b4-5d57e2f35d4d",
|
||||||
|
"name": "seed",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
},
|
||||||
|
"value": 0
|
||||||
|
},
|
||||||
|
"width": {
|
||||||
|
"id": "38fc5b66-fe6e-47c8-bba9-daf58e454ed7",
|
||||||
|
"name": "width",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
},
|
||||||
|
"value": 512
|
||||||
|
},
|
||||||
|
"height": {
|
||||||
|
"id": "16298330-e2bf-4872-a514-d6923df53cbb",
|
||||||
|
"name": "height",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
},
|
||||||
|
"value": 512
|
||||||
|
},
|
||||||
|
"use_cpu": {
|
||||||
|
"id": "c7c436d3-7a7a-4e76-91e4-c6deb271623c",
|
||||||
|
"name": "use_cpu",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "BooleanField"
|
||||||
|
},
|
||||||
|
"value": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"noise": {
|
||||||
|
"id": "50f650dc-0184-4e23-a927-0497a96fe954",
|
||||||
|
"name": "noise",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "LatentsField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": {
|
||||||
|
"id": "bb8a452b-133d-42d1-ae4a-3843d7e4109a",
|
||||||
|
"name": "width",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"height": {
|
||||||
|
"id": "35cfaa12-3b8b-4b7a-a884-327ff3abddd9",
|
||||||
|
"name": "height",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": 320,
|
||||||
|
"height": 388,
|
||||||
|
"position": {
|
||||||
|
"x": 600,
|
||||||
|
"y": 325
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
|
||||||
|
"type": "invocation",
|
||||||
|
"data": {
|
||||||
|
"id": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
|
||||||
|
"type": "main_model_loader",
|
||||||
|
"label": "",
|
||||||
|
"isOpen": true,
|
||||||
|
"notes": "",
|
||||||
|
"isIntermediate": true,
|
||||||
|
"useCache": true,
|
||||||
|
"version": "1.0.0",
|
||||||
|
"nodePack": "invokeai",
|
||||||
|
"inputs": {
|
||||||
|
"model": {
|
||||||
|
"id": "993eabd2-40fd-44fe-bce7-5d0c7075ddab",
|
||||||
|
"name": "model",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "MainModelField"
|
||||||
|
},
|
||||||
|
"value": {
|
||||||
|
"model_name": "stable-diffusion-v1-5",
|
||||||
|
"base_model": "sd-1",
|
||||||
|
"model_type": "main"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"unet": {
|
||||||
|
"id": "5c18c9db-328d-46d0-8cb9-143391c410be",
|
||||||
|
"name": "unet",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "UNetField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"clip": {
|
||||||
|
"id": "6effcac0-ec2f-4bf5-a49e-a2c29cf921f4",
|
||||||
|
"name": "clip",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "ClipField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"vae": {
|
||||||
|
"id": "57683ba3-f5f5-4f58-b9a2-4b83dacad4a1",
|
||||||
|
"name": "vae",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "VaeField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": 320,
|
||||||
|
"height": 226,
|
||||||
|
"position": {
|
||||||
|
"x": 600,
|
||||||
|
"y": 25
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
|
||||||
|
"type": "invocation",
|
||||||
|
"data": {
|
||||||
|
"id": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
|
||||||
|
"type": "compel",
|
||||||
|
"label": "Positive Compel Prompt",
|
||||||
|
"isOpen": true,
|
||||||
|
"notes": "",
|
||||||
|
"isIntermediate": true,
|
||||||
|
"useCache": true,
|
||||||
|
"version": "1.0.0",
|
||||||
|
"nodePack": "invokeai",
|
||||||
|
"inputs": {
|
||||||
|
"prompt": {
|
||||||
|
"id": "7739aff6-26cb-4016-8897-5a1fb2305e4e",
|
||||||
|
"name": "prompt",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "Positive Prompt",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "StringField"
|
||||||
|
},
|
||||||
|
"value": "Super cute tiger cub, national geographic award-winning photograph"
|
||||||
|
},
|
||||||
|
"clip": {
|
||||||
|
"id": "48d23dce-a6ae-472a-9f8c-22a714ea5ce0",
|
||||||
|
"name": "clip",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "ClipField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"conditioning": {
|
||||||
|
"id": "37cf3a9d-f6b7-4b64-8ff6-2558c5ecc447",
|
||||||
|
"name": "conditioning",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "ConditioningField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": 320,
|
||||||
|
"height": 259,
|
||||||
|
"position": {
|
||||||
|
"x": 1000,
|
||||||
|
"y": 25
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "ea94bc37-d995-4a83-aa99-4af42479f2f2",
|
||||||
|
"type": "invocation",
|
||||||
|
"data": {
|
||||||
|
"id": "ea94bc37-d995-4a83-aa99-4af42479f2f2",
|
||||||
|
"type": "rand_int",
|
||||||
|
"label": "Random Seed",
|
||||||
|
"isOpen": false,
|
||||||
|
"notes": "",
|
||||||
|
"isIntermediate": true,
|
||||||
|
"useCache": false,
|
||||||
|
"version": "1.0.0",
|
||||||
|
"nodePack": "invokeai",
|
||||||
|
"inputs": {
|
||||||
|
"low": {
|
||||||
|
"id": "3ec65a37-60ba-4b6c-a0b2-553dd7a84b84",
|
||||||
|
"name": "low",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
},
|
||||||
|
"value": 0
|
||||||
|
},
|
||||||
|
"high": {
|
||||||
|
"id": "085f853a-1a5f-494d-8bec-e4ba29a3f2d1",
|
||||||
|
"name": "high",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
},
|
||||||
|
"value": 2147483647
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"value": {
|
||||||
|
"id": "812ade4d-7699-4261-b9fc-a6c9d2ab55ee",
|
||||||
|
"name": "value",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": 320,
|
||||||
|
"height": 32,
|
||||||
|
"position": {
|
||||||
|
"x": 600,
|
||||||
|
"y": 275
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "eea2702a-19fb-45b5-9d75-56b4211ec03c",
|
||||||
|
"type": "invocation",
|
||||||
|
"data": {
|
||||||
|
"id": "eea2702a-19fb-45b5-9d75-56b4211ec03c",
|
||||||
|
"type": "denoise_latents",
|
||||||
|
"label": "",
|
||||||
|
"isOpen": true,
|
||||||
|
"notes": "",
|
||||||
|
"isIntermediate": true,
|
||||||
|
"useCache": true,
|
||||||
|
"version": "1.5.0",
|
||||||
|
"nodePack": "invokeai",
|
||||||
|
"inputs": {
|
||||||
|
"positive_conditioning": {
|
||||||
|
"id": "90b7f4f8-ada7-4028-8100-d2e54f192052",
|
||||||
|
"name": "positive_conditioning",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "ConditioningField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"negative_conditioning": {
|
||||||
|
"id": "9393779e-796c-4f64-b740-902a1177bf53",
|
||||||
|
"name": "negative_conditioning",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "ConditioningField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"noise": {
|
||||||
|
"id": "8e17f1e5-4f98-40b1-b7f4-86aeeb4554c1",
|
||||||
|
"name": "noise",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "LatentsField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"steps": {
|
||||||
|
"id": "9b63302d-6bd2-42c9-ac13-9b1afb51af88",
|
||||||
|
"name": "steps",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
},
|
||||||
|
"value": 50
|
||||||
|
},
|
||||||
|
"cfg_scale": {
|
||||||
|
"id": "87dd04d3-870e-49e1-98bf-af003a810109",
|
||||||
|
"name": "cfg_scale",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": true,
|
||||||
|
"name": "FloatField"
|
||||||
|
},
|
||||||
|
"value": 7.5
|
||||||
|
},
|
||||||
|
"denoising_start": {
|
||||||
|
"id": "f369d80f-4931-4740-9bcd-9f0620719fab",
|
||||||
|
"name": "denoising_start",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "FloatField"
|
||||||
|
},
|
||||||
|
"value": 0
|
||||||
|
},
|
||||||
|
"denoising_end": {
|
||||||
|
"id": "747d10e5-6f02-445c-994c-0604d814de8c",
|
||||||
|
"name": "denoising_end",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "FloatField"
|
||||||
|
},
|
||||||
|
"value": 1
|
||||||
|
},
|
||||||
|
"scheduler": {
|
||||||
|
"id": "1de84a4e-3a24-4ec8-862b-16ce49633b9b",
|
||||||
|
"name": "scheduler",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "SchedulerField"
|
||||||
|
},
|
||||||
|
"value": "unipc"
|
||||||
|
},
|
||||||
|
"unet": {
|
||||||
|
"id": "ffa6fef4-3ce2-4bdb-9296-9a834849489b",
|
||||||
|
"name": "unet",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "UNetField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"control": {
|
||||||
|
"id": "077b64cb-34be-4fcc-83f2-e399807a02bd",
|
||||||
|
"name": "control",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": true,
|
||||||
|
"name": "ControlField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ip_adapter": {
|
||||||
|
"id": "1d6948f7-3a65-4a65-a20c-768b287251aa",
|
||||||
|
"name": "ip_adapter",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": true,
|
||||||
|
"name": "IPAdapterField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"t2i_adapter": {
|
||||||
|
"id": "75e67b09-952f-4083-aaf4-6b804d690412",
|
||||||
|
"name": "t2i_adapter",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": true,
|
||||||
|
"name": "T2IAdapterField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cfg_rescale_multiplier": {
|
||||||
|
"id": "9101f0a6-5fe0-4826-b7b3-47e5d506826c",
|
||||||
|
"name": "cfg_rescale_multiplier",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "FloatField"
|
||||||
|
},
|
||||||
|
"value": 0
|
||||||
|
},
|
||||||
|
"latents": {
|
||||||
|
"id": "334d4ba3-5a99-4195-82c5-86fb3f4f7d43",
|
||||||
|
"name": "latents",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "LatentsField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"denoise_mask": {
|
||||||
|
"id": "0d3dbdbf-b014-4e95-8b18-ff2ff9cb0bfa",
|
||||||
|
"name": "denoise_mask",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "DenoiseMaskField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"latents": {
|
||||||
|
"id": "70fa5bbc-0c38-41bb-861a-74d6d78d2f38",
|
||||||
|
"name": "latents",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "LatentsField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": {
|
||||||
|
"id": "98ee0e6c-82aa-4e8f-8be5-dc5f00ee47f0",
|
||||||
|
"name": "width",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"height": {
|
||||||
|
"id": "e8cb184a-5e1a-47c8-9695-4b8979564f5d",
|
||||||
|
"name": "height",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": 320,
|
||||||
|
"height": 703,
|
||||||
|
"position": {
|
||||||
|
"x": 1400,
|
||||||
|
"y": 25
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
|
||||||
|
"type": "invocation",
|
||||||
|
"data": {
|
||||||
|
"id": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
|
||||||
|
"type": "l2i",
|
||||||
|
"label": "",
|
||||||
|
"isOpen": true,
|
||||||
|
"notes": "",
|
||||||
|
"isIntermediate": false,
|
||||||
|
"useCache": true,
|
||||||
|
"version": "1.2.0",
|
||||||
|
"nodePack": "invokeai",
|
||||||
|
"inputs": {
|
||||||
|
"metadata": {
|
||||||
|
"id": "ab375f12-0042-4410-9182-29e30db82c85",
|
||||||
|
"name": "metadata",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "MetadataField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"latents": {
|
||||||
|
"id": "3a7e7efd-bff5-47d7-9d48-615127afee78",
|
||||||
|
"name": "latents",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "LatentsField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"vae": {
|
||||||
|
"id": "a1f5f7a1-0795-4d58-b036-7820c0b0ef2b",
|
||||||
|
"name": "vae",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "VaeField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tiled": {
|
||||||
|
"id": "da52059a-0cee-4668-942f-519aa794d739",
|
||||||
|
"name": "tiled",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "BooleanField"
|
||||||
|
},
|
||||||
|
"value": false
|
||||||
|
},
|
||||||
|
"fp32": {
|
||||||
|
"id": "c4841df3-b24e-4140-be3b-ccd454c2522c",
|
||||||
|
"name": "fp32",
|
||||||
|
"fieldKind": "input",
|
||||||
|
"label": "",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "BooleanField"
|
||||||
|
},
|
||||||
|
"value": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": {
|
||||||
|
"image": {
|
||||||
|
"id": "72d667d0-cf85-459d-abf2-28bd8b823fe7",
|
||||||
|
"name": "image",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "ImageField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": {
|
||||||
|
"id": "c8c907d8-1066-49d1-b9a6-83bdcd53addc",
|
||||||
|
"name": "width",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"height": {
|
||||||
|
"id": "230f359c-b4ea-436c-b372-332d7dcdca85",
|
||||||
|
"name": "height",
|
||||||
|
"fieldKind": "output",
|
||||||
|
"type": {
|
||||||
|
"isCollection": false,
|
||||||
|
"isCollectionOrScalar": false,
|
||||||
|
"name": "IntegerField"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"width": 320,
|
||||||
|
"height": 266,
|
||||||
|
"position": {
|
||||||
|
"x": 1800,
|
||||||
|
"y": 25
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"edges": [
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-ea94bc37-d995-4a83-aa99-4af42479f2f2value-55705012-79b9-4aac-9f26-c0b10309785bseed",
|
||||||
|
"source": "ea94bc37-d995-4a83-aa99-4af42479f2f2",
|
||||||
|
"target": "55705012-79b9-4aac-9f26-c0b10309785b",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "value",
|
||||||
|
"targetHandle": "seed"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-c8d55139-f380-4695-b7f2-8b3d1e1e3db8clip-7d8bf987-284f-413a-b2fd-d825445a5d6cclip",
|
||||||
|
"source": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
|
||||||
|
"target": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "clip",
|
||||||
|
"targetHandle": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-c8d55139-f380-4695-b7f2-8b3d1e1e3db8clip-93dc02a4-d05b-48ed-b99c-c9b616af3402clip",
|
||||||
|
"source": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
|
||||||
|
"target": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "clip",
|
||||||
|
"targetHandle": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-55705012-79b9-4aac-9f26-c0b10309785bnoise-eea2702a-19fb-45b5-9d75-56b4211ec03cnoise",
|
||||||
|
"source": "55705012-79b9-4aac-9f26-c0b10309785b",
|
||||||
|
"target": "eea2702a-19fb-45b5-9d75-56b4211ec03c",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "noise",
|
||||||
|
"targetHandle": "noise"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-7d8bf987-284f-413a-b2fd-d825445a5d6cconditioning-eea2702a-19fb-45b5-9d75-56b4211ec03cpositive_conditioning",
|
||||||
|
"source": "7d8bf987-284f-413a-b2fd-d825445a5d6c",
|
||||||
|
"target": "eea2702a-19fb-45b5-9d75-56b4211ec03c",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "conditioning",
|
||||||
|
"targetHandle": "positive_conditioning"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-93dc02a4-d05b-48ed-b99c-c9b616af3402conditioning-eea2702a-19fb-45b5-9d75-56b4211ec03cnegative_conditioning",
|
||||||
|
"source": "93dc02a4-d05b-48ed-b99c-c9b616af3402",
|
||||||
|
"target": "eea2702a-19fb-45b5-9d75-56b4211ec03c",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "conditioning",
|
||||||
|
"targetHandle": "negative_conditioning"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-c8d55139-f380-4695-b7f2-8b3d1e1e3db8unet-eea2702a-19fb-45b5-9d75-56b4211ec03cunet",
|
||||||
|
"source": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
|
||||||
|
"target": "eea2702a-19fb-45b5-9d75-56b4211ec03c",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "unet",
|
||||||
|
"targetHandle": "unet"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-eea2702a-19fb-45b5-9d75-56b4211ec03clatents-58c957f5-0d01-41fc-a803-b2bbf0413d4flatents",
|
||||||
|
"source": "eea2702a-19fb-45b5-9d75-56b4211ec03c",
|
||||||
|
"target": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "latents",
|
||||||
|
"targetHandle": "latents"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactflow__edge-c8d55139-f380-4695-b7f2-8b3d1e1e3db8vae-58c957f5-0d01-41fc-a803-b2bbf0413d4fvae",
|
||||||
|
"source": "c8d55139-f380-4695-b7f2-8b3d1e1e3db8",
|
||||||
|
"target": "58c957f5-0d01-41fc-a803-b2bbf0413d4f",
|
||||||
|
"type": "default",
|
||||||
|
"sourceHandle": "vae",
|
||||||
|
"targetHandle": "vae"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
File diff suppressed because it is too large
Load Diff
@ -1,17 +1,50 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import WorkflowField
|
from invokeai.app.services.shared.pagination import PaginatedResults
|
||||||
|
from invokeai.app.services.shared.sqlite.sqlite_common import SQLiteDirection
|
||||||
|
from invokeai.app.services.workflow_records.workflow_records_common import (
|
||||||
|
Workflow,
|
||||||
|
WorkflowCategory,
|
||||||
|
WorkflowRecordDTO,
|
||||||
|
WorkflowRecordListItemDTO,
|
||||||
|
WorkflowRecordOrderBy,
|
||||||
|
WorkflowWithoutID,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class WorkflowRecordsStorageBase(ABC):
|
class WorkflowRecordsStorageBase(ABC):
|
||||||
"""Base class for workflow storage services."""
|
"""Base class for workflow storage services."""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get(self, workflow_id: str) -> WorkflowField:
|
def get(self, workflow_id: str) -> WorkflowRecordDTO:
|
||||||
"""Get workflow by id."""
|
"""Get workflow by id."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def create(self, workflow: WorkflowField) -> WorkflowField:
|
def create(self, workflow: WorkflowWithoutID) -> WorkflowRecordDTO:
|
||||||
"""Creates a workflow."""
|
"""Creates a workflow."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def update(self, workflow: Workflow) -> WorkflowRecordDTO:
|
||||||
|
"""Updates a workflow."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def delete(self, workflow_id: str) -> None:
|
||||||
|
"""Deletes a workflow."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_many(
|
||||||
|
self,
|
||||||
|
page: int,
|
||||||
|
per_page: int,
|
||||||
|
order_by: WorkflowRecordOrderBy,
|
||||||
|
direction: SQLiteDirection,
|
||||||
|
category: WorkflowCategory,
|
||||||
|
query: Optional[str],
|
||||||
|
) -> PaginatedResults[WorkflowRecordListItemDTO]:
|
||||||
|
"""Gets many workflows."""
|
||||||
|
pass
|
||||||
|
@ -1,2 +1,105 @@
|
|||||||
|
import datetime
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Union
|
||||||
|
|
||||||
|
import semver
|
||||||
|
from pydantic import BaseModel, Field, JsonValue, TypeAdapter, field_validator
|
||||||
|
|
||||||
|
from invokeai.app.util.metaenum import MetaEnum
|
||||||
|
from invokeai.app.util.misc import uuid_string
|
||||||
|
|
||||||
|
__workflow_meta_version__ = semver.Version.parse("1.0.0")
|
||||||
|
|
||||||
|
|
||||||
|
class ExposedField(BaseModel):
|
||||||
|
nodeId: str
|
||||||
|
fieldName: str
|
||||||
|
|
||||||
|
|
||||||
class WorkflowNotFoundError(Exception):
|
class WorkflowNotFoundError(Exception):
|
||||||
"""Raised when a workflow is not found"""
|
"""Raised when a workflow is not found"""
|
||||||
|
|
||||||
|
|
||||||
|
class WorkflowRecordOrderBy(str, Enum, metaclass=MetaEnum):
|
||||||
|
"""The order by options for workflow records"""
|
||||||
|
|
||||||
|
CreatedAt = "created_at"
|
||||||
|
UpdatedAt = "updated_at"
|
||||||
|
OpenedAt = "opened_at"
|
||||||
|
Name = "name"
|
||||||
|
|
||||||
|
|
||||||
|
class WorkflowCategory(str, Enum, metaclass=MetaEnum):
|
||||||
|
User = "user"
|
||||||
|
Default = "default"
|
||||||
|
|
||||||
|
|
||||||
|
class WorkflowMeta(BaseModel):
|
||||||
|
version: str = Field(description="The version of the workflow schema.")
|
||||||
|
category: WorkflowCategory = Field(
|
||||||
|
default=WorkflowCategory.User, description="The category of the workflow (user or default)."
|
||||||
|
)
|
||||||
|
|
||||||
|
@field_validator("version")
|
||||||
|
def validate_version(cls, version: str):
|
||||||
|
try:
|
||||||
|
semver.Version.parse(version)
|
||||||
|
return version
|
||||||
|
except Exception:
|
||||||
|
raise ValueError(f"Invalid workflow meta version: {version}")
|
||||||
|
|
||||||
|
def to_semver(self) -> semver.Version:
|
||||||
|
return semver.Version.parse(self.version)
|
||||||
|
|
||||||
|
|
||||||
|
class WorkflowWithoutID(BaseModel):
|
||||||
|
name: str = Field(description="The name of the workflow.")
|
||||||
|
author: str = Field(description="The author of the workflow.")
|
||||||
|
description: str = Field(description="The description of the workflow.")
|
||||||
|
version: str = Field(description="The version of the workflow.")
|
||||||
|
contact: str = Field(description="The contact of the workflow.")
|
||||||
|
tags: str = Field(description="The tags of the workflow.")
|
||||||
|
notes: str = Field(description="The notes of the workflow.")
|
||||||
|
exposedFields: list[ExposedField] = Field(description="The exposed fields of the workflow.")
|
||||||
|
meta: WorkflowMeta = Field(description="The meta of the workflow.")
|
||||||
|
# TODO: nodes and edges are very loosely typed
|
||||||
|
nodes: list[dict[str, JsonValue]] = Field(description="The nodes of the workflow.")
|
||||||
|
edges: list[dict[str, JsonValue]] = Field(description="The edges of the workflow.")
|
||||||
|
|
||||||
|
|
||||||
|
WorkflowWithoutIDValidator = TypeAdapter(WorkflowWithoutID)
|
||||||
|
|
||||||
|
|
||||||
|
class Workflow(WorkflowWithoutID):
|
||||||
|
id: str = Field(default_factory=uuid_string, description="The id of the workflow.")
|
||||||
|
|
||||||
|
|
||||||
|
WorkflowValidator = TypeAdapter(Workflow)
|
||||||
|
|
||||||
|
|
||||||
|
class WorkflowRecordDTOBase(BaseModel):
|
||||||
|
workflow_id: str = Field(description="The id of the workflow.")
|
||||||
|
name: str = Field(description="The name of the workflow.")
|
||||||
|
created_at: Union[datetime.datetime, str] = Field(description="The created timestamp of the workflow.")
|
||||||
|
updated_at: Union[datetime.datetime, str] = Field(description="The updated timestamp of the workflow.")
|
||||||
|
opened_at: Union[datetime.datetime, str] = Field(description="The opened timestamp of the workflow.")
|
||||||
|
|
||||||
|
|
||||||
|
class WorkflowRecordDTO(WorkflowRecordDTOBase):
|
||||||
|
workflow: Workflow = Field(description="The workflow.")
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_dict(cls, data: dict[str, Any]) -> "WorkflowRecordDTO":
|
||||||
|
data["workflow"] = WorkflowValidator.validate_json(data.get("workflow", ""))
|
||||||
|
return WorkflowRecordDTOValidator.validate_python(data)
|
||||||
|
|
||||||
|
|
||||||
|
WorkflowRecordDTOValidator = TypeAdapter(WorkflowRecordDTO)
|
||||||
|
|
||||||
|
|
||||||
|
class WorkflowRecordListItemDTO(WorkflowRecordDTOBase):
|
||||||
|
description: str = Field(description="The description of the workflow.")
|
||||||
|
category: WorkflowCategory = Field(description="The description of the workflow.")
|
||||||
|
|
||||||
|
|
||||||
|
WorkflowRecordListItemDTOValidator = TypeAdapter(WorkflowRecordListItemDTO)
|
||||||
|
@ -1,20 +1,25 @@
|
|||||||
import sqlite3
|
from pathlib import Path
|
||||||
import threading
|
from typing import Optional
|
||||||
|
|
||||||
from invokeai.app.invocations.baseinvocation import WorkflowField, WorkflowFieldValidator
|
|
||||||
from invokeai.app.services.invoker import Invoker
|
from invokeai.app.services.invoker import Invoker
|
||||||
from invokeai.app.services.shared.sqlite import SqliteDatabase
|
from invokeai.app.services.shared.pagination import PaginatedResults
|
||||||
|
from invokeai.app.services.shared.sqlite.sqlite_common import SQLiteDirection
|
||||||
|
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
from invokeai.app.services.workflow_records.workflow_records_base import WorkflowRecordsStorageBase
|
from invokeai.app.services.workflow_records.workflow_records_base import WorkflowRecordsStorageBase
|
||||||
from invokeai.app.services.workflow_records.workflow_records_common import WorkflowNotFoundError
|
from invokeai.app.services.workflow_records.workflow_records_common import (
|
||||||
from invokeai.app.util.misc import uuid_string
|
Workflow,
|
||||||
|
WorkflowCategory,
|
||||||
|
WorkflowNotFoundError,
|
||||||
|
WorkflowRecordDTO,
|
||||||
|
WorkflowRecordListItemDTO,
|
||||||
|
WorkflowRecordListItemDTOValidator,
|
||||||
|
WorkflowRecordOrderBy,
|
||||||
|
WorkflowValidator,
|
||||||
|
WorkflowWithoutID,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class SqliteWorkflowRecordsStorage(WorkflowRecordsStorageBase):
|
class SqliteWorkflowRecordsStorage(WorkflowRecordsStorageBase):
|
||||||
_invoker: Invoker
|
|
||||||
_conn: sqlite3.Connection
|
|
||||||
_cursor: sqlite3.Cursor
|
|
||||||
_lock: threading.RLock
|
|
||||||
|
|
||||||
def __init__(self, db: SqliteDatabase) -> None:
|
def __init__(self, db: SqliteDatabase) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self._lock = db.lock
|
self._lock = db.lock
|
||||||
@ -24,14 +29,25 @@ class SqliteWorkflowRecordsStorage(WorkflowRecordsStorageBase):
|
|||||||
|
|
||||||
def start(self, invoker: Invoker) -> None:
|
def start(self, invoker: Invoker) -> None:
|
||||||
self._invoker = invoker
|
self._invoker = invoker
|
||||||
|
self._sync_default_workflows()
|
||||||
|
|
||||||
def get(self, workflow_id: str) -> WorkflowField:
|
def get(self, workflow_id: str) -> WorkflowRecordDTO:
|
||||||
|
"""Gets a workflow by ID. Updates the opened_at column."""
|
||||||
try:
|
try:
|
||||||
self._lock.acquire()
|
self._lock.acquire()
|
||||||
self._cursor.execute(
|
self._cursor.execute(
|
||||||
"""--sql
|
"""--sql
|
||||||
SELECT workflow
|
UPDATE workflow_library
|
||||||
FROM workflows
|
SET opened_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')
|
||||||
|
WHERE workflow_id = ?;
|
||||||
|
""",
|
||||||
|
(workflow_id,),
|
||||||
|
)
|
||||||
|
self._conn.commit()
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
SELECT workflow_id, workflow, name, created_at, updated_at, opened_at
|
||||||
|
FROM workflow_library
|
||||||
WHERE workflow_id = ?;
|
WHERE workflow_id = ?;
|
||||||
""",
|
""",
|
||||||
(workflow_id,),
|
(workflow_id,),
|
||||||
@ -39,25 +55,28 @@ class SqliteWorkflowRecordsStorage(WorkflowRecordsStorageBase):
|
|||||||
row = self._cursor.fetchone()
|
row = self._cursor.fetchone()
|
||||||
if row is None:
|
if row is None:
|
||||||
raise WorkflowNotFoundError(f"Workflow with id {workflow_id} not found")
|
raise WorkflowNotFoundError(f"Workflow with id {workflow_id} not found")
|
||||||
return WorkflowFieldValidator.validate_json(row[0])
|
return WorkflowRecordDTO.from_dict(dict(row))
|
||||||
except Exception:
|
except Exception:
|
||||||
self._conn.rollback()
|
self._conn.rollback()
|
||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
self._lock.release()
|
self._lock.release()
|
||||||
|
|
||||||
def create(self, workflow: WorkflowField) -> WorkflowField:
|
def create(self, workflow: WorkflowWithoutID) -> WorkflowRecordDTO:
|
||||||
try:
|
try:
|
||||||
# workflows do not have ids until they are saved
|
# Only user workflows may be created by this method
|
||||||
workflow_id = uuid_string()
|
assert workflow.meta.category is WorkflowCategory.User
|
||||||
workflow.root["id"] = workflow_id
|
workflow_with_id = WorkflowValidator.validate_python(workflow.model_dump())
|
||||||
self._lock.acquire()
|
self._lock.acquire()
|
||||||
self._cursor.execute(
|
self._cursor.execute(
|
||||||
"""--sql
|
"""--sql
|
||||||
INSERT INTO workflows(workflow)
|
INSERT OR IGNORE INTO workflow_library (
|
||||||
VALUES (?);
|
workflow_id,
|
||||||
|
workflow
|
||||||
|
)
|
||||||
|
VALUES (?, ?);
|
||||||
""",
|
""",
|
||||||
(workflow.model_dump_json(),),
|
(workflow_with_id.id, workflow_with_id.model_dump_json()),
|
||||||
)
|
)
|
||||||
self._conn.commit()
|
self._conn.commit()
|
||||||
except Exception:
|
except Exception:
|
||||||
@ -65,35 +84,231 @@ class SqliteWorkflowRecordsStorage(WorkflowRecordsStorageBase):
|
|||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
self._lock.release()
|
self._lock.release()
|
||||||
return self.get(workflow_id)
|
return self.get(workflow_with_id.id)
|
||||||
|
|
||||||
|
def update(self, workflow: Workflow) -> WorkflowRecordDTO:
|
||||||
|
try:
|
||||||
|
self._lock.acquire()
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
UPDATE workflow_library
|
||||||
|
SET workflow = ?
|
||||||
|
WHERE workflow_id = ? AND category = 'user';
|
||||||
|
""",
|
||||||
|
(workflow.model_dump_json(), workflow.id),
|
||||||
|
)
|
||||||
|
self._conn.commit()
|
||||||
|
except Exception:
|
||||||
|
self._conn.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
return self.get(workflow.id)
|
||||||
|
|
||||||
|
def delete(self, workflow_id: str) -> None:
|
||||||
|
try:
|
||||||
|
self._lock.acquire()
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
DELETE from workflow_library
|
||||||
|
WHERE workflow_id = ? AND category = 'user';
|
||||||
|
""",
|
||||||
|
(workflow_id,),
|
||||||
|
)
|
||||||
|
self._conn.commit()
|
||||||
|
except Exception:
|
||||||
|
self._conn.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_many(
|
||||||
|
self,
|
||||||
|
page: int,
|
||||||
|
per_page: int,
|
||||||
|
order_by: WorkflowRecordOrderBy,
|
||||||
|
direction: SQLiteDirection,
|
||||||
|
category: WorkflowCategory,
|
||||||
|
query: Optional[str] = None,
|
||||||
|
) -> PaginatedResults[WorkflowRecordListItemDTO]:
|
||||||
|
try:
|
||||||
|
self._lock.acquire()
|
||||||
|
# sanitize!
|
||||||
|
assert order_by in WorkflowRecordOrderBy
|
||||||
|
assert direction in SQLiteDirection
|
||||||
|
assert category in WorkflowCategory
|
||||||
|
count_query = "SELECT COUNT(*) FROM workflow_library WHERE category = ?"
|
||||||
|
main_query = """
|
||||||
|
SELECT
|
||||||
|
workflow_id,
|
||||||
|
category,
|
||||||
|
name,
|
||||||
|
description,
|
||||||
|
created_at,
|
||||||
|
updated_at,
|
||||||
|
opened_at
|
||||||
|
FROM workflow_library
|
||||||
|
WHERE category = ?
|
||||||
|
"""
|
||||||
|
main_params: list[int | str] = [category.value]
|
||||||
|
count_params: list[int | str] = [category.value]
|
||||||
|
stripped_query = query.strip() if query else None
|
||||||
|
if stripped_query:
|
||||||
|
wildcard_query = "%" + stripped_query + "%"
|
||||||
|
main_query += " AND name LIKE ? OR description LIKE ? "
|
||||||
|
count_query += " AND name LIKE ? OR description LIKE ?;"
|
||||||
|
main_params.extend([wildcard_query, wildcard_query])
|
||||||
|
count_params.extend([wildcard_query, wildcard_query])
|
||||||
|
|
||||||
|
main_query += f" ORDER BY {order_by.value} {direction.value} LIMIT ? OFFSET ?;"
|
||||||
|
main_params.extend([per_page, page * per_page])
|
||||||
|
self._cursor.execute(main_query, main_params)
|
||||||
|
rows = self._cursor.fetchall()
|
||||||
|
workflows = [WorkflowRecordListItemDTOValidator.validate_python(dict(row)) for row in rows]
|
||||||
|
|
||||||
|
self._cursor.execute(count_query, count_params)
|
||||||
|
total = self._cursor.fetchone()[0]
|
||||||
|
pages = int(total / per_page) + 1
|
||||||
|
|
||||||
|
return PaginatedResults(
|
||||||
|
items=workflows,
|
||||||
|
page=page,
|
||||||
|
per_page=per_page,
|
||||||
|
pages=pages,
|
||||||
|
total=total,
|
||||||
|
)
|
||||||
|
except Exception:
|
||||||
|
self._conn.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
|
||||||
|
def _sync_default_workflows(self) -> None:
|
||||||
|
"""Syncs default workflows to the database. Internal use only."""
|
||||||
|
|
||||||
|
"""
|
||||||
|
An enhancement might be to only update workflows that have changed. This would require stable
|
||||||
|
default workflow IDs, and properly incrementing the workflow version.
|
||||||
|
|
||||||
|
It's much simpler to just replace them all with whichever workflows are in the directory.
|
||||||
|
|
||||||
|
The downside is that the `updated_at` and `opened_at` timestamps for default workflows are
|
||||||
|
meaningless, as they are overwritten every time the server starts.
|
||||||
|
"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
self._lock.acquire()
|
||||||
|
workflows: list[Workflow] = []
|
||||||
|
workflows_dir = Path(__file__).parent / Path("default_workflows")
|
||||||
|
workflow_paths = workflows_dir.glob("*.json")
|
||||||
|
for path in workflow_paths:
|
||||||
|
bytes_ = path.read_bytes()
|
||||||
|
workflow = WorkflowValidator.validate_json(bytes_)
|
||||||
|
workflows.append(workflow)
|
||||||
|
# Only default workflows may be managed by this method
|
||||||
|
assert all(w.meta.category is WorkflowCategory.Default for w in workflows)
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
DELETE FROM workflow_library
|
||||||
|
WHERE category = 'default';
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
for w in workflows:
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
INSERT OR REPLACE INTO workflow_library (
|
||||||
|
workflow_id,
|
||||||
|
workflow
|
||||||
|
)
|
||||||
|
VALUES (?, ?);
|
||||||
|
""",
|
||||||
|
(w.id, w.model_dump_json()),
|
||||||
|
)
|
||||||
|
self._conn.commit()
|
||||||
|
except Exception:
|
||||||
|
self._conn.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
self._lock.release()
|
||||||
|
|
||||||
def _create_tables(self) -> None:
|
def _create_tables(self) -> None:
|
||||||
try:
|
try:
|
||||||
self._lock.acquire()
|
self._lock.acquire()
|
||||||
self._cursor.execute(
|
self._cursor.execute(
|
||||||
"""--sql
|
"""--sql
|
||||||
CREATE TABLE IF NOT EXISTS workflows (
|
CREATE TABLE IF NOT EXISTS workflow_library (
|
||||||
|
workflow_id TEXT NOT NULL PRIMARY KEY,
|
||||||
workflow TEXT NOT NULL,
|
workflow TEXT NOT NULL,
|
||||||
workflow_id TEXT GENERATED ALWAYS AS (json_extract(workflow, '$.id')) VIRTUAL NOT NULL UNIQUE, -- gets implicit index
|
|
||||||
created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
|
created_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
|
||||||
updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')) -- updated via trigger
|
-- updated via trigger
|
||||||
|
updated_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
|
||||||
|
-- updated manually when retrieving workflow
|
||||||
|
opened_at DATETIME NOT NULL DEFAULT(STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')),
|
||||||
|
-- Generated columns, needed for indexing and searching
|
||||||
|
category TEXT GENERATED ALWAYS as (json_extract(workflow, '$.meta.category')) VIRTUAL NOT NULL,
|
||||||
|
name TEXT GENERATED ALWAYS as (json_extract(workflow, '$.name')) VIRTUAL NOT NULL,
|
||||||
|
description TEXT GENERATED ALWAYS as (json_extract(workflow, '$.description')) VIRTUAL NOT NULL
|
||||||
);
|
);
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
self._cursor.execute(
|
self._cursor.execute(
|
||||||
"""--sql
|
"""--sql
|
||||||
CREATE TRIGGER IF NOT EXISTS tg_workflows_updated_at
|
CREATE TRIGGER IF NOT EXISTS tg_workflow_library_updated_at
|
||||||
AFTER UPDATE
|
AFTER UPDATE
|
||||||
ON workflows FOR EACH ROW
|
ON workflow_library FOR EACH ROW
|
||||||
BEGIN
|
BEGIN
|
||||||
UPDATE workflows
|
UPDATE workflow_library
|
||||||
SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')
|
SET updated_at = STRFTIME('%Y-%m-%d %H:%M:%f', 'NOW')
|
||||||
WHERE workflow_id = old.workflow_id;
|
WHERE workflow_id = old.workflow_id;
|
||||||
END;
|
END;
|
||||||
"""
|
"""
|
||||||
)
|
)
|
||||||
|
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_workflow_library_created_at ON workflow_library(created_at);
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_workflow_library_updated_at ON workflow_library(updated_at);
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_workflow_library_opened_at ON workflow_library(opened_at);
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_workflow_library_category ON workflow_library(category);
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_workflow_library_name ON workflow_library(name);
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_workflow_library_description ON workflow_library(description);
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
|
# We do not need the original `workflows` table or `workflow_images` junction table.
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
DROP TABLE IF EXISTS workflow_images;
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
self._cursor.execute(
|
||||||
|
"""--sql
|
||||||
|
DROP TABLE IF EXISTS workflows;
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
|
||||||
self._conn.commit()
|
self._conn.commit()
|
||||||
except Exception:
|
except Exception:
|
||||||
self._conn.rollback()
|
self._conn.rollback()
|
||||||
|
@ -11,7 +11,7 @@ from invokeai.app.services.model_records import (
|
|||||||
DuplicateModelException,
|
DuplicateModelException,
|
||||||
ModelRecordServiceSQL,
|
ModelRecordServiceSQL,
|
||||||
)
|
)
|
||||||
from invokeai.app.services.shared.sqlite import SqliteDatabase
|
from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase
|
||||||
from invokeai.backend.model_manager.config import (
|
from invokeai.backend.model_manager.config import (
|
||||||
AnyModelConfig,
|
AnyModelConfig,
|
||||||
BaseModelType,
|
BaseModelType,
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
dist/
|
dist/
|
||||||
public/locales/*.json
|
public/locales/*.json
|
||||||
|
!public/locales/en.json
|
||||||
.husky/
|
.husky/
|
||||||
node_modules/
|
node_modules/
|
||||||
patches/
|
patches/
|
||||||
|
@ -67,7 +67,9 @@
|
|||||||
"controlNet": "ControlNet",
|
"controlNet": "ControlNet",
|
||||||
"controlAdapter": "Control Adapter",
|
"controlAdapter": "Control Adapter",
|
||||||
"data": "Data",
|
"data": "Data",
|
||||||
|
"delete": "Delete",
|
||||||
"details": "Details",
|
"details": "Details",
|
||||||
|
"direction": "Direction",
|
||||||
"ipAdapter": "IP Adapter",
|
"ipAdapter": "IP Adapter",
|
||||||
"t2iAdapter": "T2I Adapter",
|
"t2iAdapter": "T2I Adapter",
|
||||||
"darkMode": "Dark Mode",
|
"darkMode": "Dark Mode",
|
||||||
@ -115,6 +117,7 @@
|
|||||||
"nodesDesc": "A node based system for the generation of images is under development currently. Stay tuned for updates about this amazing feature.",
|
"nodesDesc": "A node based system for the generation of images is under development currently. Stay tuned for updates about this amazing feature.",
|
||||||
"notInstalled": "Not $t(common.installed)",
|
"notInstalled": "Not $t(common.installed)",
|
||||||
"openInNewTab": "Open in New Tab",
|
"openInNewTab": "Open in New Tab",
|
||||||
|
"orderBy": "Order By",
|
||||||
"outpaint": "outpaint",
|
"outpaint": "outpaint",
|
||||||
"outputs": "Outputs",
|
"outputs": "Outputs",
|
||||||
"postProcessDesc1": "Invoke AI offers a wide variety of post processing features. Image Upscaling and Face Restoration are already available in the WebUI. You can access them from the Advanced Options menu of the Text To Image and Image To Image tabs. You can also process images directly, using the image action buttons above the current image display or in the viewer.",
|
"postProcessDesc1": "Invoke AI offers a wide variety of post processing features. Image Upscaling and Face Restoration are already available in the WebUI. You can access them from the Advanced Options menu of the Text To Image and Image To Image tabs. You can also process images directly, using the image action buttons above the current image display or in the viewer.",
|
||||||
@ -125,6 +128,8 @@
|
|||||||
"random": "Random",
|
"random": "Random",
|
||||||
"reportBugLabel": "Report Bug",
|
"reportBugLabel": "Report Bug",
|
||||||
"safetensors": "Safetensors",
|
"safetensors": "Safetensors",
|
||||||
|
"save": "Save",
|
||||||
|
"saveAs": "Save As",
|
||||||
"settingsLabel": "Settings",
|
"settingsLabel": "Settings",
|
||||||
"simple": "Simple",
|
"simple": "Simple",
|
||||||
"somethingWentWrong": "Something went wrong",
|
"somethingWentWrong": "Something went wrong",
|
||||||
@ -161,8 +166,12 @@
|
|||||||
"txt2img": "Text To Image",
|
"txt2img": "Text To Image",
|
||||||
"unifiedCanvas": "Unified Canvas",
|
"unifiedCanvas": "Unified Canvas",
|
||||||
"unknown": "Unknown",
|
"unknown": "Unknown",
|
||||||
"unknownError": "Unknown Error",
|
"upload": "Upload",
|
||||||
"upload": "Upload"
|
"updated": "Updated",
|
||||||
|
"created": "Created",
|
||||||
|
"prevPage": "Previous Page",
|
||||||
|
"nextPage": "Next Page",
|
||||||
|
"unknownError": "Unknown Error"
|
||||||
},
|
},
|
||||||
"controlnet": {
|
"controlnet": {
|
||||||
"controlAdapter_one": "Control Adapter",
|
"controlAdapter_one": "Control Adapter",
|
||||||
@ -940,9 +949,9 @@
|
|||||||
"problemSettingTitle": "Problem Setting Title",
|
"problemSettingTitle": "Problem Setting Title",
|
||||||
"reloadNodeTemplates": "Reload Node Templates",
|
"reloadNodeTemplates": "Reload Node Templates",
|
||||||
"removeLinearView": "Remove from Linear View",
|
"removeLinearView": "Remove from Linear View",
|
||||||
"resetWorkflow": "Reset Workflow",
|
"resetWorkflow": "Reset Workflow Editor",
|
||||||
"resetWorkflowDesc": "Are you sure you want to reset this workflow?",
|
"resetWorkflowDesc": "Are you sure you want to reset the Workflow Editor?",
|
||||||
"resetWorkflowDesc2": "Resetting the workflow will clear all nodes, edges and workflow details.",
|
"resetWorkflowDesc2": "Resetting the Workflow Editor will clear all nodes, edges and workflow details. Saved workflows will not be affected.",
|
||||||
"scheduler": "Scheduler",
|
"scheduler": "Scheduler",
|
||||||
"schedulerDescription": "TODO",
|
"schedulerDescription": "TODO",
|
||||||
"sDXLMainModelField": "SDXL Model",
|
"sDXLMainModelField": "SDXL Model",
|
||||||
@ -1269,7 +1278,6 @@
|
|||||||
"modelAddedSimple": "Model Added",
|
"modelAddedSimple": "Model Added",
|
||||||
"modelAddFailed": "Model Add Failed",
|
"modelAddFailed": "Model Add Failed",
|
||||||
"nodesBrokenConnections": "Cannot load. Some connections are broken.",
|
"nodesBrokenConnections": "Cannot load. Some connections are broken.",
|
||||||
"nodesCleared": "Nodes Cleared",
|
|
||||||
"nodesCorruptedGraph": "Cannot load. Graph seems to be corrupted.",
|
"nodesCorruptedGraph": "Cannot load. Graph seems to be corrupted.",
|
||||||
"nodesLoaded": "Nodes Loaded",
|
"nodesLoaded": "Nodes Loaded",
|
||||||
"nodesLoadedFailed": "Failed To Load Nodes",
|
"nodesLoadedFailed": "Failed To Load Nodes",
|
||||||
@ -1318,7 +1326,10 @@
|
|||||||
"uploadFailedInvalidUploadDesc": "Must be single PNG or JPEG image",
|
"uploadFailedInvalidUploadDesc": "Must be single PNG or JPEG image",
|
||||||
"uploadFailedUnableToLoadDesc": "Unable to load file",
|
"uploadFailedUnableToLoadDesc": "Unable to load file",
|
||||||
"upscalingFailed": "Upscaling Failed",
|
"upscalingFailed": "Upscaling Failed",
|
||||||
"workflowLoaded": "Workflow Loaded"
|
"workflowLoaded": "Workflow Loaded",
|
||||||
|
"problemRetrievingWorkflow": "Problem Retrieving Workflow",
|
||||||
|
"workflowDeleted": "Workflow Deleted",
|
||||||
|
"problemDeletingWorkflow": "Problem Deleting Workflow"
|
||||||
},
|
},
|
||||||
"tooltip": {
|
"tooltip": {
|
||||||
"feature": {
|
"feature": {
|
||||||
@ -1613,5 +1624,33 @@
|
|||||||
"showIntermediates": "Show Intermediates",
|
"showIntermediates": "Show Intermediates",
|
||||||
"snapToGrid": "Snap to Grid",
|
"snapToGrid": "Snap to Grid",
|
||||||
"undo": "Undo"
|
"undo": "Undo"
|
||||||
|
},
|
||||||
|
"workflows": {
|
||||||
|
"workflows": "Workflows",
|
||||||
|
"workflowLibrary": "Workflow Library",
|
||||||
|
"userWorkflows": "My Workflows",
|
||||||
|
"defaultWorkflows": "Default Workflows",
|
||||||
|
"openWorkflow": "Open Workflow",
|
||||||
|
"uploadWorkflow": "Upload Workflow",
|
||||||
|
"deleteWorkflow": "Delete Workflow",
|
||||||
|
"unnamedWorkflow": "Unnamed Workflow",
|
||||||
|
"downloadWorkflow": "Download Workflow",
|
||||||
|
"saveWorkflow": "Save Workflow",
|
||||||
|
"saveWorkflowAs": "Save Workflow As",
|
||||||
|
"problemSavingWorkflow": "Problem Saving Workflow",
|
||||||
|
"workflowSaved": "Workflow Saved",
|
||||||
|
"noRecentWorkflows": "No Recent Workflows",
|
||||||
|
"noUserWorkflows": "No User Workflows",
|
||||||
|
"noSystemWorkflows": "No System Workflows",
|
||||||
|
"problemLoading": "Problem Loading Workflows",
|
||||||
|
"loading": "Loading Workflows",
|
||||||
|
"noDescription": "No description",
|
||||||
|
"searchWorkflows": "Search Workflows",
|
||||||
|
"clearWorkflowSearchFilter": "Clear Workflow Search Filter",
|
||||||
|
"workflowName": "Workflow Name",
|
||||||
|
"workflowEditorReset": "Workflow Editor Reset"
|
||||||
|
},
|
||||||
|
"app": {
|
||||||
|
"storeNotInitialized": "Store is not initialized"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -3,6 +3,7 @@ import { buildNodesGraph } from 'features/nodes/util/graph/buildNodesGraph';
|
|||||||
import { queueApi } from 'services/api/endpoints/queue';
|
import { queueApi } from 'services/api/endpoints/queue';
|
||||||
import { BatchConfig } from 'services/api/types';
|
import { BatchConfig } from 'services/api/types';
|
||||||
import { startAppListening } from '..';
|
import { startAppListening } from '..';
|
||||||
|
import { buildWorkflow } from 'features/nodes/util/workflow/buildWorkflow';
|
||||||
|
|
||||||
export const addEnqueueRequestedNodes = () => {
|
export const addEnqueueRequestedNodes = () => {
|
||||||
startAppListening({
|
startAppListening({
|
||||||
@ -10,10 +11,18 @@ export const addEnqueueRequestedNodes = () => {
|
|||||||
enqueueRequested.match(action) && action.payload.tabName === 'nodes',
|
enqueueRequested.match(action) && action.payload.tabName === 'nodes',
|
||||||
effect: async (action, { getState, dispatch }) => {
|
effect: async (action, { getState, dispatch }) => {
|
||||||
const state = getState();
|
const state = getState();
|
||||||
|
const { nodes, edges } = state.nodes;
|
||||||
|
const workflow = state.workflow;
|
||||||
const graph = buildNodesGraph(state.nodes);
|
const graph = buildNodesGraph(state.nodes);
|
||||||
|
const builtWorkflow = buildWorkflow({
|
||||||
|
nodes,
|
||||||
|
edges,
|
||||||
|
workflow,
|
||||||
|
});
|
||||||
const batchConfig: BatchConfig = {
|
const batchConfig: BatchConfig = {
|
||||||
batch: {
|
batch: {
|
||||||
graph,
|
graph,
|
||||||
|
workflow: builtWorkflow,
|
||||||
runs: state.generation.iterations,
|
runs: state.generation.iterations,
|
||||||
},
|
},
|
||||||
prepend: action.payload.prepend,
|
prepend: action.payload.prepend,
|
||||||
|
@ -11,13 +11,11 @@ import {
|
|||||||
TypesafeDroppableData,
|
TypesafeDroppableData,
|
||||||
} from 'features/dnd/types';
|
} from 'features/dnd/types';
|
||||||
import { imageSelected } from 'features/gallery/store/gallerySlice';
|
import { imageSelected } from 'features/gallery/store/gallerySlice';
|
||||||
import {
|
import { fieldImageValueChanged } from 'features/nodes/store/nodesSlice';
|
||||||
fieldImageValueChanged,
|
|
||||||
workflowExposedFieldAdded,
|
|
||||||
} from 'features/nodes/store/nodesSlice';
|
|
||||||
import { initialImageChanged } from 'features/parameters/store/generationSlice';
|
import { initialImageChanged } from 'features/parameters/store/generationSlice';
|
||||||
import { imagesApi } from 'services/api/endpoints/images';
|
import { imagesApi } from 'services/api/endpoints/images';
|
||||||
import { startAppListening } from '../';
|
import { startAppListening } from '../';
|
||||||
|
import { workflowExposedFieldAdded } from 'features/nodes/store/workflowSlice';
|
||||||
|
|
||||||
export const dndDropped = createAction<{
|
export const dndDropped = createAction<{
|
||||||
overData: TypesafeDroppableData;
|
overData: TypesafeDroppableData;
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
import { logger } from 'app/logging/logger';
|
import { logger } from 'app/logging/logger';
|
||||||
import { parseify } from 'common/util/serialize';
|
import { parseify } from 'common/util/serialize';
|
||||||
import { workflowLoadRequested } from 'features/nodes/store/actions';
|
import { workflowLoadRequested } from 'features/nodes/store/actions';
|
||||||
import { workflowLoaded } from 'features/nodes/store/nodesSlice';
|
import { workflowLoaded } from 'features/nodes/store/actions';
|
||||||
import { $flow } from 'features/nodes/store/reactFlowInstance';
|
import { $flow } from 'features/nodes/store/reactFlowInstance';
|
||||||
import {
|
import {
|
||||||
WorkflowMigrationError,
|
WorkflowMigrationError,
|
||||||
@ -21,7 +21,7 @@ export const addWorkflowLoadRequestedListener = () => {
|
|||||||
actionCreator: workflowLoadRequested,
|
actionCreator: workflowLoadRequested,
|
||||||
effect: (action, { dispatch, getState }) => {
|
effect: (action, { dispatch, getState }) => {
|
||||||
const log = logger('nodes');
|
const log = logger('nodes');
|
||||||
const workflow = action.payload;
|
const { workflow, asCopy } = action.payload;
|
||||||
const nodeTemplates = getState().nodes.nodeTemplates;
|
const nodeTemplates = getState().nodes.nodeTemplates;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@ -29,6 +29,12 @@ export const addWorkflowLoadRequestedListener = () => {
|
|||||||
workflow,
|
workflow,
|
||||||
nodeTemplates
|
nodeTemplates
|
||||||
);
|
);
|
||||||
|
|
||||||
|
if (asCopy) {
|
||||||
|
// If we're loading a copy, we need to remove the ID so that the backend will create a new workflow
|
||||||
|
delete validatedWorkflow.id;
|
||||||
|
}
|
||||||
|
|
||||||
dispatch(workflowLoaded(validatedWorkflow));
|
dispatch(workflowLoaded(validatedWorkflow));
|
||||||
if (!warnings.length) {
|
if (!warnings.length) {
|
||||||
dispatch(
|
dispatch(
|
||||||
@ -99,7 +105,6 @@ export const addWorkflowLoadRequestedListener = () => {
|
|||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
// Some other error occurred
|
// Some other error occurred
|
||||||
console.log(e);
|
|
||||||
log.error(
|
log.error(
|
||||||
{ error: parseify(e) },
|
{ error: parseify(e) },
|
||||||
t('nodes.unknownErrorValidatingWorkflow')
|
t('nodes.unknownErrorValidatingWorkflow')
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
import { Store } from '@reduxjs/toolkit';
|
import { createStore } from 'app/store/store';
|
||||||
import { atom } from 'nanostores';
|
import { atom } from 'nanostores';
|
||||||
|
|
||||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
export const $store = atom<
|
||||||
export const $store = atom<Store<any> | undefined>();
|
Readonly<ReturnType<typeof createStore>> | undefined
|
||||||
|
>();
|
||||||
|
@ -14,6 +14,7 @@ import galleryReducer from 'features/gallery/store/gallerySlice';
|
|||||||
import loraReducer from 'features/lora/store/loraSlice';
|
import loraReducer from 'features/lora/store/loraSlice';
|
||||||
import modelmanagerReducer from 'features/modelManager/store/modelManagerSlice';
|
import modelmanagerReducer from 'features/modelManager/store/modelManagerSlice';
|
||||||
import nodesReducer from 'features/nodes/store/nodesSlice';
|
import nodesReducer from 'features/nodes/store/nodesSlice';
|
||||||
|
import workflowReducer from 'features/nodes/store/workflowSlice';
|
||||||
import generationReducer from 'features/parameters/store/generationSlice';
|
import generationReducer from 'features/parameters/store/generationSlice';
|
||||||
import postprocessingReducer from 'features/parameters/store/postprocessingSlice';
|
import postprocessingReducer from 'features/parameters/store/postprocessingSlice';
|
||||||
import queueReducer from 'features/queue/store/queueSlice';
|
import queueReducer from 'features/queue/store/queueSlice';
|
||||||
@ -22,9 +23,11 @@ import configReducer from 'features/system/store/configSlice';
|
|||||||
import systemReducer from 'features/system/store/systemSlice';
|
import systemReducer from 'features/system/store/systemSlice';
|
||||||
import hotkeysReducer from 'features/ui/store/hotkeysSlice';
|
import hotkeysReducer from 'features/ui/store/hotkeysSlice';
|
||||||
import uiReducer from 'features/ui/store/uiSlice';
|
import uiReducer from 'features/ui/store/uiSlice';
|
||||||
|
import { createStore as createIDBKeyValStore, get, set } from 'idb-keyval';
|
||||||
import dynamicMiddlewares from 'redux-dynamic-middlewares';
|
import dynamicMiddlewares from 'redux-dynamic-middlewares';
|
||||||
import { Driver, rememberEnhancer, rememberReducer } from 'redux-remember';
|
import { Driver, rememberEnhancer, rememberReducer } from 'redux-remember';
|
||||||
import { api } from 'services/api';
|
import { api } from 'services/api';
|
||||||
|
import { authToastMiddleware } from 'services/api/authToastMiddleware';
|
||||||
import { STORAGE_PREFIX } from './constants';
|
import { STORAGE_PREFIX } from './constants';
|
||||||
import { serialize } from './enhancers/reduxRemember/serialize';
|
import { serialize } from './enhancers/reduxRemember/serialize';
|
||||||
import { unserialize } from './enhancers/reduxRemember/unserialize';
|
import { unserialize } from './enhancers/reduxRemember/unserialize';
|
||||||
@ -32,8 +35,6 @@ import { actionSanitizer } from './middleware/devtools/actionSanitizer';
|
|||||||
import { actionsDenylist } from './middleware/devtools/actionsDenylist';
|
import { actionsDenylist } from './middleware/devtools/actionsDenylist';
|
||||||
import { stateSanitizer } from './middleware/devtools/stateSanitizer';
|
import { stateSanitizer } from './middleware/devtools/stateSanitizer';
|
||||||
import { listenerMiddleware } from './middleware/listenerMiddleware';
|
import { listenerMiddleware } from './middleware/listenerMiddleware';
|
||||||
import { createStore as createIDBKeyValStore, get, set } from 'idb-keyval';
|
|
||||||
import { authToastMiddleware } from 'services/api/authToastMiddleware';
|
|
||||||
|
|
||||||
const allReducers = {
|
const allReducers = {
|
||||||
canvas: canvasReducer,
|
canvas: canvasReducer,
|
||||||
@ -53,6 +54,7 @@ const allReducers = {
|
|||||||
modelmanager: modelmanagerReducer,
|
modelmanager: modelmanagerReducer,
|
||||||
sdxl: sdxlReducer,
|
sdxl: sdxlReducer,
|
||||||
queue: queueReducer,
|
queue: queueReducer,
|
||||||
|
workflow: workflowReducer,
|
||||||
[api.reducerPath]: api.reducer,
|
[api.reducerPath]: api.reducer,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -66,6 +68,7 @@ const rememberedKeys: (keyof typeof allReducers)[] = [
|
|||||||
'generation',
|
'generation',
|
||||||
'sdxl',
|
'sdxl',
|
||||||
'nodes',
|
'nodes',
|
||||||
|
'workflow',
|
||||||
'postprocessing',
|
'postprocessing',
|
||||||
'system',
|
'system',
|
||||||
'ui',
|
'ui',
|
||||||
|
@ -23,7 +23,8 @@ export type AppFeature =
|
|||||||
| 'resumeQueue'
|
| 'resumeQueue'
|
||||||
| 'prependQueue'
|
| 'prependQueue'
|
||||||
| 'invocationCache'
|
| 'invocationCache'
|
||||||
| 'bulkDownload';
|
| 'bulkDownload'
|
||||||
|
| 'workflowLibrary';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A disable-able Stable Diffusion feature
|
* A disable-able Stable Diffusion feature
|
||||||
|
@ -1,4 +1,10 @@
|
|||||||
import { FormControl, FormLabel, Tooltip, forwardRef } from '@chakra-ui/react';
|
import {
|
||||||
|
FormControl,
|
||||||
|
FormControlProps,
|
||||||
|
FormLabel,
|
||||||
|
Tooltip,
|
||||||
|
forwardRef,
|
||||||
|
} from '@chakra-ui/react';
|
||||||
import { Select, SelectProps } from '@mantine/core';
|
import { Select, SelectProps } from '@mantine/core';
|
||||||
import { useMantineSelectStyles } from 'mantine-theme/hooks/useMantineSelectStyles';
|
import { useMantineSelectStyles } from 'mantine-theme/hooks/useMantineSelectStyles';
|
||||||
import { RefObject, memo } from 'react';
|
import { RefObject, memo } from 'react';
|
||||||
@ -13,10 +19,19 @@ export type IAISelectProps = Omit<SelectProps, 'label'> & {
|
|||||||
tooltip?: string | null;
|
tooltip?: string | null;
|
||||||
inputRef?: RefObject<HTMLInputElement>;
|
inputRef?: RefObject<HTMLInputElement>;
|
||||||
label?: string;
|
label?: string;
|
||||||
|
formControlProps?: FormControlProps;
|
||||||
};
|
};
|
||||||
|
|
||||||
const IAIMantineSelect = forwardRef((props: IAISelectProps, ref) => {
|
const IAIMantineSelect = forwardRef((props: IAISelectProps, ref) => {
|
||||||
const { tooltip, inputRef, label, disabled, required, ...rest } = props;
|
const {
|
||||||
|
tooltip,
|
||||||
|
formControlProps,
|
||||||
|
inputRef,
|
||||||
|
label,
|
||||||
|
disabled,
|
||||||
|
required,
|
||||||
|
...rest
|
||||||
|
} = props;
|
||||||
|
|
||||||
const styles = useMantineSelectStyles();
|
const styles = useMantineSelectStyles();
|
||||||
|
|
||||||
@ -28,6 +43,7 @@ const IAIMantineSelect = forwardRef((props: IAISelectProps, ref) => {
|
|||||||
isDisabled={disabled}
|
isDisabled={disabled}
|
||||||
position="static"
|
position="static"
|
||||||
data-testid={`select-${label || props.placeholder}`}
|
data-testid={`select-${label || props.placeholder}`}
|
||||||
|
{...formControlProps}
|
||||||
>
|
>
|
||||||
<FormLabel>{label}</FormLabel>
|
<FormLabel>{label}</FormLabel>
|
||||||
<Select disabled={disabled} ref={inputRef} styles={styles} {...rest} />
|
<Select disabled={disabled} ref={inputRef} styles={styles} {...rest} />
|
||||||
|
1
invokeai/frontend/web/src/common/components/Nbsp.tsx
Normal file
1
invokeai/frontend/web/src/common/components/Nbsp.tsx
Normal file
@ -0,0 +1 @@
|
|||||||
|
export const Nbsp = () => <>{'\u00A0'}</>;
|
@ -16,7 +16,8 @@ import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
|
|||||||
import IAIIconButton from 'common/components/IAIIconButton';
|
import IAIIconButton from 'common/components/IAIIconButton';
|
||||||
import { DeleteImageButton } from 'features/deleteImageModal/components/DeleteImageButton';
|
import { DeleteImageButton } from 'features/deleteImageModal/components/DeleteImageButton';
|
||||||
import { imagesToDeleteSelected } from 'features/deleteImageModal/store/slice';
|
import { imagesToDeleteSelected } from 'features/deleteImageModal/store/slice';
|
||||||
import { workflowLoadRequested } from 'features/nodes/store/actions';
|
import SingleSelectionMenuItems from 'features/gallery/components/ImageContextMenu/SingleSelectionMenuItems';
|
||||||
|
import { sentImageToImg2Img } from 'features/gallery/store/actions';
|
||||||
import ParamUpscalePopover from 'features/parameters/components/Parameters/Upscale/ParamUpscaleSettings';
|
import ParamUpscalePopover from 'features/parameters/components/Parameters/Upscale/ParamUpscaleSettings';
|
||||||
import { useRecallParameters } from 'features/parameters/hooks/useRecallParameters';
|
import { useRecallParameters } from 'features/parameters/hooks/useRecallParameters';
|
||||||
import { initialImageSelected } from 'features/parameters/store/actions';
|
import { initialImageSelected } from 'features/parameters/store/actions';
|
||||||
@ -27,6 +28,7 @@ import {
|
|||||||
setShouldShowImageDetails,
|
setShouldShowImageDetails,
|
||||||
setShouldShowProgressInViewer,
|
setShouldShowProgressInViewer,
|
||||||
} from 'features/ui/store/uiSlice';
|
} from 'features/ui/store/uiSlice';
|
||||||
|
import { useGetAndLoadEmbeddedWorkflow } from 'features/workflowLibrary/hooks/useGetAndLoadEmbeddedWorkflow';
|
||||||
import { memo, useCallback } from 'react';
|
import { memo, useCallback } from 'react';
|
||||||
import { useHotkeys } from 'react-hotkeys-hook';
|
import { useHotkeys } from 'react-hotkeys-hook';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
@ -41,10 +43,7 @@ import {
|
|||||||
import { FaCircleNodes, FaEllipsis } from 'react-icons/fa6';
|
import { FaCircleNodes, FaEllipsis } from 'react-icons/fa6';
|
||||||
import { useGetImageDTOQuery } from 'services/api/endpoints/images';
|
import { useGetImageDTOQuery } from 'services/api/endpoints/images';
|
||||||
import { useDebouncedMetadata } from 'services/api/hooks/useDebouncedMetadata';
|
import { useDebouncedMetadata } from 'services/api/hooks/useDebouncedMetadata';
|
||||||
import { useDebouncedWorkflow } from 'services/api/hooks/useDebouncedWorkflow';
|
|
||||||
import { menuListMotionProps } from 'theme/components/menu';
|
import { menuListMotionProps } from 'theme/components/menu';
|
||||||
import { sentImageToImg2Img } from 'features/gallery/store/actions';
|
|
||||||
import SingleSelectionMenuItems from 'features/gallery/components/ImageContextMenu/SingleSelectionMenuItems';
|
|
||||||
|
|
||||||
const currentImageButtonsSelector = createSelector(
|
const currentImageButtonsSelector = createSelector(
|
||||||
[stateSelector, activeTabNameSelector],
|
[stateSelector, activeTabNameSelector],
|
||||||
@ -111,18 +110,17 @@ const CurrentImageButtons = () => {
|
|||||||
lastSelectedImage?.image_name
|
lastSelectedImage?.image_name
|
||||||
);
|
);
|
||||||
|
|
||||||
const { workflow, isLoading: isLoadingWorkflow } = useDebouncedWorkflow(
|
const { getAndLoadEmbeddedWorkflow, getAndLoadEmbeddedWorkflowResult } =
|
||||||
lastSelectedImage?.workflow_id
|
useGetAndLoadEmbeddedWorkflow({});
|
||||||
);
|
|
||||||
|
|
||||||
const handleLoadWorkflow = useCallback(() => {
|
const handleLoadWorkflow = useCallback(() => {
|
||||||
if (!workflow) {
|
if (!lastSelectedImage || !lastSelectedImage.has_workflow) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
dispatch(workflowLoadRequested(workflow));
|
getAndLoadEmbeddedWorkflow(lastSelectedImage.image_name);
|
||||||
}, [dispatch, workflow]);
|
}, [getAndLoadEmbeddedWorkflow, lastSelectedImage]);
|
||||||
|
|
||||||
useHotkeys('w', handleLoadWorkflow, [workflow]);
|
useHotkeys('w', handleLoadWorkflow, [lastSelectedImage]);
|
||||||
|
|
||||||
const handleClickUseAllParameters = useCallback(() => {
|
const handleClickUseAllParameters = useCallback(() => {
|
||||||
recallAllParameters(metadata);
|
recallAllParameters(metadata);
|
||||||
@ -255,12 +253,12 @@ const CurrentImageButtons = () => {
|
|||||||
|
|
||||||
<ButtonGroup isAttached={true} isDisabled={shouldDisableToolbarButtons}>
|
<ButtonGroup isAttached={true} isDisabled={shouldDisableToolbarButtons}>
|
||||||
<IAIIconButton
|
<IAIIconButton
|
||||||
isLoading={isLoadingWorkflow}
|
|
||||||
icon={<FaCircleNodes />}
|
icon={<FaCircleNodes />}
|
||||||
tooltip={`${t('nodes.loadWorkflow')} (W)`}
|
tooltip={`${t('nodes.loadWorkflow')} (W)`}
|
||||||
aria-label={`${t('nodes.loadWorkflow')} (W)`}
|
aria-label={`${t('nodes.loadWorkflow')} (W)`}
|
||||||
isDisabled={!workflow}
|
isDisabled={!imageDTO?.has_workflow}
|
||||||
onClick={handleLoadWorkflow}
|
onClick={handleLoadWorkflow}
|
||||||
|
isLoading={getAndLoadEmbeddedWorkflowResult.isLoading}
|
||||||
/>
|
/>
|
||||||
<IAIIconButton
|
<IAIIconButton
|
||||||
isLoading={isLoadingMetadata}
|
isLoading={isLoadingMetadata}
|
||||||
|
@ -3,18 +3,22 @@ import { useStore } from '@nanostores/react';
|
|||||||
import { useAppToaster } from 'app/components/Toaster';
|
import { useAppToaster } from 'app/components/Toaster';
|
||||||
import { $customStarUI } from 'app/store/nanostores/customStarUI';
|
import { $customStarUI } from 'app/store/nanostores/customStarUI';
|
||||||
import { useAppDispatch } from 'app/store/storeHooks';
|
import { useAppDispatch } from 'app/store/storeHooks';
|
||||||
|
import { useCopyImageToClipboard } from 'common/hooks/useCopyImageToClipboard';
|
||||||
import { setInitialCanvasImage } from 'features/canvas/store/canvasSlice';
|
import { setInitialCanvasImage } from 'features/canvas/store/canvasSlice';
|
||||||
import {
|
import {
|
||||||
imagesToChangeSelected,
|
imagesToChangeSelected,
|
||||||
isModalOpenChanged,
|
isModalOpenChanged,
|
||||||
} from 'features/changeBoardModal/store/slice';
|
} from 'features/changeBoardModal/store/slice';
|
||||||
import { imagesToDeleteSelected } from 'features/deleteImageModal/store/slice';
|
import { imagesToDeleteSelected } from 'features/deleteImageModal/store/slice';
|
||||||
import { workflowLoadRequested } from 'features/nodes/store/actions';
|
import {
|
||||||
|
sentImageToCanvas,
|
||||||
|
sentImageToImg2Img,
|
||||||
|
} from 'features/gallery/store/actions';
|
||||||
import { useRecallParameters } from 'features/parameters/hooks/useRecallParameters';
|
import { useRecallParameters } from 'features/parameters/hooks/useRecallParameters';
|
||||||
import { initialImageSelected } from 'features/parameters/store/actions';
|
import { initialImageSelected } from 'features/parameters/store/actions';
|
||||||
import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus';
|
import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus';
|
||||||
import { useCopyImageToClipboard } from 'common/hooks/useCopyImageToClipboard';
|
|
||||||
import { setActiveTab } from 'features/ui/store/uiSlice';
|
import { setActiveTab } from 'features/ui/store/uiSlice';
|
||||||
|
import { useGetAndLoadEmbeddedWorkflow } from 'features/workflowLibrary/hooks/useGetAndLoadEmbeddedWorkflow';
|
||||||
import { memo, useCallback } from 'react';
|
import { memo, useCallback } from 'react';
|
||||||
import { flushSync } from 'react-dom';
|
import { flushSync } from 'react-dom';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
@ -36,12 +40,7 @@ import {
|
|||||||
useUnstarImagesMutation,
|
useUnstarImagesMutation,
|
||||||
} from 'services/api/endpoints/images';
|
} from 'services/api/endpoints/images';
|
||||||
import { useDebouncedMetadata } from 'services/api/hooks/useDebouncedMetadata';
|
import { useDebouncedMetadata } from 'services/api/hooks/useDebouncedMetadata';
|
||||||
import { useDebouncedWorkflow } from 'services/api/hooks/useDebouncedWorkflow';
|
|
||||||
import { ImageDTO } from 'services/api/types';
|
import { ImageDTO } from 'services/api/types';
|
||||||
import {
|
|
||||||
sentImageToCanvas,
|
|
||||||
sentImageToImg2Img,
|
|
||||||
} from 'features/gallery/store/actions';
|
|
||||||
|
|
||||||
type SingleSelectionMenuItemsProps = {
|
type SingleSelectionMenuItemsProps = {
|
||||||
imageDTO: ImageDTO;
|
imageDTO: ImageDTO;
|
||||||
@ -61,9 +60,13 @@ const SingleSelectionMenuItems = (props: SingleSelectionMenuItemsProps) => {
|
|||||||
const { metadata, isLoading: isLoadingMetadata } = useDebouncedMetadata(
|
const { metadata, isLoading: isLoadingMetadata } = useDebouncedMetadata(
|
||||||
imageDTO?.image_name
|
imageDTO?.image_name
|
||||||
);
|
);
|
||||||
const { workflow, isLoading: isLoadingWorkflow } = useDebouncedWorkflow(
|
|
||||||
imageDTO?.workflow_id
|
const { getAndLoadEmbeddedWorkflow, getAndLoadEmbeddedWorkflowResult } =
|
||||||
);
|
useGetAndLoadEmbeddedWorkflow({});
|
||||||
|
|
||||||
|
const handleLoadWorkflow = useCallback(() => {
|
||||||
|
getAndLoadEmbeddedWorkflow(imageDTO.image_name);
|
||||||
|
}, [getAndLoadEmbeddedWorkflow, imageDTO.image_name]);
|
||||||
|
|
||||||
const [starImages] = useStarImagesMutation();
|
const [starImages] = useStarImagesMutation();
|
||||||
const [unstarImages] = useUnstarImagesMutation();
|
const [unstarImages] = useUnstarImagesMutation();
|
||||||
@ -101,13 +104,6 @@ const SingleSelectionMenuItems = (props: SingleSelectionMenuItemsProps) => {
|
|||||||
recallSeed(metadata?.seed);
|
recallSeed(metadata?.seed);
|
||||||
}, [metadata?.seed, recallSeed]);
|
}, [metadata?.seed, recallSeed]);
|
||||||
|
|
||||||
const handleLoadWorkflow = useCallback(() => {
|
|
||||||
if (!workflow) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
dispatch(workflowLoadRequested(workflow));
|
|
||||||
}, [dispatch, workflow]);
|
|
||||||
|
|
||||||
const handleSendToImageToImage = useCallback(() => {
|
const handleSendToImageToImage = useCallback(() => {
|
||||||
dispatch(sentImageToImg2Img());
|
dispatch(sentImageToImg2Img());
|
||||||
dispatch(initialImageSelected(imageDTO));
|
dispatch(initialImageSelected(imageDTO));
|
||||||
@ -179,9 +175,15 @@ const SingleSelectionMenuItems = (props: SingleSelectionMenuItemsProps) => {
|
|||||||
{t('parameters.downloadImage')}
|
{t('parameters.downloadImage')}
|
||||||
</MenuItem>
|
</MenuItem>
|
||||||
<MenuItem
|
<MenuItem
|
||||||
icon={isLoadingWorkflow ? <SpinnerIcon /> : <FaCircleNodes />}
|
icon={
|
||||||
|
getAndLoadEmbeddedWorkflowResult.isLoading ? (
|
||||||
|
<SpinnerIcon />
|
||||||
|
) : (
|
||||||
|
<FaCircleNodes />
|
||||||
|
)
|
||||||
|
}
|
||||||
onClickCapture={handleLoadWorkflow}
|
onClickCapture={handleLoadWorkflow}
|
||||||
isDisabled={isLoadingWorkflow || !workflow}
|
isDisabled={!imageDTO.has_workflow}
|
||||||
>
|
>
|
||||||
{t('nodes.loadWorkflow')}
|
{t('nodes.loadWorkflow')}
|
||||||
</MenuItem>
|
</MenuItem>
|
||||||
|
@ -14,10 +14,10 @@ import ScrollableContent from 'features/nodes/components/sidePanel/ScrollableCon
|
|||||||
import { memo } from 'react';
|
import { memo } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { useDebouncedMetadata } from 'services/api/hooks/useDebouncedMetadata';
|
import { useDebouncedMetadata } from 'services/api/hooks/useDebouncedMetadata';
|
||||||
import { useDebouncedWorkflow } from 'services/api/hooks/useDebouncedWorkflow';
|
|
||||||
import { ImageDTO } from 'services/api/types';
|
import { ImageDTO } from 'services/api/types';
|
||||||
import DataViewer from './DataViewer';
|
import DataViewer from './DataViewer';
|
||||||
import ImageMetadataActions from './ImageMetadataActions';
|
import ImageMetadataActions from './ImageMetadataActions';
|
||||||
|
import ImageMetadataWorkflowTabContent from './ImageMetadataWorkflowTabContent';
|
||||||
|
|
||||||
type ImageMetadataViewerProps = {
|
type ImageMetadataViewerProps = {
|
||||||
image: ImageDTO;
|
image: ImageDTO;
|
||||||
@ -32,7 +32,6 @@ const ImageMetadataViewer = ({ image }: ImageMetadataViewerProps) => {
|
|||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
|
|
||||||
const { metadata } = useDebouncedMetadata(image.image_name);
|
const { metadata } = useDebouncedMetadata(image.image_name);
|
||||||
const { workflow } = useDebouncedWorkflow(image.workflow_id);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Flex
|
<Flex
|
||||||
@ -67,9 +66,9 @@ const ImageMetadataViewer = ({ image }: ImageMetadataViewerProps) => {
|
|||||||
>
|
>
|
||||||
<TabList>
|
<TabList>
|
||||||
<Tab>{t('metadata.recallParameters')}</Tab>
|
<Tab>{t('metadata.recallParameters')}</Tab>
|
||||||
<Tab>{t('metadata.metadata')}</Tab>
|
<Tab isDisabled={!metadata}>{t('metadata.metadata')}</Tab>
|
||||||
<Tab>{t('metadata.imageDetails')}</Tab>
|
<Tab>{t('metadata.imageDetails')}</Tab>
|
||||||
<Tab>{t('metadata.workflow')}</Tab>
|
<Tab isDisabled={!image.has_workflow}>{t('metadata.workflow')}</Tab>
|
||||||
</TabList>
|
</TabList>
|
||||||
|
|
||||||
<TabPanels>
|
<TabPanels>
|
||||||
@ -97,11 +96,7 @@ const ImageMetadataViewer = ({ image }: ImageMetadataViewerProps) => {
|
|||||||
)}
|
)}
|
||||||
</TabPanel>
|
</TabPanel>
|
||||||
<TabPanel>
|
<TabPanel>
|
||||||
{workflow ? (
|
<ImageMetadataWorkflowTabContent image={image} />
|
||||||
<DataViewer data={workflow} label={t('metadata.workflow')} />
|
|
||||||
) : (
|
|
||||||
<IAINoContentFallback label={t('nodes.noWorkflow')} />
|
|
||||||
)}
|
|
||||||
</TabPanel>
|
</TabPanel>
|
||||||
</TabPanels>
|
</TabPanels>
|
||||||
</Tabs>
|
</Tabs>
|
||||||
|
@ -0,0 +1,23 @@
|
|||||||
|
import { IAINoContentFallback } from 'common/components/IAIImageFallback';
|
||||||
|
import { memo } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { useGetImageWorkflowQuery } from 'services/api/endpoints/images';
|
||||||
|
import { ImageDTO } from 'services/api/types';
|
||||||
|
import DataViewer from './DataViewer';
|
||||||
|
|
||||||
|
type Props = {
|
||||||
|
image: ImageDTO;
|
||||||
|
};
|
||||||
|
|
||||||
|
const ImageMetadataWorkflowTabContent = ({ image }: Props) => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const { currentData: workflow } = useGetImageWorkflowQuery(image.image_name);
|
||||||
|
|
||||||
|
if (!workflow) {
|
||||||
|
return <IAINoContentFallback label={t('nodes.noWorkflow')} />;
|
||||||
|
}
|
||||||
|
|
||||||
|
return <DataViewer data={workflow} label={t('metadata.workflow')} />;
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(ImageMetadataWorkflowTabContent);
|
@ -1,45 +0,0 @@
|
|||||||
import { Checkbox, Flex, FormControl, FormLabel } from '@chakra-ui/react';
|
|
||||||
import { useAppDispatch } from 'app/store/storeHooks';
|
|
||||||
import { useEmbedWorkflow } from 'features/nodes/hooks/useEmbedWorkflow';
|
|
||||||
import { useWithWorkflow } from 'features/nodes/hooks/useWithWorkflow';
|
|
||||||
import { nodeEmbedWorkflowChanged } from 'features/nodes/store/nodesSlice';
|
|
||||||
import { ChangeEvent, memo, useCallback } from 'react';
|
|
||||||
import { useTranslation } from 'react-i18next';
|
|
||||||
|
|
||||||
const EmbedWorkflowCheckbox = ({ nodeId }: { nodeId: string }) => {
|
|
||||||
const { t } = useTranslation();
|
|
||||||
const dispatch = useAppDispatch();
|
|
||||||
const withWorkflow = useWithWorkflow(nodeId);
|
|
||||||
const embedWorkflow = useEmbedWorkflow(nodeId);
|
|
||||||
const handleChange = useCallback(
|
|
||||||
(e: ChangeEvent<HTMLInputElement>) => {
|
|
||||||
dispatch(
|
|
||||||
nodeEmbedWorkflowChanged({
|
|
||||||
nodeId,
|
|
||||||
embedWorkflow: e.target.checked,
|
|
||||||
})
|
|
||||||
);
|
|
||||||
},
|
|
||||||
[dispatch, nodeId]
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!withWorkflow) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
return (
|
|
||||||
<FormControl as={Flex} sx={{ alignItems: 'center', gap: 2, w: 'auto' }}>
|
|
||||||
<FormLabel sx={{ fontSize: 'xs', mb: '1px' }}>
|
|
||||||
{t('metadata.workflow')}
|
|
||||||
</FormLabel>
|
|
||||||
<Checkbox
|
|
||||||
className="nopan"
|
|
||||||
size="sm"
|
|
||||||
onChange={handleChange}
|
|
||||||
isChecked={embedWorkflow}
|
|
||||||
/>
|
|
||||||
</FormControl>
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
export default memo(EmbedWorkflowCheckbox);
|
|
@ -1,9 +1,8 @@
|
|||||||
import { Flex } from '@chakra-ui/react';
|
import { Flex } from '@chakra-ui/react';
|
||||||
import { useHasImageOutput } from 'features/nodes/hooks/useHasImageOutput';
|
import { useHasImageOutput } from 'features/nodes/hooks/useHasImageOutput';
|
||||||
import { DRAG_HANDLE_CLASSNAME } from 'features/nodes/types/constants';
|
import { DRAG_HANDLE_CLASSNAME } from 'features/nodes/types/constants';
|
||||||
import { memo } from 'react';
|
|
||||||
import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus';
|
import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus';
|
||||||
import EmbedWorkflowCheckbox from './EmbedWorkflowCheckbox';
|
import { memo } from 'react';
|
||||||
import SaveToGalleryCheckbox from './SaveToGalleryCheckbox';
|
import SaveToGalleryCheckbox from './SaveToGalleryCheckbox';
|
||||||
import UseCacheCheckbox from './UseCacheCheckbox';
|
import UseCacheCheckbox from './UseCacheCheckbox';
|
||||||
|
|
||||||
@ -28,7 +27,6 @@ const InvocationNodeFooter = ({ nodeId }: Props) => {
|
|||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
{isCacheEnabled && <UseCacheCheckbox nodeId={nodeId} />}
|
{isCacheEnabled && <UseCacheCheckbox nodeId={nodeId} />}
|
||||||
{hasImageOutput && <EmbedWorkflowCheckbox nodeId={nodeId} />}
|
|
||||||
{hasImageOutput && <SaveToGalleryCheckbox nodeId={nodeId} />}
|
{hasImageOutput && <SaveToGalleryCheckbox nodeId={nodeId} />}
|
||||||
</Flex>
|
</Flex>
|
||||||
);
|
);
|
||||||
|
@ -13,7 +13,7 @@ import { useFieldTemplateTitle } from 'features/nodes/hooks/useFieldTemplateTitl
|
|||||||
import {
|
import {
|
||||||
workflowExposedFieldAdded,
|
workflowExposedFieldAdded,
|
||||||
workflowExposedFieldRemoved,
|
workflowExposedFieldRemoved,
|
||||||
} from 'features/nodes/store/nodesSlice';
|
} from 'features/nodes/store/workflowSlice';
|
||||||
import { MouseEvent, ReactNode, memo, useCallback, useMemo } from 'react';
|
import { MouseEvent, ReactNode, memo, useCallback, useMemo } from 'react';
|
||||||
import { FaMinus, FaPlus } from 'react-icons/fa';
|
import { FaMinus, FaPlus } from 'react-icons/fa';
|
||||||
import { menuListMotionProps } from 'theme/components/menu';
|
import { menuListMotionProps } from 'theme/components/menu';
|
||||||
@ -41,9 +41,9 @@ const FieldContextMenu = ({ nodeId, fieldName, kind, children }: Props) => {
|
|||||||
() =>
|
() =>
|
||||||
createSelector(
|
createSelector(
|
||||||
stateSelector,
|
stateSelector,
|
||||||
({ nodes }) => {
|
({ workflow }) => {
|
||||||
const isExposed = Boolean(
|
const isExposed = Boolean(
|
||||||
nodes.workflow.exposedFields.find(
|
workflow.exposedFields.find(
|
||||||
(f) => f.nodeId === nodeId && f.fieldName === fieldName
|
(f) => f.nodeId === nodeId && f.fieldName === fieldName
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
@ -10,7 +10,7 @@ import { useAppDispatch } from 'app/store/storeHooks';
|
|||||||
import IAIIconButton from 'common/components/IAIIconButton';
|
import IAIIconButton from 'common/components/IAIIconButton';
|
||||||
import NodeSelectionOverlay from 'common/components/NodeSelectionOverlay';
|
import NodeSelectionOverlay from 'common/components/NodeSelectionOverlay';
|
||||||
import { useMouseOverNode } from 'features/nodes/hooks/useMouseOverNode';
|
import { useMouseOverNode } from 'features/nodes/hooks/useMouseOverNode';
|
||||||
import { workflowExposedFieldRemoved } from 'features/nodes/store/nodesSlice';
|
import { workflowExposedFieldRemoved } from 'features/nodes/store/workflowSlice';
|
||||||
import { HANDLE_TOOLTIP_OPEN_DELAY } from 'features/nodes/types/constants';
|
import { HANDLE_TOOLTIP_OPEN_DELAY } from 'features/nodes/types/constants';
|
||||||
import { memo, useCallback } from 'react';
|
import { memo, useCallback } from 'react';
|
||||||
import { FaInfoCircle, FaTrash } from 'react-icons/fa';
|
import { FaInfoCircle, FaTrash } from 'react-icons/fa';
|
||||||
|
@ -1,10 +1,16 @@
|
|||||||
import { Flex } from '@chakra-ui/layout';
|
import { Flex } from '@chakra-ui/layout';
|
||||||
import { memo } from 'react';
|
import { memo } from 'react';
|
||||||
import LoadWorkflowButton from './LoadWorkflowButton';
|
import DownloadWorkflowButton from 'features/workflowLibrary/components/DownloadWorkflowButton';
|
||||||
import ResetWorkflowButton from './ResetWorkflowButton';
|
import UploadWorkflowButton from 'features/workflowLibrary/components/LoadWorkflowFromFileButton';
|
||||||
import DownloadWorkflowButton from './DownloadWorkflowButton';
|
import ResetWorkflowEditorButton from 'features/workflowLibrary/components/ResetWorkflowButton';
|
||||||
|
import SaveWorkflowButton from 'features/workflowLibrary/components/SaveWorkflowButton';
|
||||||
|
import SaveWorkflowAsButton from 'features/workflowLibrary/components/SaveWorkflowAsButton';
|
||||||
|
import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus';
|
||||||
|
|
||||||
const TopCenterPanel = () => {
|
const TopCenterPanel = () => {
|
||||||
|
const isWorkflowLibraryEnabled =
|
||||||
|
useFeatureStatus('workflowLibrary').isFeatureEnabled;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Flex
|
<Flex
|
||||||
sx={{
|
sx={{
|
||||||
@ -16,8 +22,14 @@ const TopCenterPanel = () => {
|
|||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<DownloadWorkflowButton />
|
<DownloadWorkflowButton />
|
||||||
<LoadWorkflowButton />
|
<UploadWorkflowButton />
|
||||||
<ResetWorkflowButton />
|
{isWorkflowLibraryEnabled && (
|
||||||
|
<>
|
||||||
|
<SaveWorkflowButton />
|
||||||
|
<SaveWorkflowAsButton />
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
<ResetWorkflowEditorButton />
|
||||||
</Flex>
|
</Flex>
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
@ -1,10 +1,16 @@
|
|||||||
import { Flex } from '@chakra-ui/layout';
|
import { Flex } from '@chakra-ui/react';
|
||||||
|
import WorkflowLibraryButton from 'features/workflowLibrary/components/WorkflowLibraryButton';
|
||||||
import { memo } from 'react';
|
import { memo } from 'react';
|
||||||
import WorkflowEditorSettings from './WorkflowEditorSettings';
|
import WorkflowEditorSettings from './WorkflowEditorSettings';
|
||||||
|
import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus';
|
||||||
|
|
||||||
const TopRightPanel = () => {
|
const TopRightPanel = () => {
|
||||||
|
const isWorkflowLibraryEnabled =
|
||||||
|
useFeatureStatus('workflowLibrary').isFeatureEnabled;
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<Flex sx={{ gap: 2, position: 'absolute', top: 2, insetInlineEnd: 2 }}>
|
<Flex sx={{ gap: 2, position: 'absolute', top: 2, insetInlineEnd: 2 }}>
|
||||||
|
{isWorkflowLibraryEnabled && <WorkflowLibraryButton />}
|
||||||
<WorkflowEditorSettings />
|
<WorkflowEditorSettings />
|
||||||
</Flex>
|
</Flex>
|
||||||
);
|
);
|
||||||
|
@ -11,17 +11,16 @@ import { stateSelector } from 'app/store/store';
|
|||||||
import { useAppSelector } from 'app/store/storeHooks';
|
import { useAppSelector } from 'app/store/storeHooks';
|
||||||
import { defaultSelectorOptions } from 'app/store/util/defaultMemoizeOptions';
|
import { defaultSelectorOptions } from 'app/store/util/defaultMemoizeOptions';
|
||||||
import { IAINoContentFallback } from 'common/components/IAIImageFallback';
|
import { IAINoContentFallback } from 'common/components/IAIImageFallback';
|
||||||
import { getNeedsUpdate } from 'features/nodes/util/node/nodeUpdate';
|
import NotesTextarea from 'features/nodes/components/flow/nodes/Invocation/NotesTextarea';
|
||||||
|
import ScrollableContent from 'features/nodes/components/sidePanel/ScrollableContent';
|
||||||
import {
|
import {
|
||||||
InvocationNodeData,
|
InvocationNode,
|
||||||
InvocationTemplate,
|
InvocationTemplate,
|
||||||
isInvocationNode,
|
isInvocationNode,
|
||||||
} from 'features/nodes/types/invocation';
|
} from 'features/nodes/types/invocation';
|
||||||
|
import { getNeedsUpdate } from 'features/nodes/util/node/nodeUpdate';
|
||||||
import { memo, useMemo } from 'react';
|
import { memo, useMemo } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { Node } from 'reactflow';
|
|
||||||
import NotesTextarea from 'features/nodes/components/flow/nodes/Invocation/NotesTextarea';
|
|
||||||
import ScrollableContent from 'features/nodes/components/sidePanel/ScrollableContent';
|
|
||||||
import EditableNodeTitle from './details/EditableNodeTitle';
|
import EditableNodeTitle from './details/EditableNodeTitle';
|
||||||
|
|
||||||
const selector = createSelector(
|
const selector = createSelector(
|
||||||
@ -62,7 +61,7 @@ const InspectorDetailsTab = () => {
|
|||||||
export default memo(InspectorDetailsTab);
|
export default memo(InspectorDetailsTab);
|
||||||
|
|
||||||
type ContentProps = {
|
type ContentProps = {
|
||||||
node: Node<InvocationNodeData>;
|
node: InvocationNode;
|
||||||
template: InvocationTemplate;
|
template: InvocationTemplate;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -5,6 +5,7 @@ import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
|
|||||||
import { defaultSelectorOptions } from 'app/store/util/defaultMemoizeOptions';
|
import { defaultSelectorOptions } from 'app/store/util/defaultMemoizeOptions';
|
||||||
import IAIInput from 'common/components/IAIInput';
|
import IAIInput from 'common/components/IAIInput';
|
||||||
import IAITextarea from 'common/components/IAITextarea';
|
import IAITextarea from 'common/components/IAITextarea';
|
||||||
|
import ScrollableContent from 'features/nodes/components/sidePanel/ScrollableContent';
|
||||||
import {
|
import {
|
||||||
workflowAuthorChanged,
|
workflowAuthorChanged,
|
||||||
workflowContactChanged,
|
workflowContactChanged,
|
||||||
@ -13,16 +14,15 @@ import {
|
|||||||
workflowNotesChanged,
|
workflowNotesChanged,
|
||||||
workflowTagsChanged,
|
workflowTagsChanged,
|
||||||
workflowVersionChanged,
|
workflowVersionChanged,
|
||||||
} from 'features/nodes/store/nodesSlice';
|
} from 'features/nodes/store/workflowSlice';
|
||||||
import { ChangeEvent, memo, useCallback } from 'react';
|
import { ChangeEvent, memo, useCallback } from 'react';
|
||||||
import ScrollableContent from 'features/nodes/components/sidePanel/ScrollableContent';
|
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
const selector = createSelector(
|
const selector = createSelector(
|
||||||
stateSelector,
|
stateSelector,
|
||||||
({ nodes }) => {
|
({ workflow }) => {
|
||||||
const { author, name, description, tags, version, contact, notes } =
|
const { author, name, description, tags, version, contact, notes } =
|
||||||
nodes.workflow;
|
workflow;
|
||||||
|
|
||||||
return {
|
return {
|
||||||
name,
|
name,
|
||||||
|
@ -11,9 +11,9 @@ import { useTranslation } from 'react-i18next';
|
|||||||
|
|
||||||
const selector = createSelector(
|
const selector = createSelector(
|
||||||
stateSelector,
|
stateSelector,
|
||||||
({ nodes }) => {
|
({ workflow }) => {
|
||||||
return {
|
return {
|
||||||
fields: nodes.workflow.exposedFields,
|
fields: workflow.exposedFields,
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
defaultSelectorOptions
|
defaultSelectorOptions
|
||||||
|
@ -0,0 +1,17 @@
|
|||||||
|
import { useWorkflow } from 'features/nodes/hooks/useWorkflow';
|
||||||
|
import { useCallback } from 'react';
|
||||||
|
|
||||||
|
export const useDownloadWorkflow = () => {
|
||||||
|
const workflow = useWorkflow();
|
||||||
|
const downloadWorkflow = useCallback(() => {
|
||||||
|
const blob = new Blob([JSON.stringify(workflow, null, 2)]);
|
||||||
|
const a = document.createElement('a');
|
||||||
|
a.href = URL.createObjectURL(blob);
|
||||||
|
a.download = `${workflow.name || 'My Workflow'}.json`;
|
||||||
|
document.body.appendChild(a);
|
||||||
|
a.click();
|
||||||
|
a.remove();
|
||||||
|
}, [workflow]);
|
||||||
|
|
||||||
|
return downloadWorkflow;
|
||||||
|
};
|
@ -1,27 +0,0 @@
|
|||||||
import { createSelector } from '@reduxjs/toolkit';
|
|
||||||
import { stateSelector } from 'app/store/store';
|
|
||||||
import { useAppSelector } from 'app/store/storeHooks';
|
|
||||||
import { defaultSelectorOptions } from 'app/store/util/defaultMemoizeOptions';
|
|
||||||
import { useMemo } from 'react';
|
|
||||||
import { isInvocationNode } from 'features/nodes/types/invocation';
|
|
||||||
|
|
||||||
export const useEmbedWorkflow = (nodeId: string) => {
|
|
||||||
const selector = useMemo(
|
|
||||||
() =>
|
|
||||||
createSelector(
|
|
||||||
stateSelector,
|
|
||||||
({ nodes }) => {
|
|
||||||
const node = nodes.nodes.find((node) => node.id === nodeId);
|
|
||||||
if (!isInvocationNode(node)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return node.data.embedWorkflow;
|
|
||||||
},
|
|
||||||
defaultSelectorOptions
|
|
||||||
),
|
|
||||||
[nodeId]
|
|
||||||
);
|
|
||||||
|
|
||||||
const embedWorkflow = useAppSelector(selector);
|
|
||||||
return embedWorkflow;
|
|
||||||
};
|
|
@ -1,31 +0,0 @@
|
|||||||
import { createSelector } from '@reduxjs/toolkit';
|
|
||||||
import { stateSelector } from 'app/store/store';
|
|
||||||
import { useAppSelector } from 'app/store/storeHooks';
|
|
||||||
import { defaultSelectorOptions } from 'app/store/util/defaultMemoizeOptions';
|
|
||||||
import { useMemo } from 'react';
|
|
||||||
import { isInvocationNode } from 'features/nodes/types/invocation';
|
|
||||||
|
|
||||||
export const useWithWorkflow = (nodeId: string) => {
|
|
||||||
const selector = useMemo(
|
|
||||||
() =>
|
|
||||||
createSelector(
|
|
||||||
stateSelector,
|
|
||||||
({ nodes }) => {
|
|
||||||
const node = nodes.nodes.find((node) => node.id === nodeId);
|
|
||||||
if (!isInvocationNode(node)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
const nodeTemplate = nodes.nodeTemplates[node?.data.type ?? ''];
|
|
||||||
if (!nodeTemplate) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return nodeTemplate.withWorkflow;
|
|
||||||
},
|
|
||||||
defaultSelectorOptions
|
|
||||||
),
|
|
||||||
[nodeId]
|
|
||||||
);
|
|
||||||
|
|
||||||
const withWorkflow = useAppSelector(selector);
|
|
||||||
return withWorkflow;
|
|
||||||
};
|
|
@ -5,12 +5,16 @@ import { useMemo } from 'react';
|
|||||||
import { useDebounce } from 'use-debounce';
|
import { useDebounce } from 'use-debounce';
|
||||||
|
|
||||||
export const useWorkflow = () => {
|
export const useWorkflow = () => {
|
||||||
const nodes = useAppSelector((state: RootState) => state.nodes);
|
const nodes_ = useAppSelector((state: RootState) => state.nodes.nodes);
|
||||||
const [debouncedNodes] = useDebounce(nodes, 300);
|
const edges_ = useAppSelector((state: RootState) => state.nodes.edges);
|
||||||
const workflow = useMemo(
|
const workflow_ = useAppSelector((state: RootState) => state.workflow);
|
||||||
() => buildWorkflow(debouncedNodes),
|
const [nodes] = useDebounce(nodes_, 300);
|
||||||
[debouncedNodes]
|
const [edges] = useDebounce(edges_, 300);
|
||||||
|
const [workflow] = useDebounce(workflow_, 300);
|
||||||
|
const builtWorkflow = useMemo(
|
||||||
|
() => buildWorkflow({ nodes, edges, workflow }),
|
||||||
|
[nodes, edges, workflow]
|
||||||
);
|
);
|
||||||
|
|
||||||
return workflow;
|
return builtWorkflow;
|
||||||
};
|
};
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
import { createAction, isAnyOf } from '@reduxjs/toolkit';
|
import { createAction, isAnyOf } from '@reduxjs/toolkit';
|
||||||
|
import { WorkflowV2 } from 'features/nodes/types/workflow';
|
||||||
import { Graph } from 'services/api/types';
|
import { Graph } from 'services/api/types';
|
||||||
|
|
||||||
export const textToImageGraphBuilt = createAction<Graph>(
|
export const textToImageGraphBuilt = createAction<Graph>(
|
||||||
@ -17,10 +18,15 @@ export const isAnyGraphBuilt = isAnyOf(
|
|||||||
nodesGraphBuilt
|
nodesGraphBuilt
|
||||||
);
|
);
|
||||||
|
|
||||||
export const workflowLoadRequested = createAction<unknown>(
|
export const workflowLoadRequested = createAction<{
|
||||||
'nodes/workflowLoadRequested'
|
workflow: unknown;
|
||||||
);
|
asCopy: boolean;
|
||||||
|
}>('nodes/workflowLoadRequested');
|
||||||
|
|
||||||
export const updateAllNodesRequested = createAction(
|
export const updateAllNodesRequested = createAction(
|
||||||
'nodes/updateAllNodesRequested'
|
'nodes/updateAllNodesRequested'
|
||||||
);
|
);
|
||||||
|
|
||||||
|
export const workflowLoaded = createAction<WorkflowV2>(
|
||||||
|
'workflow/workflowLoaded'
|
||||||
|
);
|
||||||
|
@ -1,33 +1,5 @@
|
|||||||
import { createSlice, PayloadAction } from '@reduxjs/toolkit';
|
import { createSlice, PayloadAction } from '@reduxjs/toolkit';
|
||||||
import { cloneDeep, forEach, isEqual, uniqBy } from 'lodash-es';
|
import { workflowLoaded } from 'features/nodes/store/actions';
|
||||||
import {
|
|
||||||
addEdge,
|
|
||||||
applyEdgeChanges,
|
|
||||||
applyNodeChanges,
|
|
||||||
Connection,
|
|
||||||
Edge,
|
|
||||||
EdgeChange,
|
|
||||||
EdgeRemoveChange,
|
|
||||||
getConnectedEdges,
|
|
||||||
getIncomers,
|
|
||||||
getOutgoers,
|
|
||||||
Node,
|
|
||||||
NodeChange,
|
|
||||||
OnConnectStartParams,
|
|
||||||
SelectionMode,
|
|
||||||
updateEdge,
|
|
||||||
Viewport,
|
|
||||||
XYPosition,
|
|
||||||
} from 'reactflow';
|
|
||||||
import { receivedOpenAPISchema } from 'services/api/thunks/schema';
|
|
||||||
import {
|
|
||||||
appSocketGeneratorProgress,
|
|
||||||
appSocketInvocationComplete,
|
|
||||||
appSocketInvocationError,
|
|
||||||
appSocketInvocationStarted,
|
|
||||||
appSocketQueueItemStatusChanged,
|
|
||||||
} from 'services/events/actions';
|
|
||||||
import { v4 as uuidv4 } from 'uuid';
|
|
||||||
import { SHARED_NODE_PROPERTIES } from 'features/nodes/types/constants';
|
import { SHARED_NODE_PROPERTIES } from 'features/nodes/types/constants';
|
||||||
import {
|
import {
|
||||||
BoardFieldValue,
|
BoardFieldValue,
|
||||||
@ -57,7 +29,35 @@ import {
|
|||||||
NodeExecutionState,
|
NodeExecutionState,
|
||||||
zNodeStatus,
|
zNodeStatus,
|
||||||
} from 'features/nodes/types/invocation';
|
} from 'features/nodes/types/invocation';
|
||||||
import { WorkflowV2 } from 'features/nodes/types/workflow';
|
import { cloneDeep, forEach } from 'lodash-es';
|
||||||
|
import {
|
||||||
|
addEdge,
|
||||||
|
applyEdgeChanges,
|
||||||
|
applyNodeChanges,
|
||||||
|
Connection,
|
||||||
|
Edge,
|
||||||
|
EdgeChange,
|
||||||
|
EdgeRemoveChange,
|
||||||
|
getConnectedEdges,
|
||||||
|
getIncomers,
|
||||||
|
getOutgoers,
|
||||||
|
Node,
|
||||||
|
NodeChange,
|
||||||
|
OnConnectStartParams,
|
||||||
|
SelectionMode,
|
||||||
|
updateEdge,
|
||||||
|
Viewport,
|
||||||
|
XYPosition,
|
||||||
|
} from 'reactflow';
|
||||||
|
import { receivedOpenAPISchema } from 'services/api/thunks/schema';
|
||||||
|
import {
|
||||||
|
appSocketGeneratorProgress,
|
||||||
|
appSocketInvocationComplete,
|
||||||
|
appSocketInvocationError,
|
||||||
|
appSocketInvocationStarted,
|
||||||
|
appSocketQueueItemStatusChanged,
|
||||||
|
} from 'services/events/actions';
|
||||||
|
import { v4 as uuidv4 } from 'uuid';
|
||||||
import { NodesState } from './types';
|
import { NodesState } from './types';
|
||||||
import { findConnectionToValidHandle } from './util/findConnectionToValidHandle';
|
import { findConnectionToValidHandle } from './util/findConnectionToValidHandle';
|
||||||
import { findUnoccupiedPosition } from './util/findUnoccupiedPosition';
|
import { findUnoccupiedPosition } from './util/findUnoccupiedPosition';
|
||||||
@ -70,20 +70,6 @@ const initialNodeExecutionState: Omit<NodeExecutionState, 'nodeId'> = {
|
|||||||
outputs: [],
|
outputs: [],
|
||||||
};
|
};
|
||||||
|
|
||||||
const INITIAL_WORKFLOW: WorkflowV2 = {
|
|
||||||
name: '',
|
|
||||||
author: '',
|
|
||||||
description: '',
|
|
||||||
version: '',
|
|
||||||
contact: '',
|
|
||||||
tags: '',
|
|
||||||
notes: '',
|
|
||||||
nodes: [],
|
|
||||||
edges: [],
|
|
||||||
exposedFields: [],
|
|
||||||
meta: { version: '2.0.0' },
|
|
||||||
};
|
|
||||||
|
|
||||||
export const initialNodesState: NodesState = {
|
export const initialNodesState: NodesState = {
|
||||||
nodes: [],
|
nodes: [],
|
||||||
edges: [],
|
edges: [],
|
||||||
@ -103,7 +89,6 @@ export const initialNodesState: NodesState = {
|
|||||||
nodeOpacity: 1,
|
nodeOpacity: 1,
|
||||||
selectedNodes: [],
|
selectedNodes: [],
|
||||||
selectedEdges: [],
|
selectedEdges: [],
|
||||||
workflow: INITIAL_WORKFLOW,
|
|
||||||
nodeExecutionStates: {},
|
nodeExecutionStates: {},
|
||||||
viewport: { x: 0, y: 0, zoom: 1 },
|
viewport: { x: 0, y: 0, zoom: 1 },
|
||||||
mouseOverField: null,
|
mouseOverField: null,
|
||||||
@ -308,23 +293,6 @@ const nodesSlice = createSlice({
|
|||||||
}
|
}
|
||||||
state.modifyingEdge = false;
|
state.modifyingEdge = false;
|
||||||
},
|
},
|
||||||
workflowExposedFieldAdded: (
|
|
||||||
state,
|
|
||||||
action: PayloadAction<FieldIdentifier>
|
|
||||||
) => {
|
|
||||||
state.workflow.exposedFields = uniqBy(
|
|
||||||
state.workflow.exposedFields.concat(action.payload),
|
|
||||||
(field) => `${field.nodeId}-${field.fieldName}`
|
|
||||||
);
|
|
||||||
},
|
|
||||||
workflowExposedFieldRemoved: (
|
|
||||||
state,
|
|
||||||
action: PayloadAction<FieldIdentifier>
|
|
||||||
) => {
|
|
||||||
state.workflow.exposedFields = state.workflow.exposedFields.filter(
|
|
||||||
(field) => !isEqual(field, action.payload)
|
|
||||||
);
|
|
||||||
},
|
|
||||||
fieldLabelChanged: (
|
fieldLabelChanged: (
|
||||||
state,
|
state,
|
||||||
action: PayloadAction<{
|
action: PayloadAction<{
|
||||||
@ -344,20 +312,6 @@ const nodesSlice = createSlice({
|
|||||||
}
|
}
|
||||||
field.label = label;
|
field.label = label;
|
||||||
},
|
},
|
||||||
nodeEmbedWorkflowChanged: (
|
|
||||||
state,
|
|
||||||
action: PayloadAction<{ nodeId: string; embedWorkflow: boolean }>
|
|
||||||
) => {
|
|
||||||
const { nodeId, embedWorkflow } = action.payload;
|
|
||||||
const nodeIndex = state.nodes.findIndex((n) => n.id === nodeId);
|
|
||||||
|
|
||||||
const node = state.nodes?.[nodeIndex];
|
|
||||||
|
|
||||||
if (!isInvocationNode(node)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
node.data.embedWorkflow = embedWorkflow;
|
|
||||||
},
|
|
||||||
nodeUseCacheChanged: (
|
nodeUseCacheChanged: (
|
||||||
state,
|
state,
|
||||||
action: PayloadAction<{ nodeId: string; useCache: boolean }>
|
action: PayloadAction<{ nodeId: string; useCache: boolean }>
|
||||||
@ -522,9 +476,6 @@ const nodesSlice = createSlice({
|
|||||||
},
|
},
|
||||||
nodesDeleted: (state, action: PayloadAction<AnyNode[]>) => {
|
nodesDeleted: (state, action: PayloadAction<AnyNode[]>) => {
|
||||||
action.payload.forEach((node) => {
|
action.payload.forEach((node) => {
|
||||||
state.workflow.exposedFields = state.workflow.exposedFields.filter(
|
|
||||||
(f) => f.nodeId !== node.id
|
|
||||||
);
|
|
||||||
if (!isInvocationNode(node)) {
|
if (!isInvocationNode(node)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@ -687,7 +638,6 @@ const nodesSlice = createSlice({
|
|||||||
nodeEditorReset: (state) => {
|
nodeEditorReset: (state) => {
|
||||||
state.nodes = [];
|
state.nodes = [];
|
||||||
state.edges = [];
|
state.edges = [];
|
||||||
state.workflow = cloneDeep(INITIAL_WORKFLOW);
|
|
||||||
},
|
},
|
||||||
shouldValidateGraphChanged: (state, action: PayloadAction<boolean>) => {
|
shouldValidateGraphChanged: (state, action: PayloadAction<boolean>) => {
|
||||||
state.shouldValidateGraph = action.payload;
|
state.shouldValidateGraph = action.payload;
|
||||||
@ -704,56 +654,6 @@ const nodesSlice = createSlice({
|
|||||||
nodeOpacityChanged: (state, action: PayloadAction<number>) => {
|
nodeOpacityChanged: (state, action: PayloadAction<number>) => {
|
||||||
state.nodeOpacity = action.payload;
|
state.nodeOpacity = action.payload;
|
||||||
},
|
},
|
||||||
workflowNameChanged: (state, action: PayloadAction<string>) => {
|
|
||||||
state.workflow.name = action.payload;
|
|
||||||
},
|
|
||||||
workflowDescriptionChanged: (state, action: PayloadAction<string>) => {
|
|
||||||
state.workflow.description = action.payload;
|
|
||||||
},
|
|
||||||
workflowTagsChanged: (state, action: PayloadAction<string>) => {
|
|
||||||
state.workflow.tags = action.payload;
|
|
||||||
},
|
|
||||||
workflowAuthorChanged: (state, action: PayloadAction<string>) => {
|
|
||||||
state.workflow.author = action.payload;
|
|
||||||
},
|
|
||||||
workflowNotesChanged: (state, action: PayloadAction<string>) => {
|
|
||||||
state.workflow.notes = action.payload;
|
|
||||||
},
|
|
||||||
workflowVersionChanged: (state, action: PayloadAction<string>) => {
|
|
||||||
state.workflow.version = action.payload;
|
|
||||||
},
|
|
||||||
workflowContactChanged: (state, action: PayloadAction<string>) => {
|
|
||||||
state.workflow.contact = action.payload;
|
|
||||||
},
|
|
||||||
workflowLoaded: (state, action: PayloadAction<WorkflowV2>) => {
|
|
||||||
const { nodes, edges, ...workflow } = action.payload;
|
|
||||||
state.workflow = workflow;
|
|
||||||
|
|
||||||
state.nodes = applyNodeChanges(
|
|
||||||
nodes.map((node) => ({
|
|
||||||
item: { ...node, ...SHARED_NODE_PROPERTIES },
|
|
||||||
type: 'add',
|
|
||||||
})),
|
|
||||||
[]
|
|
||||||
);
|
|
||||||
state.edges = applyEdgeChanges(
|
|
||||||
edges.map((edge) => ({ item: edge, type: 'add' })),
|
|
||||||
[]
|
|
||||||
);
|
|
||||||
|
|
||||||
state.nodeExecutionStates = nodes.reduce<
|
|
||||||
Record<string, NodeExecutionState>
|
|
||||||
>((acc, node) => {
|
|
||||||
acc[node.id] = {
|
|
||||||
nodeId: node.id,
|
|
||||||
...initialNodeExecutionState,
|
|
||||||
};
|
|
||||||
return acc;
|
|
||||||
}, {});
|
|
||||||
},
|
|
||||||
workflowReset: (state) => {
|
|
||||||
state.workflow = cloneDeep(INITIAL_WORKFLOW);
|
|
||||||
},
|
|
||||||
viewportChanged: (state, action: PayloadAction<Viewport>) => {
|
viewportChanged: (state, action: PayloadAction<Viewport>) => {
|
||||||
state.viewport = action.payload;
|
state.viewport = action.payload;
|
||||||
},
|
},
|
||||||
@ -899,6 +799,32 @@ const nodesSlice = createSlice({
|
|||||||
builder.addCase(receivedOpenAPISchema.pending, (state) => {
|
builder.addCase(receivedOpenAPISchema.pending, (state) => {
|
||||||
state.isReady = false;
|
state.isReady = false;
|
||||||
});
|
});
|
||||||
|
|
||||||
|
builder.addCase(workflowLoaded, (state, action) => {
|
||||||
|
const { nodes, edges } = action.payload;
|
||||||
|
state.nodes = applyNodeChanges(
|
||||||
|
nodes.map((node) => ({
|
||||||
|
item: { ...node, ...SHARED_NODE_PROPERTIES },
|
||||||
|
type: 'add',
|
||||||
|
})),
|
||||||
|
[]
|
||||||
|
);
|
||||||
|
state.edges = applyEdgeChanges(
|
||||||
|
edges.map((edge) => ({ item: edge, type: 'add' })),
|
||||||
|
[]
|
||||||
|
);
|
||||||
|
|
||||||
|
state.nodeExecutionStates = nodes.reduce<
|
||||||
|
Record<string, NodeExecutionState>
|
||||||
|
>((acc, node) => {
|
||||||
|
acc[node.id] = {
|
||||||
|
nodeId: node.id,
|
||||||
|
...initialNodeExecutionState,
|
||||||
|
};
|
||||||
|
return acc;
|
||||||
|
}, {});
|
||||||
|
});
|
||||||
|
|
||||||
builder.addCase(appSocketInvocationStarted, (state, action) => {
|
builder.addCase(appSocketInvocationStarted, (state, action) => {
|
||||||
const { source_node_id } = action.payload.data;
|
const { source_node_id } = action.payload.data;
|
||||||
const node = state.nodeExecutionStates[source_node_id];
|
const node = state.nodeExecutionStates[source_node_id];
|
||||||
@ -984,7 +910,6 @@ export const {
|
|||||||
nodeAdded,
|
nodeAdded,
|
||||||
nodeReplaced,
|
nodeReplaced,
|
||||||
nodeEditorReset,
|
nodeEditorReset,
|
||||||
nodeEmbedWorkflowChanged,
|
|
||||||
nodeExclusivelySelected,
|
nodeExclusivelySelected,
|
||||||
nodeIsIntermediateChanged,
|
nodeIsIntermediateChanged,
|
||||||
nodeIsOpenChanged,
|
nodeIsOpenChanged,
|
||||||
@ -1008,16 +933,6 @@ export const {
|
|||||||
shouldSnapToGridChanged,
|
shouldSnapToGridChanged,
|
||||||
shouldValidateGraphChanged,
|
shouldValidateGraphChanged,
|
||||||
viewportChanged,
|
viewportChanged,
|
||||||
workflowAuthorChanged,
|
|
||||||
workflowContactChanged,
|
|
||||||
workflowDescriptionChanged,
|
|
||||||
workflowExposedFieldAdded,
|
|
||||||
workflowExposedFieldRemoved,
|
|
||||||
workflowLoaded,
|
|
||||||
workflowNameChanged,
|
|
||||||
workflowNotesChanged,
|
|
||||||
workflowTagsChanged,
|
|
||||||
workflowVersionChanged,
|
|
||||||
edgeAdded,
|
edgeAdded,
|
||||||
} = nodesSlice.actions;
|
} = nodesSlice.actions;
|
||||||
|
|
||||||
|
@ -29,7 +29,6 @@ export type NodesState = {
|
|||||||
shouldColorEdges: boolean;
|
shouldColorEdges: boolean;
|
||||||
selectedNodes: string[];
|
selectedNodes: string[];
|
||||||
selectedEdges: string[];
|
selectedEdges: string[];
|
||||||
workflow: Omit<WorkflowV2, 'nodes' | 'edges'>;
|
|
||||||
nodeExecutionStates: Record<string, NodeExecutionState>;
|
nodeExecutionStates: Record<string, NodeExecutionState>;
|
||||||
viewport: Viewport;
|
viewport: Viewport;
|
||||||
isReady: boolean;
|
isReady: boolean;
|
||||||
@ -41,3 +40,5 @@ export type NodesState = {
|
|||||||
addNewNodePosition: XYPosition | null;
|
addNewNodePosition: XYPosition | null;
|
||||||
selectionMode: SelectionMode;
|
selectionMode: SelectionMode;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export type WorkflowsState = Omit<WorkflowV2, 'nodes' | 'edges'>;
|
||||||
|
@ -0,0 +1,99 @@
|
|||||||
|
import { PayloadAction, createSlice } from '@reduxjs/toolkit';
|
||||||
|
import { workflowLoaded } from 'features/nodes/store/actions';
|
||||||
|
import { nodeEditorReset, nodesDeleted } from 'features/nodes/store/nodesSlice';
|
||||||
|
import { WorkflowsState as WorkflowState } from 'features/nodes/store/types';
|
||||||
|
import { FieldIdentifier } from 'features/nodes/types/field';
|
||||||
|
import { cloneDeep, isEqual, uniqBy } from 'lodash-es';
|
||||||
|
|
||||||
|
export const initialWorkflowState: WorkflowState = {
|
||||||
|
name: '',
|
||||||
|
author: '',
|
||||||
|
description: '',
|
||||||
|
version: '',
|
||||||
|
contact: '',
|
||||||
|
tags: '',
|
||||||
|
notes: '',
|
||||||
|
exposedFields: [],
|
||||||
|
meta: { version: '2.0.0', category: 'user' },
|
||||||
|
};
|
||||||
|
|
||||||
|
const workflowSlice = createSlice({
|
||||||
|
name: 'workflow',
|
||||||
|
initialState: initialWorkflowState,
|
||||||
|
reducers: {
|
||||||
|
workflowExposedFieldAdded: (
|
||||||
|
state,
|
||||||
|
action: PayloadAction<FieldIdentifier>
|
||||||
|
) => {
|
||||||
|
state.exposedFields = uniqBy(
|
||||||
|
state.exposedFields.concat(action.payload),
|
||||||
|
(field) => `${field.nodeId}-${field.fieldName}`
|
||||||
|
);
|
||||||
|
},
|
||||||
|
workflowExposedFieldRemoved: (
|
||||||
|
state,
|
||||||
|
action: PayloadAction<FieldIdentifier>
|
||||||
|
) => {
|
||||||
|
state.exposedFields = state.exposedFields.filter(
|
||||||
|
(field) => !isEqual(field, action.payload)
|
||||||
|
);
|
||||||
|
},
|
||||||
|
workflowNameChanged: (state, action: PayloadAction<string>) => {
|
||||||
|
state.name = action.payload;
|
||||||
|
},
|
||||||
|
workflowDescriptionChanged: (state, action: PayloadAction<string>) => {
|
||||||
|
state.description = action.payload;
|
||||||
|
},
|
||||||
|
workflowTagsChanged: (state, action: PayloadAction<string>) => {
|
||||||
|
state.tags = action.payload;
|
||||||
|
},
|
||||||
|
workflowAuthorChanged: (state, action: PayloadAction<string>) => {
|
||||||
|
state.author = action.payload;
|
||||||
|
},
|
||||||
|
workflowNotesChanged: (state, action: PayloadAction<string>) => {
|
||||||
|
state.notes = action.payload;
|
||||||
|
},
|
||||||
|
workflowVersionChanged: (state, action: PayloadAction<string>) => {
|
||||||
|
state.version = action.payload;
|
||||||
|
},
|
||||||
|
workflowContactChanged: (state, action: PayloadAction<string>) => {
|
||||||
|
state.contact = action.payload;
|
||||||
|
},
|
||||||
|
workflowIDChanged: (state, action: PayloadAction<string>) => {
|
||||||
|
state.id = action.payload;
|
||||||
|
},
|
||||||
|
workflowReset: () => cloneDeep(initialWorkflowState),
|
||||||
|
},
|
||||||
|
extraReducers: (builder) => {
|
||||||
|
builder.addCase(workflowLoaded, (state, action) => {
|
||||||
|
const { nodes: _nodes, edges: _edges, ...workflow } = action.payload;
|
||||||
|
return cloneDeep(workflow);
|
||||||
|
});
|
||||||
|
|
||||||
|
builder.addCase(nodesDeleted, (state, action) => {
|
||||||
|
action.payload.forEach((node) => {
|
||||||
|
state.exposedFields = state.exposedFields.filter(
|
||||||
|
(f) => f.nodeId !== node.id
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
builder.addCase(nodeEditorReset, () => cloneDeep(initialWorkflowState));
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
export const {
|
||||||
|
workflowExposedFieldAdded,
|
||||||
|
workflowExposedFieldRemoved,
|
||||||
|
workflowNameChanged,
|
||||||
|
workflowDescriptionChanged,
|
||||||
|
workflowTagsChanged,
|
||||||
|
workflowAuthorChanged,
|
||||||
|
workflowNotesChanged,
|
||||||
|
workflowVersionChanged,
|
||||||
|
workflowContactChanged,
|
||||||
|
workflowIDChanged,
|
||||||
|
workflowReset,
|
||||||
|
} = workflowSlice.actions;
|
||||||
|
|
||||||
|
export default workflowSlice.reducer;
|
@ -18,7 +18,6 @@ export const zInvocationTemplate = z.object({
|
|||||||
inputs: z.record(zFieldInputTemplate),
|
inputs: z.record(zFieldInputTemplate),
|
||||||
outputs: z.record(zFieldOutputTemplate),
|
outputs: z.record(zFieldOutputTemplate),
|
||||||
outputType: z.string().min(1),
|
outputType: z.string().min(1),
|
||||||
withWorkflow: z.boolean(),
|
|
||||||
version: zSemVer,
|
version: zSemVer,
|
||||||
useCache: z.boolean(),
|
useCache: z.boolean(),
|
||||||
nodePack: z.string().min(1).nullish(),
|
nodePack: z.string().min(1).nullish(),
|
||||||
@ -33,7 +32,6 @@ export const zInvocationNodeData = z.object({
|
|||||||
label: z.string(),
|
label: z.string(),
|
||||||
isOpen: z.boolean(),
|
isOpen: z.boolean(),
|
||||||
notes: z.string(),
|
notes: z.string(),
|
||||||
embedWorkflow: z.boolean(),
|
|
||||||
isIntermediate: z.boolean(),
|
isIntermediate: z.boolean(),
|
||||||
useCache: z.boolean(),
|
useCache: z.boolean(),
|
||||||
version: zSemVer,
|
version: zSemVer,
|
||||||
|
@ -13,6 +13,9 @@ export type XYPosition = z.infer<typeof zXYPosition>;
|
|||||||
|
|
||||||
export const zDimension = z.number().gt(0).nullish();
|
export const zDimension = z.number().gt(0).nullish();
|
||||||
export type Dimension = z.infer<typeof zDimension>;
|
export type Dimension = z.infer<typeof zDimension>;
|
||||||
|
|
||||||
|
export const zWorkflowCategory = z.enum(['user', 'default']);
|
||||||
|
export type WorkflowCategory = z.infer<typeof zWorkflowCategory>;
|
||||||
// #endregion
|
// #endregion
|
||||||
|
|
||||||
// #region Workflow Nodes
|
// #region Workflow Nodes
|
||||||
@ -73,6 +76,7 @@ export type WorkflowEdge = z.infer<typeof zWorkflowEdge>;
|
|||||||
|
|
||||||
// #region Workflow
|
// #region Workflow
|
||||||
export const zWorkflowV2 = z.object({
|
export const zWorkflowV2 = z.object({
|
||||||
|
id: z.string().min(1).optional(),
|
||||||
name: z.string(),
|
name: z.string(),
|
||||||
author: z.string(),
|
author: z.string(),
|
||||||
description: z.string(),
|
description: z.string(),
|
||||||
@ -84,6 +88,7 @@ export const zWorkflowV2 = z.object({
|
|||||||
edges: z.array(zWorkflowEdge),
|
edges: z.array(zWorkflowEdge),
|
||||||
exposedFields: z.array(zFieldIdentifier),
|
exposedFields: z.array(zFieldIdentifier),
|
||||||
meta: z.object({
|
meta: z.object({
|
||||||
|
category: zWorkflowCategory.default('user'),
|
||||||
version: z.literal('2.0.0'),
|
version: z.literal('2.0.0'),
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
@ -1,14 +1,13 @@
|
|||||||
import { NodesState } from 'features/nodes/store/types';
|
import { NodesState } from 'features/nodes/store/types';
|
||||||
|
import {
|
||||||
|
FieldInputInstance,
|
||||||
|
isColorFieldInputInstance,
|
||||||
|
} from 'features/nodes/types/field';
|
||||||
import { isInvocationNode } from 'features/nodes/types/invocation';
|
import { isInvocationNode } from 'features/nodes/types/invocation';
|
||||||
import { cloneDeep, omit, reduce } from 'lodash-es';
|
import { cloneDeep, omit, reduce } from 'lodash-es';
|
||||||
import { Graph } from 'services/api/types';
|
import { Graph } from 'services/api/types';
|
||||||
import { AnyInvocation } from 'services/events/types';
|
import { AnyInvocation } from 'services/events/types';
|
||||||
import { v4 as uuidv4 } from 'uuid';
|
import { v4 as uuidv4 } from 'uuid';
|
||||||
import { buildWorkflow } from 'features/nodes/util/workflow/buildWorkflow';
|
|
||||||
import {
|
|
||||||
FieldInputInstance,
|
|
||||||
isColorFieldInputInstance,
|
|
||||||
} from 'features/nodes/types/field';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* We need to do special handling for some fields
|
* We need to do special handling for some fields
|
||||||
@ -44,7 +43,7 @@ export const buildNodesGraph = (nodesState: NodesState): Graph => {
|
|||||||
const parsedNodes = filteredNodes.reduce<NonNullable<Graph['nodes']>>(
|
const parsedNodes = filteredNodes.reduce<NonNullable<Graph['nodes']>>(
|
||||||
(nodesAccumulator, node) => {
|
(nodesAccumulator, node) => {
|
||||||
const { id, data } = node;
|
const { id, data } = node;
|
||||||
const { type, inputs, isIntermediate, embedWorkflow } = data;
|
const { type, inputs, isIntermediate } = data;
|
||||||
|
|
||||||
// Transform each node's inputs to simple key-value pairs
|
// Transform each node's inputs to simple key-value pairs
|
||||||
const transformedInputs = reduce(
|
const transformedInputs = reduce(
|
||||||
@ -69,11 +68,6 @@ export const buildNodesGraph = (nodesState: NodesState): Graph => {
|
|||||||
is_intermediate: isIntermediate,
|
is_intermediate: isIntermediate,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (embedWorkflow) {
|
|
||||||
// add the workflow to the node
|
|
||||||
Object.assign(graphNode, { workflow: buildWorkflow(nodesState) });
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add it to the nodes object
|
// Add it to the nodes object
|
||||||
Object.assign(nodesAccumulator, {
|
Object.assign(nodesAccumulator, {
|
||||||
[id]: graphNode,
|
[id]: graphNode,
|
||||||
|
@ -67,7 +67,6 @@ export const buildInvocationNode = (
|
|||||||
label: '',
|
label: '',
|
||||||
notes: '',
|
notes: '',
|
||||||
isOpen: true,
|
isOpen: true,
|
||||||
embedWorkflow: false,
|
|
||||||
isIntermediate: type === 'save_image' ? false : true,
|
isIntermediate: type === 'save_image' ? false : true,
|
||||||
useCache: template.useCache,
|
useCache: template.useCache,
|
||||||
inputs,
|
inputs,
|
||||||
|
@ -1,16 +1,15 @@
|
|||||||
import { satisfies } from 'compare-versions';
|
import { satisfies } from 'compare-versions';
|
||||||
import { NodeUpdateError } from 'features/nodes/types/error';
|
import { NodeUpdateError } from 'features/nodes/types/error';
|
||||||
import {
|
import {
|
||||||
InvocationNodeData,
|
InvocationNode,
|
||||||
InvocationTemplate,
|
InvocationTemplate,
|
||||||
} from 'features/nodes/types/invocation';
|
} from 'features/nodes/types/invocation';
|
||||||
import { zParsedSemver } from 'features/nodes/types/semver';
|
import { zParsedSemver } from 'features/nodes/types/semver';
|
||||||
import { cloneDeep, defaultsDeep } from 'lodash-es';
|
import { cloneDeep, keys, defaultsDeep, pick } from 'lodash-es';
|
||||||
import { Node } from 'reactflow';
|
|
||||||
import { buildInvocationNode } from './buildInvocationNode';
|
import { buildInvocationNode } from './buildInvocationNode';
|
||||||
|
|
||||||
export const getNeedsUpdate = (
|
export const getNeedsUpdate = (
|
||||||
node: Node<InvocationNodeData>,
|
node: InvocationNode,
|
||||||
template: InvocationTemplate
|
template: InvocationTemplate
|
||||||
): boolean => {
|
): boolean => {
|
||||||
if (node.data.type !== template.type) {
|
if (node.data.type !== template.type) {
|
||||||
@ -24,7 +23,7 @@ export const getNeedsUpdate = (
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
export const getMayUpdateNode = (
|
export const getMayUpdateNode = (
|
||||||
node: Node<InvocationNodeData>,
|
node: InvocationNode,
|
||||||
template: InvocationTemplate
|
template: InvocationTemplate
|
||||||
): boolean => {
|
): boolean => {
|
||||||
const needsUpdate = getNeedsUpdate(node, template);
|
const needsUpdate = getNeedsUpdate(node, template);
|
||||||
@ -45,9 +44,9 @@ export const getMayUpdateNode = (
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
export const updateNode = (
|
export const updateNode = (
|
||||||
node: Node<InvocationNodeData>,
|
node: InvocationNode,
|
||||||
template: InvocationTemplate
|
template: InvocationTemplate
|
||||||
): Node<InvocationNodeData> => {
|
): InvocationNode => {
|
||||||
const mayUpdate = getMayUpdateNode(node, template);
|
const mayUpdate = getMayUpdateNode(node, template);
|
||||||
|
|
||||||
if (!mayUpdate || node.data.type !== template.type) {
|
if (!mayUpdate || node.data.type !== template.type) {
|
||||||
@ -64,5 +63,8 @@ export const updateNode = (
|
|||||||
clone.data.version = template.version;
|
clone.data.version = template.version;
|
||||||
defaultsDeep(clone, defaults); // mutates!
|
defaultsDeep(clone, defaults); // mutates!
|
||||||
|
|
||||||
|
// Remove any fields that are not in the template
|
||||||
|
clone.data.inputs = pick(clone.data.inputs, keys(defaults.data.inputs));
|
||||||
|
clone.data.outputs = pick(clone.data.outputs, keys(defaults.data.outputs));
|
||||||
return clone;
|
return clone;
|
||||||
};
|
};
|
||||||
|
@ -83,7 +83,6 @@ export const parseSchema = (
|
|||||||
const description = schema.description ?? '';
|
const description = schema.description ?? '';
|
||||||
const version = schema.version;
|
const version = schema.version;
|
||||||
const nodePack = schema.node_pack;
|
const nodePack = schema.node_pack;
|
||||||
let withWorkflow = false;
|
|
||||||
|
|
||||||
const inputs = reduce(
|
const inputs = reduce(
|
||||||
schema.properties,
|
schema.properties,
|
||||||
@ -111,12 +110,6 @@ export const parseSchema = (
|
|||||||
try {
|
try {
|
||||||
const fieldType = parseFieldType(property);
|
const fieldType = parseFieldType(property);
|
||||||
|
|
||||||
if (fieldType.name === 'WorkflowField') {
|
|
||||||
// This supports workflows, set the flag and skip to next field
|
|
||||||
withWorkflow = true;
|
|
||||||
return inputsAccumulator;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (isReservedFieldType(fieldType.name)) {
|
if (isReservedFieldType(fieldType.name)) {
|
||||||
// Skip processing this reserved field
|
// Skip processing this reserved field
|
||||||
return inputsAccumulator;
|
return inputsAccumulator;
|
||||||
@ -251,7 +244,6 @@ export const parseSchema = (
|
|||||||
inputs,
|
inputs,
|
||||||
outputs,
|
outputs,
|
||||||
useCache,
|
useCache,
|
||||||
withWorkflow,
|
|
||||||
nodePack,
|
nodePack,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -1,23 +1,39 @@
|
|||||||
import { logger } from 'app/logging/logger';
|
import { logger } from 'app/logging/logger';
|
||||||
|
import { parseify } from 'common/util/serialize';
|
||||||
import { NodesState } from 'features/nodes/store/types';
|
import { NodesState } from 'features/nodes/store/types';
|
||||||
import {
|
import {
|
||||||
WorkflowV2,
|
WorkflowV2,
|
||||||
zWorkflowEdge,
|
zWorkflowEdge,
|
||||||
zWorkflowNode,
|
zWorkflowNode,
|
||||||
} from 'features/nodes/types/workflow';
|
} from 'features/nodes/types/workflow';
|
||||||
import { fromZodError } from 'zod-validation-error';
|
|
||||||
import { parseify } from 'common/util/serialize';
|
|
||||||
import i18n from 'i18next';
|
import i18n from 'i18next';
|
||||||
|
import { cloneDeep } from 'lodash-es';
|
||||||
|
import { fromZodError } from 'zod-validation-error';
|
||||||
|
|
||||||
export const buildWorkflow = (nodesState: NodesState): WorkflowV2 => {
|
type BuildWorkflowArg = {
|
||||||
const { workflow: workflowMeta, nodes, edges } = nodesState;
|
nodes: NodesState['nodes'];
|
||||||
const workflow: WorkflowV2 = {
|
edges: NodesState['edges'];
|
||||||
...workflowMeta,
|
workflow: Omit<WorkflowV2, 'nodes' | 'edges'>;
|
||||||
|
};
|
||||||
|
|
||||||
|
type BuildWorkflowFunction = (arg: BuildWorkflowArg) => WorkflowV2;
|
||||||
|
|
||||||
|
export const buildWorkflow: BuildWorkflowFunction = ({
|
||||||
|
nodes,
|
||||||
|
edges,
|
||||||
|
workflow,
|
||||||
|
}) => {
|
||||||
|
const clonedWorkflow = cloneDeep(workflow);
|
||||||
|
const clonedNodes = cloneDeep(nodes);
|
||||||
|
const clonedEdges = cloneDeep(edges);
|
||||||
|
|
||||||
|
const newWorkflow: WorkflowV2 = {
|
||||||
|
...clonedWorkflow,
|
||||||
nodes: [],
|
nodes: [],
|
||||||
edges: [],
|
edges: [],
|
||||||
};
|
};
|
||||||
|
|
||||||
nodes
|
clonedNodes
|
||||||
.filter((n) =>
|
.filter((n) =>
|
||||||
['invocation', 'notes'].includes(n.type ?? '__UNKNOWN_NODE_TYPE__')
|
['invocation', 'notes'].includes(n.type ?? '__UNKNOWN_NODE_TYPE__')
|
||||||
)
|
)
|
||||||
@ -30,10 +46,10 @@ export const buildWorkflow = (nodesState: NodesState): WorkflowV2 => {
|
|||||||
logger('nodes').warn({ node: parseify(node) }, message);
|
logger('nodes').warn({ node: parseify(node) }, message);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
workflow.nodes.push(result.data);
|
newWorkflow.nodes.push(result.data);
|
||||||
});
|
});
|
||||||
|
|
||||||
edges.forEach((edge) => {
|
clonedEdges.forEach((edge) => {
|
||||||
const result = zWorkflowEdge.safeParse(edge);
|
const result = zWorkflowEdge.safeParse(edge);
|
||||||
if (!result.success) {
|
if (!result.success) {
|
||||||
const { message } = fromZodError(result.error, {
|
const { message } = fromZodError(result.error, {
|
||||||
@ -42,8 +58,8 @@ export const buildWorkflow = (nodesState: NodesState): WorkflowV2 => {
|
|||||||
logger('nodes').warn({ edge: parseify(edge) }, message);
|
logger('nodes').warn({ edge: parseify(edge) }, message);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
workflow.edges.push(result.data);
|
newWorkflow.edges.push(result.data);
|
||||||
});
|
});
|
||||||
|
|
||||||
return workflow;
|
return newWorkflow;
|
||||||
};
|
};
|
||||||
|
@ -1,18 +1,17 @@
|
|||||||
import { $store } from 'app/store/nanostores/store';
|
import { $store } from 'app/store/nanostores/store';
|
||||||
import { RootState } from 'app/store/store';
|
|
||||||
import { FieldType } from 'features/nodes/types/field';
|
|
||||||
import { InvocationNodeData } from 'features/nodes/types/invocation';
|
|
||||||
import { t } from 'i18next';
|
|
||||||
import { forEach } from 'lodash-es';
|
|
||||||
import { z } from 'zod';
|
|
||||||
import {
|
import {
|
||||||
WorkflowMigrationError,
|
WorkflowMigrationError,
|
||||||
WorkflowVersionError,
|
WorkflowVersionError,
|
||||||
} from 'features/nodes/types/error';
|
} from 'features/nodes/types/error';
|
||||||
|
import { FieldType } from 'features/nodes/types/field';
|
||||||
|
import { InvocationNodeData } from 'features/nodes/types/invocation';
|
||||||
import { zSemVer } from 'features/nodes/types/semver';
|
import { zSemVer } from 'features/nodes/types/semver';
|
||||||
import { FIELD_TYPE_V1_TO_FIELD_TYPE_V2_MAPPING } from 'features/nodes/types/v1/fieldTypeMap';
|
import { FIELD_TYPE_V1_TO_FIELD_TYPE_V2_MAPPING } from 'features/nodes/types/v1/fieldTypeMap';
|
||||||
import { WorkflowV1, zWorkflowV1 } from 'features/nodes/types/v1/workflowV1';
|
import { WorkflowV1, zWorkflowV1 } from 'features/nodes/types/v1/workflowV1';
|
||||||
import { WorkflowV2, zWorkflowV2 } from 'features/nodes/types/workflow';
|
import { WorkflowV2, zWorkflowV2 } from 'features/nodes/types/workflow';
|
||||||
|
import { t } from 'i18next';
|
||||||
|
import { forEach } from 'lodash-es';
|
||||||
|
import { z } from 'zod';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Helper schema to extract the version from a workflow.
|
* Helper schema to extract the version from a workflow.
|
||||||
@ -25,10 +24,19 @@ const zWorkflowMetaVersion = z.object({
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Migrates a workflow from V1 to V2.
|
* Migrates a workflow from V1 to V2.
|
||||||
|
*
|
||||||
|
* Changes include:
|
||||||
|
* - Field types are now structured
|
||||||
|
* - Invocation node pack is now saved in the node data
|
||||||
|
* - Workflow schema version bumped to 2.0.0
|
||||||
*/
|
*/
|
||||||
const migrateV1toV2 = (workflowToMigrate: WorkflowV1): WorkflowV2 => {
|
const migrateV1toV2 = (workflowToMigrate: WorkflowV1): WorkflowV2 => {
|
||||||
const invocationTemplates = ($store.get()?.getState() as RootState).nodes
|
const invocationTemplates = $store.get()?.getState().nodes.nodeTemplates;
|
||||||
.nodeTemplates;
|
|
||||||
|
if (!invocationTemplates) {
|
||||||
|
throw new Error(t('app.storeNotInitialized'));
|
||||||
|
}
|
||||||
|
|
||||||
workflowToMigrate.nodes.forEach((node) => {
|
workflowToMigrate.nodes.forEach((node) => {
|
||||||
if (node.type === 'invocation') {
|
if (node.type === 'invocation') {
|
||||||
// Migrate field types
|
// Migrate field types
|
||||||
@ -39,7 +47,6 @@ const migrateV1toV2 = (workflowToMigrate: WorkflowV1): WorkflowV2 => {
|
|||||||
t('nodes.unknownFieldType', { type: input.type })
|
t('nodes.unknownFieldType', { type: input.type })
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
// Cast as the V2 type
|
|
||||||
(input.type as unknown as FieldType) = newFieldType;
|
(input.type as unknown as FieldType) = newFieldType;
|
||||||
});
|
});
|
||||||
forEach(node.data.outputs, (output) => {
|
forEach(node.data.outputs, (output) => {
|
||||||
@ -50,19 +57,21 @@ const migrateV1toV2 = (workflowToMigrate: WorkflowV1): WorkflowV2 => {
|
|||||||
t('nodes.unknownFieldType', { type: output.type })
|
t('nodes.unknownFieldType', { type: output.type })
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
// Cast as the V2 type
|
|
||||||
(output.type as unknown as FieldType) = newFieldType;
|
(output.type as unknown as FieldType) = newFieldType;
|
||||||
});
|
});
|
||||||
// Migrate nodePack
|
// Add node pack
|
||||||
const invocationTemplate = invocationTemplates[node.data.type];
|
const invocationTemplate = invocationTemplates[node.data.type];
|
||||||
const nodePack = invocationTemplate
|
const nodePack = invocationTemplate
|
||||||
? invocationTemplate.nodePack
|
? invocationTemplate.nodePack
|
||||||
: t('common.unknown');
|
: t('common.unknown');
|
||||||
// Cast as the V2 type
|
|
||||||
(node.data as unknown as InvocationNodeData).nodePack = nodePack;
|
(node.data as unknown as InvocationNodeData).nodePack = nodePack;
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
(workflowToMigrate.meta.version as WorkflowV2['meta']['version']) = '2.0.0';
|
// Bump version
|
||||||
|
(workflowToMigrate as unknown as WorkflowV2).meta.version = '2.0.0';
|
||||||
|
// Add category - should always be 'user', 'default' workflows are only created by the backend
|
||||||
|
(workflowToMigrate as unknown as WorkflowV2).meta.category = 'user';
|
||||||
|
// Parsing strips out any extra properties not in the latest version
|
||||||
return zWorkflowV2.parse(workflowToMigrate);
|
return zWorkflowV2.parse(workflowToMigrate);
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -73,7 +82,6 @@ export const parseAndMigrateWorkflow = (data: unknown): WorkflowV2 => {
|
|||||||
const workflowVersionResult = zWorkflowMetaVersion.safeParse(data);
|
const workflowVersionResult = zWorkflowMetaVersion.safeParse(data);
|
||||||
|
|
||||||
if (!workflowVersionResult.success) {
|
if (!workflowVersionResult.success) {
|
||||||
console.log(data);
|
|
||||||
throw new WorkflowVersionError(t('nodes.unableToGetWorkflowVersion'));
|
throw new WorkflowVersionError(t('nodes.unableToGetWorkflowVersion'));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -39,6 +39,13 @@ export const validateWorkflow = (
|
|||||||
// Parse the raw workflow data & migrate it to the latest version
|
// Parse the raw workflow data & migrate it to the latest version
|
||||||
const _workflow = parseAndMigrateWorkflow(workflow);
|
const _workflow = parseAndMigrateWorkflow(workflow);
|
||||||
|
|
||||||
|
// System workflows are only allowed to be used as templates.
|
||||||
|
// If a system workflow is loaded, change its category to user and remove its ID so that we can save it as a user workflow.
|
||||||
|
if (_workflow.meta.category === 'default') {
|
||||||
|
_workflow.meta.category = 'user';
|
||||||
|
_workflow.id = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
// Now we can validate the graph
|
// Now we can validate the graph
|
||||||
const { nodes, edges } = _workflow;
|
const { nodes, edges } = _workflow;
|
||||||
const warnings: WorkflowWarning[] = [];
|
const warnings: WorkflowWarning[] = [];
|
||||||
|
@ -19,8 +19,8 @@ const DownloadWorkflowButton = () => {
|
|||||||
return (
|
return (
|
||||||
<IAIIconButton
|
<IAIIconButton
|
||||||
icon={<FaDownload />}
|
icon={<FaDownload />}
|
||||||
tooltip={t('nodes.downloadWorkflow')}
|
tooltip={t('workflows.downloadWorkflow')}
|
||||||
aria-label={t('nodes.downloadWorkflow')}
|
aria-label={t('workflows.downloadWorkflow')}
|
||||||
onClick={handleDownload}
|
onClick={handleDownload}
|
||||||
/>
|
/>
|
||||||
);
|
);
|
@ -1,14 +1,14 @@
|
|||||||
import { FileButton } from '@mantine/core';
|
import { FileButton } from '@mantine/core';
|
||||||
import IAIIconButton from 'common/components/IAIIconButton';
|
import IAIIconButton from 'common/components/IAIIconButton';
|
||||||
import { useLoadWorkflowFromFile } from 'features/nodes/hooks/useLoadWorkflowFromFile';
|
import { useLoadWorkflowFromFile } from 'features/workflowLibrary/hooks/useLoadWorkflowFromFile';
|
||||||
import { memo, useRef } from 'react';
|
import { memo, useRef } from 'react';
|
||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { FaUpload } from 'react-icons/fa';
|
import { FaUpload } from 'react-icons/fa';
|
||||||
|
|
||||||
const LoadWorkflowButton = () => {
|
const UploadWorkflowButton = () => {
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
const resetRef = useRef<() => void>(null);
|
const resetRef = useRef<() => void>(null);
|
||||||
const loadWorkflowFromFile = useLoadWorkflowFromFile(resetRef);
|
const loadWorkflowFromFile = useLoadWorkflowFromFile({ resetRef });
|
||||||
return (
|
return (
|
||||||
<FileButton
|
<FileButton
|
||||||
resetRef={resetRef}
|
resetRef={resetRef}
|
||||||
@ -18,8 +18,8 @@ const LoadWorkflowButton = () => {
|
|||||||
{(props) => (
|
{(props) => (
|
||||||
<IAIIconButton
|
<IAIIconButton
|
||||||
icon={<FaUpload />}
|
icon={<FaUpload />}
|
||||||
tooltip={t('nodes.loadWorkflow')}
|
tooltip={t('workflows.uploadWorkflow')}
|
||||||
aria-label={t('nodes.loadWorkflow')}
|
aria-label={t('workflows.uploadWorkflow')}
|
||||||
{...props}
|
{...props}
|
||||||
/>
|
/>
|
||||||
)}
|
)}
|
||||||
@ -27,4 +27,4 @@ const LoadWorkflowButton = () => {
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
export default memo(LoadWorkflowButton);
|
export default memo(UploadWorkflowButton);
|
@ -10,8 +10,7 @@ import {
|
|||||||
Text,
|
Text,
|
||||||
useDisclosure,
|
useDisclosure,
|
||||||
} from '@chakra-ui/react';
|
} from '@chakra-ui/react';
|
||||||
import { RootState } from 'app/store/store';
|
import { useAppDispatch } from 'app/store/storeHooks';
|
||||||
import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
|
|
||||||
import IAIIconButton from 'common/components/IAIIconButton';
|
import IAIIconButton from 'common/components/IAIIconButton';
|
||||||
import { nodeEditorReset } from 'features/nodes/store/nodesSlice';
|
import { nodeEditorReset } from 'features/nodes/store/nodesSlice';
|
||||||
import { addToast } from 'features/system/store/systemSlice';
|
import { addToast } from 'features/system/store/systemSlice';
|
||||||
@ -20,23 +19,19 @@ import { memo, useCallback, useRef } from 'react';
|
|||||||
import { useTranslation } from 'react-i18next';
|
import { useTranslation } from 'react-i18next';
|
||||||
import { FaTrash } from 'react-icons/fa';
|
import { FaTrash } from 'react-icons/fa';
|
||||||
|
|
||||||
const ResetWorkflowButton = () => {
|
const ResetWorkflowEditorButton = () => {
|
||||||
const { t } = useTranslation();
|
const { t } = useTranslation();
|
||||||
const dispatch = useAppDispatch();
|
const dispatch = useAppDispatch();
|
||||||
const { isOpen, onOpen, onClose } = useDisclosure();
|
const { isOpen, onOpen, onClose } = useDisclosure();
|
||||||
const cancelRef = useRef<HTMLButtonElement | null>(null);
|
const cancelRef = useRef<HTMLButtonElement | null>(null);
|
||||||
|
|
||||||
const nodesCount = useAppSelector(
|
|
||||||
(state: RootState) => state.nodes.nodes.length
|
|
||||||
);
|
|
||||||
|
|
||||||
const handleConfirmClear = useCallback(() => {
|
const handleConfirmClear = useCallback(() => {
|
||||||
dispatch(nodeEditorReset());
|
dispatch(nodeEditorReset());
|
||||||
|
|
||||||
dispatch(
|
dispatch(
|
||||||
addToast(
|
addToast(
|
||||||
makeToast({
|
makeToast({
|
||||||
title: t('toast.nodesCleared'),
|
title: t('workflows.workflowEditorReset'),
|
||||||
status: 'success',
|
status: 'success',
|
||||||
})
|
})
|
||||||
)
|
)
|
||||||
@ -52,7 +47,6 @@ const ResetWorkflowButton = () => {
|
|||||||
tooltip={t('nodes.resetWorkflow')}
|
tooltip={t('nodes.resetWorkflow')}
|
||||||
aria-label={t('nodes.resetWorkflow')}
|
aria-label={t('nodes.resetWorkflow')}
|
||||||
onClick={onOpen}
|
onClick={onOpen}
|
||||||
isDisabled={!nodesCount}
|
|
||||||
colorScheme="error"
|
colorScheme="error"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
@ -90,4 +84,4 @@ const ResetWorkflowButton = () => {
|
|||||||
);
|
);
|
||||||
};
|
};
|
||||||
|
|
||||||
export default memo(ResetWorkflowButton);
|
export default memo(ResetWorkflowEditorButton);
|
@ -0,0 +1,89 @@
|
|||||||
|
import {
|
||||||
|
AlertDialog,
|
||||||
|
AlertDialogBody,
|
||||||
|
AlertDialogContent,
|
||||||
|
AlertDialogFooter,
|
||||||
|
AlertDialogHeader,
|
||||||
|
AlertDialogOverlay,
|
||||||
|
FormControl,
|
||||||
|
FormLabel,
|
||||||
|
Input,
|
||||||
|
useDisclosure,
|
||||||
|
} from '@chakra-ui/react';
|
||||||
|
import { useAppSelector } from 'app/store/storeHooks';
|
||||||
|
import IAIButton from 'common/components/IAIButton';
|
||||||
|
import IAIIconButton from 'common/components/IAIIconButton';
|
||||||
|
import { useSaveWorkflowAs } from 'features/workflowLibrary/hooks/useSaveWorkflowAs';
|
||||||
|
import { getWorkflowCopyName } from 'features/workflowLibrary/util/getWorkflowCopyName';
|
||||||
|
import { ChangeEvent, memo, useCallback, useRef, useState } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { FaClone } from 'react-icons/fa';
|
||||||
|
|
||||||
|
const SaveWorkflowAsButton = () => {
|
||||||
|
const currentName = useAppSelector((state) => state.workflow.name);
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const { saveWorkflowAs, isLoading } = useSaveWorkflowAs();
|
||||||
|
const [name, setName] = useState(getWorkflowCopyName(currentName));
|
||||||
|
const { isOpen, onOpen, onClose } = useDisclosure();
|
||||||
|
const inputRef = useRef<HTMLInputElement>(null);
|
||||||
|
|
||||||
|
const onOpenCallback = useCallback(() => {
|
||||||
|
setName(getWorkflowCopyName(currentName));
|
||||||
|
onOpen();
|
||||||
|
}, [currentName, onOpen]);
|
||||||
|
|
||||||
|
const onSave = useCallback(async () => {
|
||||||
|
saveWorkflowAs({ name, onSuccess: onClose, onError: onClose });
|
||||||
|
}, [name, onClose, saveWorkflowAs]);
|
||||||
|
|
||||||
|
const onChange = useCallback((e: ChangeEvent<HTMLInputElement>) => {
|
||||||
|
setName(e.target.value);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<IAIIconButton
|
||||||
|
icon={<FaClone />}
|
||||||
|
onClick={onOpenCallback}
|
||||||
|
isLoading={isLoading}
|
||||||
|
tooltip={t('workflows.saveWorkflowAs')}
|
||||||
|
aria-label={t('workflows.saveWorkflowAs')}
|
||||||
|
/>
|
||||||
|
<AlertDialog
|
||||||
|
isOpen={isOpen}
|
||||||
|
onClose={onClose}
|
||||||
|
leastDestructiveRef={inputRef}
|
||||||
|
isCentered
|
||||||
|
>
|
||||||
|
<AlertDialogOverlay>
|
||||||
|
<AlertDialogContent>
|
||||||
|
<AlertDialogHeader fontSize="lg" fontWeight="bold">
|
||||||
|
{t('workflows.saveWorkflowAs')}
|
||||||
|
</AlertDialogHeader>
|
||||||
|
|
||||||
|
<AlertDialogBody>
|
||||||
|
<FormControl>
|
||||||
|
<FormLabel>{t('workflows.workflowName')}</FormLabel>
|
||||||
|
<Input
|
||||||
|
ref={inputRef}
|
||||||
|
value={name}
|
||||||
|
onChange={onChange}
|
||||||
|
placeholder={t('workflows.workflowName')}
|
||||||
|
/>
|
||||||
|
</FormControl>
|
||||||
|
</AlertDialogBody>
|
||||||
|
|
||||||
|
<AlertDialogFooter>
|
||||||
|
<IAIButton onClick={onClose}>{t('common.cancel')}</IAIButton>
|
||||||
|
<IAIButton colorScheme="accent" onClick={onSave} ml={3}>
|
||||||
|
{t('common.saveAs')}
|
||||||
|
</IAIButton>
|
||||||
|
</AlertDialogFooter>
|
||||||
|
</AlertDialogContent>
|
||||||
|
</AlertDialogOverlay>
|
||||||
|
</AlertDialog>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(SaveWorkflowAsButton);
|
@ -0,0 +1,21 @@
|
|||||||
|
import IAIIconButton from 'common/components/IAIIconButton';
|
||||||
|
import { useSaveLibraryWorkflow } from 'features/workflowLibrary/hooks/useSaveWorkflow';
|
||||||
|
import { memo } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { FaSave } from 'react-icons/fa';
|
||||||
|
|
||||||
|
const SaveLibraryWorkflowButton = () => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const { saveWorkflow, isLoading } = useSaveLibraryWorkflow();
|
||||||
|
return (
|
||||||
|
<IAIIconButton
|
||||||
|
icon={<FaSave />}
|
||||||
|
onClick={saveWorkflow}
|
||||||
|
isLoading={isLoading}
|
||||||
|
tooltip={t('workflows.saveWorkflow')}
|
||||||
|
aria-label={t('workflows.saveWorkflow')}
|
||||||
|
/>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(SaveLibraryWorkflowButton);
|
@ -0,0 +1,26 @@
|
|||||||
|
import { useDisclosure } from '@chakra-ui/react';
|
||||||
|
import IAIIconButton from 'common/components/IAIIconButton';
|
||||||
|
import { memo } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { FaFolderOpen } from 'react-icons/fa';
|
||||||
|
import WorkflowLibraryModal from './WorkflowLibraryModal';
|
||||||
|
import { WorkflowLibraryModalContext } from 'features/workflowLibrary/context/WorkflowLibraryModalContext';
|
||||||
|
|
||||||
|
const WorkflowLibraryButton = () => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const disclosure = useDisclosure();
|
||||||
|
|
||||||
|
return (
|
||||||
|
<WorkflowLibraryModalContext.Provider value={disclosure}>
|
||||||
|
<IAIIconButton
|
||||||
|
icon={<FaFolderOpen />}
|
||||||
|
onClick={disclosure.onOpen}
|
||||||
|
tooltip={t('workflows.workflowLibrary')}
|
||||||
|
aria-label={t('workflows.workflowLibrary')}
|
||||||
|
/>
|
||||||
|
<WorkflowLibraryModal />
|
||||||
|
</WorkflowLibraryModalContext.Provider>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(WorkflowLibraryButton);
|
@ -0,0 +1,13 @@
|
|||||||
|
import WorkflowLibraryList from 'features/workflowLibrary/components/WorkflowLibraryList';
|
||||||
|
import WorkflowLibraryListWrapper from 'features/workflowLibrary/components/WorkflowLibraryListWrapper';
|
||||||
|
import { memo } from 'react';
|
||||||
|
|
||||||
|
const WorkflowLibraryContent = () => {
|
||||||
|
return (
|
||||||
|
<WorkflowLibraryListWrapper>
|
||||||
|
<WorkflowLibraryList />
|
||||||
|
</WorkflowLibraryListWrapper>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(WorkflowLibraryContent);
|
@ -0,0 +1,242 @@
|
|||||||
|
import { CloseIcon } from '@chakra-ui/icons';
|
||||||
|
import {
|
||||||
|
ButtonGroup,
|
||||||
|
Divider,
|
||||||
|
Flex,
|
||||||
|
IconButton,
|
||||||
|
Input,
|
||||||
|
InputGroup,
|
||||||
|
InputRightElement,
|
||||||
|
Spacer,
|
||||||
|
} from '@chakra-ui/react';
|
||||||
|
import { SelectItem } from '@mantine/core';
|
||||||
|
import IAIButton from 'common/components/IAIButton';
|
||||||
|
import {
|
||||||
|
IAINoContentFallback,
|
||||||
|
IAINoContentFallbackWithSpinner,
|
||||||
|
} from 'common/components/IAIImageFallback';
|
||||||
|
import IAIMantineSelect from 'common/components/IAIMantineSelect';
|
||||||
|
import ScrollableContent from 'features/nodes/components/sidePanel/ScrollableContent';
|
||||||
|
import { WorkflowCategory } from 'features/nodes/types/workflow';
|
||||||
|
import WorkflowLibraryListItem from 'features/workflowLibrary/components/WorkflowLibraryListItem';
|
||||||
|
import WorkflowLibraryPagination from 'features/workflowLibrary/components/WorkflowLibraryPagination';
|
||||||
|
import {
|
||||||
|
ChangeEvent,
|
||||||
|
KeyboardEvent,
|
||||||
|
memo,
|
||||||
|
useCallback,
|
||||||
|
useMemo,
|
||||||
|
useState,
|
||||||
|
} from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { useListWorkflowsQuery } from 'services/api/endpoints/workflows';
|
||||||
|
import { SQLiteDirection, WorkflowRecordOrderBy } from 'services/api/types';
|
||||||
|
import { useDebounce } from 'use-debounce';
|
||||||
|
|
||||||
|
const PER_PAGE = 10;
|
||||||
|
|
||||||
|
const ORDER_BY_DATA: SelectItem[] = [
|
||||||
|
{ value: 'opened_at', label: 'Opened' },
|
||||||
|
{ value: 'created_at', label: 'Created' },
|
||||||
|
{ value: 'updated_at', label: 'Updated' },
|
||||||
|
{ value: 'name', label: 'Name' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const DIRECTION_DATA: SelectItem[] = [
|
||||||
|
{ value: 'ASC', label: 'Ascending' },
|
||||||
|
{ value: 'DESC', label: 'Descending' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const WorkflowLibraryList = () => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const [category, setCategory] = useState<WorkflowCategory>('user');
|
||||||
|
const [page, setPage] = useState(0);
|
||||||
|
const [query, setQuery] = useState('');
|
||||||
|
const [order_by, setOrderBy] = useState<WorkflowRecordOrderBy>('opened_at');
|
||||||
|
const [direction, setDirection] = useState<SQLiteDirection>('ASC');
|
||||||
|
const [debouncedQuery] = useDebounce(query, 500);
|
||||||
|
|
||||||
|
const queryArg = useMemo<Parameters<typeof useListWorkflowsQuery>[0]>(() => {
|
||||||
|
if (category === 'user') {
|
||||||
|
return {
|
||||||
|
page,
|
||||||
|
per_page: PER_PAGE,
|
||||||
|
order_by,
|
||||||
|
direction,
|
||||||
|
category,
|
||||||
|
query: debouncedQuery,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
page,
|
||||||
|
per_page: PER_PAGE,
|
||||||
|
order_by: 'name' as const,
|
||||||
|
direction: 'ASC' as const,
|
||||||
|
category,
|
||||||
|
query: debouncedQuery,
|
||||||
|
};
|
||||||
|
}, [category, debouncedQuery, direction, order_by, page]);
|
||||||
|
|
||||||
|
const { data, isLoading, isError, isFetching } =
|
||||||
|
useListWorkflowsQuery(queryArg);
|
||||||
|
|
||||||
|
const handleChangeOrderBy = useCallback(
|
||||||
|
(value: string | null) => {
|
||||||
|
if (!value || value === order_by) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
setOrderBy(value as WorkflowRecordOrderBy);
|
||||||
|
setPage(0);
|
||||||
|
},
|
||||||
|
[order_by]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleChangeDirection = useCallback(
|
||||||
|
(value: string | null) => {
|
||||||
|
if (!value || value === direction) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
setDirection(value as SQLiteDirection);
|
||||||
|
setPage(0);
|
||||||
|
},
|
||||||
|
[direction]
|
||||||
|
);
|
||||||
|
|
||||||
|
const resetFilterText = useCallback(() => {
|
||||||
|
setQuery('');
|
||||||
|
setPage(0);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const handleKeydownFilterText = useCallback(
|
||||||
|
(e: KeyboardEvent<HTMLInputElement>) => {
|
||||||
|
// exit search mode on escape
|
||||||
|
if (e.key === 'Escape') {
|
||||||
|
resetFilterText();
|
||||||
|
e.preventDefault();
|
||||||
|
setPage(0);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[resetFilterText]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleChangeFilterText = useCallback(
|
||||||
|
(e: ChangeEvent<HTMLInputElement>) => {
|
||||||
|
setQuery(e.target.value);
|
||||||
|
setPage(0);
|
||||||
|
},
|
||||||
|
[]
|
||||||
|
);
|
||||||
|
|
||||||
|
const handleSetUserCategory = useCallback(() => {
|
||||||
|
setCategory('user');
|
||||||
|
setPage(0);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
const handleSetDefaultCategory = useCallback(() => {
|
||||||
|
setCategory('default');
|
||||||
|
setPage(0);
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<Flex gap={4} alignItems="center" h={10} flexShrink={0} flexGrow={0}>
|
||||||
|
<ButtonGroup>
|
||||||
|
<IAIButton
|
||||||
|
variant={category === 'user' ? undefined : 'ghost'}
|
||||||
|
onClick={handleSetUserCategory}
|
||||||
|
isChecked={category === 'user'}
|
||||||
|
>
|
||||||
|
{t('workflows.userWorkflows')}
|
||||||
|
</IAIButton>
|
||||||
|
<IAIButton
|
||||||
|
variant={category === 'default' ? undefined : 'ghost'}
|
||||||
|
onClick={handleSetDefaultCategory}
|
||||||
|
isChecked={category === 'default'}
|
||||||
|
>
|
||||||
|
{t('workflows.defaultWorkflows')}
|
||||||
|
</IAIButton>
|
||||||
|
</ButtonGroup>
|
||||||
|
<Spacer />
|
||||||
|
{category === 'user' && (
|
||||||
|
<>
|
||||||
|
<IAIMantineSelect
|
||||||
|
label={t('common.orderBy')}
|
||||||
|
value={order_by}
|
||||||
|
data={ORDER_BY_DATA}
|
||||||
|
onChange={handleChangeOrderBy}
|
||||||
|
formControlProps={{
|
||||||
|
w: 48,
|
||||||
|
display: 'flex',
|
||||||
|
alignItems: 'center',
|
||||||
|
gap: 2,
|
||||||
|
}}
|
||||||
|
disabled={isFetching}
|
||||||
|
/>
|
||||||
|
<IAIMantineSelect
|
||||||
|
label={t('common.direction')}
|
||||||
|
value={direction}
|
||||||
|
data={DIRECTION_DATA}
|
||||||
|
onChange={handleChangeDirection}
|
||||||
|
formControlProps={{
|
||||||
|
w: 48,
|
||||||
|
display: 'flex',
|
||||||
|
alignItems: 'center',
|
||||||
|
gap: 2,
|
||||||
|
}}
|
||||||
|
disabled={isFetching}
|
||||||
|
/>
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
<InputGroup w="20rem">
|
||||||
|
<Input
|
||||||
|
placeholder={t('workflows.searchWorkflows')}
|
||||||
|
value={query}
|
||||||
|
onKeyDown={handleKeydownFilterText}
|
||||||
|
onChange={handleChangeFilterText}
|
||||||
|
data-testid="workflow-search-input"
|
||||||
|
/>
|
||||||
|
{query.trim().length && (
|
||||||
|
<InputRightElement>
|
||||||
|
<IconButton
|
||||||
|
onClick={resetFilterText}
|
||||||
|
size="xs"
|
||||||
|
variant="ghost"
|
||||||
|
aria-label={t('workflows.clearWorkflowSearchFilter')}
|
||||||
|
opacity={0.5}
|
||||||
|
icon={<CloseIcon boxSize={2} />}
|
||||||
|
/>
|
||||||
|
</InputRightElement>
|
||||||
|
)}
|
||||||
|
</InputGroup>
|
||||||
|
</Flex>
|
||||||
|
<Divider />
|
||||||
|
{isLoading ? (
|
||||||
|
<IAINoContentFallbackWithSpinner label={t('workflows.loading')} />
|
||||||
|
) : !data || isError ? (
|
||||||
|
<IAINoContentFallback label={t('workflows.problemLoading')} />
|
||||||
|
) : data.items.length ? (
|
||||||
|
<ScrollableContent>
|
||||||
|
<Flex w="full" h="full" gap={2} px={1} flexDir="column">
|
||||||
|
{data.items.map((w) => (
|
||||||
|
<WorkflowLibraryListItem key={w.workflow_id} workflowDTO={w} />
|
||||||
|
))}
|
||||||
|
</Flex>
|
||||||
|
</ScrollableContent>
|
||||||
|
) : (
|
||||||
|
<IAINoContentFallback label={t('workflows.noUserWorkflows')} />
|
||||||
|
)}
|
||||||
|
<Divider />
|
||||||
|
{data && (
|
||||||
|
<Flex w="full" justifyContent="space-around">
|
||||||
|
<WorkflowLibraryPagination
|
||||||
|
data={data}
|
||||||
|
page={page}
|
||||||
|
setPage={setPage}
|
||||||
|
/>
|
||||||
|
</Flex>
|
||||||
|
)}
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(WorkflowLibraryList);
|
@ -0,0 +1,94 @@
|
|||||||
|
import { Flex, Heading, Spacer, Text } from '@chakra-ui/react';
|
||||||
|
import IAIButton from 'common/components/IAIButton';
|
||||||
|
import dateFormat, { masks } from 'dateformat';
|
||||||
|
import { useDeleteLibraryWorkflow } from 'features/workflowLibrary/hooks/useDeleteLibraryWorkflow';
|
||||||
|
import { useGetAndLoadLibraryWorkflow } from 'features/workflowLibrary/hooks/useGetAndLoadLibraryWorkflow';
|
||||||
|
import { useWorkflowLibraryModalContext } from 'features/workflowLibrary/context/useWorkflowLibraryModalContext';
|
||||||
|
import { memo, useCallback } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { WorkflowRecordListItemDTO } from 'services/api/types';
|
||||||
|
|
||||||
|
type Props = {
|
||||||
|
workflowDTO: WorkflowRecordListItemDTO;
|
||||||
|
};
|
||||||
|
|
||||||
|
const WorkflowLibraryListItem = ({ workflowDTO }: Props) => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const { onClose } = useWorkflowLibraryModalContext();
|
||||||
|
const { deleteWorkflow, deleteWorkflowResult } = useDeleteLibraryWorkflow({});
|
||||||
|
const { getAndLoadWorkflow, getAndLoadWorkflowResult } =
|
||||||
|
useGetAndLoadLibraryWorkflow({ onSuccess: onClose });
|
||||||
|
|
||||||
|
const handleDeleteWorkflow = useCallback(() => {
|
||||||
|
deleteWorkflow(workflowDTO.workflow_id);
|
||||||
|
}, [deleteWorkflow, workflowDTO.workflow_id]);
|
||||||
|
|
||||||
|
const handleGetAndLoadWorkflow = useCallback(() => {
|
||||||
|
getAndLoadWorkflow(workflowDTO.workflow_id);
|
||||||
|
}, [getAndLoadWorkflow, workflowDTO.workflow_id]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Flex key={workflowDTO.workflow_id} w="full">
|
||||||
|
<Flex w="full" alignItems="center" gap={2} h={12}>
|
||||||
|
<Flex flexDir="column" flexGrow={1} h="full">
|
||||||
|
<Flex alignItems="center" w="full" h="50%">
|
||||||
|
<Heading size="sm">
|
||||||
|
{workflowDTO.name || t('workflows.unnamedWorkflow')}
|
||||||
|
</Heading>
|
||||||
|
<Spacer />
|
||||||
|
{workflowDTO.category === 'user' && (
|
||||||
|
<Text fontSize="sm" variant="subtext">
|
||||||
|
{t('common.updated')}:{' '}
|
||||||
|
{dateFormat(workflowDTO.updated_at, masks.shortDate)}{' '}
|
||||||
|
{dateFormat(workflowDTO.updated_at, masks.shortTime)}
|
||||||
|
</Text>
|
||||||
|
)}
|
||||||
|
</Flex>
|
||||||
|
<Flex alignItems="center" w="full" h="50%">
|
||||||
|
{workflowDTO.description ? (
|
||||||
|
<Text fontSize="sm" noOfLines={1}>
|
||||||
|
{workflowDTO.description}
|
||||||
|
</Text>
|
||||||
|
) : (
|
||||||
|
<Text
|
||||||
|
fontSize="sm"
|
||||||
|
variant="subtext"
|
||||||
|
fontStyle="italic"
|
||||||
|
noOfLines={1}
|
||||||
|
>
|
||||||
|
{t('workflows.noDescription')}
|
||||||
|
</Text>
|
||||||
|
)}
|
||||||
|
<Spacer />
|
||||||
|
{workflowDTO.category === 'user' && (
|
||||||
|
<Text fontSize="sm" variant="subtext">
|
||||||
|
{t('common.created')}:{' '}
|
||||||
|
{dateFormat(workflowDTO.created_at, masks.shortDate)}{' '}
|
||||||
|
{dateFormat(workflowDTO.created_at, masks.shortTime)}
|
||||||
|
</Text>
|
||||||
|
)}
|
||||||
|
</Flex>
|
||||||
|
</Flex>
|
||||||
|
<IAIButton
|
||||||
|
onClick={handleGetAndLoadWorkflow}
|
||||||
|
isLoading={getAndLoadWorkflowResult.isLoading}
|
||||||
|
aria-label={t('workflows.openWorkflow')}
|
||||||
|
>
|
||||||
|
{t('common.load')}
|
||||||
|
</IAIButton>
|
||||||
|
{workflowDTO.category === 'user' && (
|
||||||
|
<IAIButton
|
||||||
|
colorScheme="error"
|
||||||
|
onClick={handleDeleteWorkflow}
|
||||||
|
isLoading={deleteWorkflowResult.isLoading}
|
||||||
|
aria-label={t('workflows.deleteWorkflow')}
|
||||||
|
>
|
||||||
|
{t('common.delete')}
|
||||||
|
</IAIButton>
|
||||||
|
)}
|
||||||
|
</Flex>
|
||||||
|
</Flex>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(WorkflowLibraryListItem);
|
@ -0,0 +1,21 @@
|
|||||||
|
import { Flex } from '@chakra-ui/react';
|
||||||
|
import { PropsWithChildren, memo } from 'react';
|
||||||
|
|
||||||
|
const WorkflowLibraryListWrapper = (props: PropsWithChildren) => {
|
||||||
|
return (
|
||||||
|
<Flex
|
||||||
|
w="full"
|
||||||
|
h="full"
|
||||||
|
flexDir="column"
|
||||||
|
layerStyle="second"
|
||||||
|
py={2}
|
||||||
|
px={4}
|
||||||
|
gap={2}
|
||||||
|
borderRadius="base"
|
||||||
|
>
|
||||||
|
{props.children}
|
||||||
|
</Flex>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(WorkflowLibraryListWrapper);
|
@ -0,0 +1,40 @@
|
|||||||
|
import {
|
||||||
|
Modal,
|
||||||
|
ModalBody,
|
||||||
|
ModalCloseButton,
|
||||||
|
ModalContent,
|
||||||
|
ModalFooter,
|
||||||
|
ModalHeader,
|
||||||
|
ModalOverlay,
|
||||||
|
} from '@chakra-ui/react';
|
||||||
|
import WorkflowLibraryContent from 'features/workflowLibrary/components/WorkflowLibraryContent';
|
||||||
|
import { useWorkflowLibraryModalContext } from 'features/workflowLibrary/context/useWorkflowLibraryModalContext';
|
||||||
|
import { memo } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
|
||||||
|
const WorkflowLibraryModal = () => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const { isOpen, onClose } = useWorkflowLibraryModalContext();
|
||||||
|
return (
|
||||||
|
<Modal isOpen={isOpen} onClose={onClose} isCentered>
|
||||||
|
<ModalOverlay />
|
||||||
|
<ModalContent
|
||||||
|
w="80%"
|
||||||
|
h="80%"
|
||||||
|
minW="unset"
|
||||||
|
minH="unset"
|
||||||
|
maxW="unset"
|
||||||
|
maxH="unset"
|
||||||
|
>
|
||||||
|
<ModalHeader>{t('workflows.workflowLibrary')}</ModalHeader>
|
||||||
|
<ModalCloseButton />
|
||||||
|
<ModalBody>
|
||||||
|
<WorkflowLibraryContent />
|
||||||
|
</ModalBody>
|
||||||
|
<ModalFooter />
|
||||||
|
</ModalContent>
|
||||||
|
</Modal>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(WorkflowLibraryModal);
|
@ -0,0 +1,87 @@
|
|||||||
|
import { ButtonGroup } from '@chakra-ui/react';
|
||||||
|
import IAIButton from 'common/components/IAIButton';
|
||||||
|
import IAIIconButton from 'common/components/IAIIconButton';
|
||||||
|
import { Dispatch, SetStateAction, memo, useCallback, useMemo } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { FaChevronLeft, FaChevronRight } from 'react-icons/fa';
|
||||||
|
import { paths } from 'services/api/schema';
|
||||||
|
|
||||||
|
const PAGES_TO_DISPLAY = 7;
|
||||||
|
|
||||||
|
type PageData = {
|
||||||
|
page: number;
|
||||||
|
onClick: () => void;
|
||||||
|
};
|
||||||
|
|
||||||
|
type Props = {
|
||||||
|
page: number;
|
||||||
|
setPage: Dispatch<SetStateAction<number>>;
|
||||||
|
data: paths['/api/v1/workflows/']['get']['responses']['200']['content']['application/json'];
|
||||||
|
};
|
||||||
|
|
||||||
|
const WorkflowLibraryPagination = ({ page, setPage, data }: Props) => {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
|
||||||
|
const handlePrevPage = useCallback(() => {
|
||||||
|
setPage((p) => Math.max(p - 1, 0));
|
||||||
|
}, [setPage]);
|
||||||
|
|
||||||
|
const handleNextPage = useCallback(() => {
|
||||||
|
setPage((p) => Math.min(p + 1, data.pages - 1));
|
||||||
|
}, [data.pages, setPage]);
|
||||||
|
|
||||||
|
const pages: PageData[] = useMemo(() => {
|
||||||
|
const pages = [];
|
||||||
|
let first =
|
||||||
|
data.pages > PAGES_TO_DISPLAY
|
||||||
|
? Math.max(0, page - Math.floor(PAGES_TO_DISPLAY / 2))
|
||||||
|
: 0;
|
||||||
|
const last =
|
||||||
|
data.pages > PAGES_TO_DISPLAY
|
||||||
|
? Math.min(data.pages, first + PAGES_TO_DISPLAY)
|
||||||
|
: data.pages;
|
||||||
|
if (last - first < PAGES_TO_DISPLAY && data.pages > PAGES_TO_DISPLAY) {
|
||||||
|
first = last - PAGES_TO_DISPLAY;
|
||||||
|
}
|
||||||
|
for (let i = first; i < last; i++) {
|
||||||
|
pages.push({
|
||||||
|
page: i,
|
||||||
|
onClick: () => setPage(i),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return pages;
|
||||||
|
}, [data.pages, page, setPage]);
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ButtonGroup>
|
||||||
|
<IAIIconButton
|
||||||
|
variant="ghost"
|
||||||
|
onClick={handlePrevPage}
|
||||||
|
isDisabled={page === 0}
|
||||||
|
aria-label={t('common.prevPage')}
|
||||||
|
icon={<FaChevronLeft />}
|
||||||
|
/>
|
||||||
|
{pages.map((p) => (
|
||||||
|
<IAIButton
|
||||||
|
w={10}
|
||||||
|
isDisabled={data.pages === 1}
|
||||||
|
onClick={p.page === page ? undefined : p.onClick}
|
||||||
|
variant={p.page === page ? 'invokeAI' : 'ghost'}
|
||||||
|
key={p.page}
|
||||||
|
transitionDuration="0s" // the delay in animation looks jank
|
||||||
|
>
|
||||||
|
{p.page + 1}
|
||||||
|
</IAIButton>
|
||||||
|
))}
|
||||||
|
<IAIIconButton
|
||||||
|
variant="ghost"
|
||||||
|
onClick={handleNextPage}
|
||||||
|
isDisabled={page === data.pages - 1}
|
||||||
|
aria-label={t('common.nextPage')}
|
||||||
|
icon={<FaChevronRight />}
|
||||||
|
/>
|
||||||
|
</ButtonGroup>
|
||||||
|
);
|
||||||
|
};
|
||||||
|
|
||||||
|
export default memo(WorkflowLibraryPagination);
|
@ -0,0 +1,5 @@
|
|||||||
|
import { UseDisclosureReturn } from '@chakra-ui/react';
|
||||||
|
import { createContext } from 'react';
|
||||||
|
|
||||||
|
export const WorkflowLibraryModalContext =
|
||||||
|
createContext<UseDisclosureReturn | null>(null);
|
@ -0,0 +1,12 @@
|
|||||||
|
import { WorkflowLibraryModalContext } from 'features/workflowLibrary/context/WorkflowLibraryModalContext';
|
||||||
|
import { useContext } from 'react';
|
||||||
|
|
||||||
|
export const useWorkflowLibraryModalContext = () => {
|
||||||
|
const context = useContext(WorkflowLibraryModalContext);
|
||||||
|
if (!context) {
|
||||||
|
throw new Error(
|
||||||
|
'useWorkflowLibraryContext must be used within a WorkflowLibraryContext.Provider'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return context;
|
||||||
|
};
|
@ -0,0 +1,48 @@
|
|||||||
|
import { useAppToaster } from 'app/components/Toaster';
|
||||||
|
import { useCallback } from 'react';
|
||||||
|
import { useTranslation } from 'react-i18next';
|
||||||
|
import { useDeleteWorkflowMutation } from 'services/api/endpoints/workflows';
|
||||||
|
|
||||||
|
type UseDeleteLibraryWorkflowOptions = {
|
||||||
|
onSuccess?: () => void;
|
||||||
|
onError?: () => void;
|
||||||
|
};
|
||||||
|
|
||||||
|
type UseDeleteLibraryWorkflowReturn = {
|
||||||
|
deleteWorkflow: (workflow_id: string) => Promise<void>;
|
||||||
|
deleteWorkflowResult: ReturnType<typeof useDeleteWorkflowMutation>[1];
|
||||||
|
};
|
||||||
|
|
||||||
|
type UseDeleteLibraryWorkflow = (
|
||||||
|
arg: UseDeleteLibraryWorkflowOptions
|
||||||
|
) => UseDeleteLibraryWorkflowReturn;
|
||||||
|
|
||||||
|
export const useDeleteLibraryWorkflow: UseDeleteLibraryWorkflow = ({
|
||||||
|
onSuccess,
|
||||||
|
onError,
|
||||||
|
}) => {
|
||||||
|
const toaster = useAppToaster();
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const [_deleteWorkflow, deleteWorkflowResult] = useDeleteWorkflowMutation();
|
||||||
|
|
||||||
|
const deleteWorkflow = useCallback(
|
||||||
|
async (workflow_id: string) => {
|
||||||
|
try {
|
||||||
|
await _deleteWorkflow(workflow_id).unwrap();
|
||||||
|
toaster({
|
||||||
|
title: t('toast.workflowDeleted'),
|
||||||
|
});
|
||||||
|
onSuccess && onSuccess();
|
||||||
|
} catch {
|
||||||
|
toaster({
|
||||||
|
title: t('toast.problemDeletingWorkflow'),
|
||||||
|
status: 'error',
|
||||||
|
});
|
||||||
|
onError && onError();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
[_deleteWorkflow, toaster, t, onSuccess, onError]
|
||||||
|
);
|
||||||
|
|
||||||
|
return { deleteWorkflow, deleteWorkflowResult };
|
||||||
|
};
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user