Compare commits

...

7 Commits

Author SHA1 Message Date
c439e9681e Update JS with node cache fix 2023-09-22 00:01:53 +10:00
369963791e fix(ui): do not reset node outputs on queue item completed 2023-09-21 23:59:36 +10:00
0d94bed9f8 fix(ui): workflow editor side panel remembers positioning
closes #4402
2023-09-21 23:59:36 +10:00
cec8ad57a5 fix(ui): do not poll for cache status unless connected, processor is running and the queue is not empty 2023-09-21 23:59:36 +10:00
003c2c28c9 feat(nodes): invocation cache reports disabled if max size is 0 2023-09-21 23:59:36 +10:00
661b3056ed feat(nodes): add enable, disable, status to invocation cache
- New routes to clear, enable, disable and get the status of the cache
- Status includes hits, misses, size, max size, enabled
- Add client cache queries and mutations, abstracted into hooks
- Add invocation cache status area (next to queue status) w/ buttons
2023-09-21 23:59:36 +10:00
20f7e448c3 Updated version & JS files 2023-09-21 23:47:17 +10:00
35 changed files with 1559 additions and 776 deletions

View File

@ -7,6 +7,7 @@ from fastapi.routing import APIRouter
from pydantic import BaseModel, Field
from invokeai.app.invocations.upscale import ESRGAN_MODELS
from invokeai.app.services.invocation_cache.invocation_cache_common import InvocationCacheStatus
from invokeai.backend.image_util.invisible_watermark import InvisibleWatermark
from invokeai.backend.image_util.patchmatch import PatchMatch
from invokeai.backend.image_util.safety_checker import SafetyChecker
@ -113,3 +114,33 @@ async def set_log_level(
async def clear_invocation_cache() -> None:
"""Clears the invocation cache"""
ApiDependencies.invoker.services.invocation_cache.clear()
@app_router.put(
"/invocation_cache/enable",
operation_id="enable_invocation_cache",
responses={200: {"description": "The operation was successful"}},
)
async def enable_invocation_cache() -> None:
"""Clears the invocation cache"""
ApiDependencies.invoker.services.invocation_cache.enable()
@app_router.put(
"/invocation_cache/disable",
operation_id="disable_invocation_cache",
responses={200: {"description": "The operation was successful"}},
)
async def disable_invocation_cache() -> None:
"""Clears the invocation cache"""
ApiDependencies.invoker.services.invocation_cache.disable()
@app_router.get(
"/invocation_cache/status",
operation_id="get_invocation_cache_status",
responses={200: {"model": InvocationCacheStatus}},
)
async def get_invocation_cache_status() -> InvocationCacheStatus:
"""Clears the invocation cache"""
return ApiDependencies.invoker.services.invocation_cache.get_status()
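For illustration only (not part of the diff): a minimal client-side sketch of the four new cache routes added above, using the requests library; the host and port are assumptions, the /api/v1/app/invocation_cache paths match the generated schema further down.

import requests

BASE = "http://localhost:9090/api/v1/app/invocation_cache"  # host/port are assumptions

requests.put(f"{BASE}/disable").raise_for_status()  # disable the cache
requests.put(f"{BASE}/enable").raise_for_status()   # re-enable it
requests.delete(BASE).raise_for_status()            # clear cached outputs

status = requests.get(f"{BASE}/status").json()      # InvocationCacheStatus payload
print(status)  # e.g. {'size': 0, 'hits': 0, 'misses': 0, 'enabled': True, 'max_size': 512}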

View File

@ -2,6 +2,7 @@ from abc import ABC, abstractmethod
from typing import Optional, Union
from invokeai.app.invocations.baseinvocation import BaseInvocation, BaseInvocationOutput
from invokeai.app.services.invocation_cache.invocation_cache_common import InvocationCacheStatus
class InvocationCacheBase(ABC):
@ -32,7 +33,7 @@ class InvocationCacheBase(ABC):
@abstractmethod
def delete(self, key: Union[int, str]) -> None:
"""Deleteds an invocation output from the cache"""
"""Deletes an invocation output from the cache"""
pass
@abstractmethod
@ -44,3 +45,18 @@ class InvocationCacheBase(ABC):
def create_key(self, invocation: BaseInvocation) -> int:
"""Gets the key for the invocation's cache item"""
pass
@abstractmethod
def disable(self) -> None:
"""Disables the cache, overriding the max cache size"""
pass
@abstractmethod
def enable(self) -> None:
"""Enables the cache, letting the the max cache size take effect"""
pass
@abstractmethod
def get_status(self) -> InvocationCacheStatus:
"""Returns the status of the cache"""
pass

View File

@ -0,0 +1,9 @@
from pydantic import BaseModel, Field
class InvocationCacheStatus(BaseModel):
size: int = Field(description="The current size of the invocation cache")
hits: int = Field(description="The number of cache hits")
misses: int = Field(description="The number of cache misses")
enabled: bool = Field(description="Whether the invocation cache is enabled")
max_size: int = Field(description="The maximum size of the invocation cache")
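As a quick sketch (not part of the diff) of how this model serializes, using the pydantic v1-style .json() seen elsewhere in this changeset, the status route returns a payload of this shape:

from invokeai.app.services.invocation_cache.invocation_cache_common import InvocationCacheStatus

status = InvocationCacheStatus(size=3, hits=10, misses=4, enabled=True, max_size=512)
print(status.json())
# {"size": 3, "hits": 10, "misses": 4, "enabled": true, "max_size": 512}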

View File

@ -3,18 +3,25 @@ from typing import Optional, Union
from invokeai.app.invocations.baseinvocation import BaseInvocation, BaseInvocationOutput
from invokeai.app.services.invocation_cache.invocation_cache_base import InvocationCacheBase
from invokeai.app.services.invocation_cache.invocation_cache_common import InvocationCacheStatus
from invokeai.app.services.invoker import Invoker
class MemoryInvocationCache(InvocationCacheBase):
__cache: dict[Union[int, str], tuple[BaseInvocationOutput, str]]
__max_cache_size: int
__disabled: bool
__hits: int
__misses: int
__cache_ids: Queue
__invoker: Invoker
def __init__(self, max_cache_size: int = 0) -> None:
self.__cache = dict()
self.__max_cache_size = max_cache_size
self.__disabled = False
self.__hits = 0
self.__misses = 0
self.__cache_ids = Queue()
def start(self, invoker: Invoker) -> None:
@ -25,15 +32,17 @@ class MemoryInvocationCache(InvocationCacheBase):
self.__invoker.services.latents.on_deleted(self._delete_by_match)
def get(self, key: Union[int, str]) -> Optional[BaseInvocationOutput]:
if self.__max_cache_size == 0:
if self.__max_cache_size == 0 or self.__disabled:
return
item = self.__cache.get(key, None)
if item is not None:
self.__hits += 1
return item[0]
self.__misses += 1
def save(self, key: Union[int, str], invocation_output: BaseInvocationOutput) -> None:
if self.__max_cache_size == 0:
if self.__max_cache_size == 0 or self.__disabled:
return
if key not in self.__cache:
@ -47,25 +56,46 @@ class MemoryInvocationCache(InvocationCacheBase):
pass
def delete(self, key: Union[int, str]) -> None:
if self.__max_cache_size == 0:
if self.__max_cache_size == 0 or self.__disabled:
return
if key in self.__cache:
del self.__cache[key]
def clear(self, *args, **kwargs) -> None:
if self.__max_cache_size == 0:
if self.__max_cache_size == 0 or self.__disabled:
return
self.__cache.clear()
self.__cache_ids = Queue()
self.__misses = 0
self.__hits = 0
def create_key(self, invocation: BaseInvocation) -> int:
return hash(invocation.json(exclude={"id"}))
def _delete_by_match(self, to_match: str) -> None:
def disable(self) -> None:
if self.__max_cache_size == 0:
return
self.__disabled = True
def enable(self) -> None:
if self.__max_cache_size == 0:
return
self.__disabled = False
def get_status(self) -> InvocationCacheStatus:
return InvocationCacheStatus(
hits=self.__hits,
misses=self.__misses,
enabled=not self.__disabled and self.__max_cache_size > 0,
size=len(self.__cache),
max_size=self.__max_cache_size,
)
def _delete_by_match(self, to_match: str) -> None:
if self.__max_cache_size == 0 or self.__disabled:
return
keys_to_delete = set()
for key, value_tuple in self.__cache.items():
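A short usage sketch (not part of the diff) of the new enable/disable/status behaviour; the import path for MemoryInvocationCache is an assumption. Note that a max cache size of 0 makes the cache report disabled and turns enable()/disable() into no-ops, and that cache keys hash the invocation JSON excluding its id, so identical nodes share a cache entry.

# Hedged sketch; the module path below is assumed, not shown in this diff.
from invokeai.app.services.invocation_cache.invocation_cache_memory import MemoryInvocationCache

cache = MemoryInvocationCache(max_cache_size=512)
print(cache.get_status().enabled)  # True: not disabled and max_size > 0

cache.disable()                    # overrides the max size; get/save/delete become no-ops
print(cache.get_status().enabled)  # False

cache.enable()                     # the configured max cache size takes effect again
print(cache.get_status().json())   # hits and misses start at 0; clear() also resets them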

View File

@ -162,15 +162,15 @@ class SessionQueueItemWithoutGraph(BaseModel):
session_id: str = Field(
description="The ID of the session associated with this queue item. The session doesn't exist in graph_executions until the queue item is executed."
)
field_values: Optional[list[NodeFieldValue]] = Field(
default=None, description="The field values that were used for this queue item"
)
queue_id: str = Field(description="The id of the queue with which this item is associated")
error: Optional[str] = Field(default=None, description="The error message if this queue item errored")
created_at: Union[datetime.datetime, str] = Field(description="When this queue item was created")
updated_at: Union[datetime.datetime, str] = Field(description="When this queue item was updated")
started_at: Optional[Union[datetime.datetime, str]] = Field(description="When this queue item was started")
completed_at: Optional[Union[datetime.datetime, str]] = Field(description="When this queue item was completed")
queue_id: str = Field(description="The id of the queue with which this item is associated")
field_values: Optional[list[NodeFieldValue]] = Field(
default=None, description="The field values that were used for this queue item"
)
@classmethod
def from_dict(cls, queue_item_dict: dict) -> "SessionQueueItemDTO":

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -12,7 +12,7 @@
margin: 0;
}
</style>
<script type="module" crossorigin src="./assets/index-f6c3f475.js"></script>
<script type="module" crossorigin src="./assets/index-eac60e23.js"></script>
</head>
<body dir="ltr">

View File

@ -13,14 +13,15 @@
"reset": "Reset",
"rotateClockwise": "Rotate Clockwise",
"rotateCounterClockwise": "Rotate Counter-Clockwise",
"showGallery": "Show Gallery",
"showGalleryPanel": "Show Gallery Panel",
"showOptionsPanel": "Show Side Panel",
"toggleAutoscroll": "Toggle autoscroll",
"toggleLogViewer": "Toggle Log Viewer",
"uploadImage": "Upload Image",
"useThisParameter": "Use this parameter",
"zoomIn": "Zoom In",
"zoomOut": "Zoom Out"
"zoomOut": "Zoom Out",
"loadMore": "Load More"
},
"boards": {
"addBoard": "Add Board",
@ -110,6 +111,7 @@
"statusModelChanged": "Model Changed",
"statusModelConverted": "Model Converted",
"statusPreparing": "Preparing",
"statusProcessing": "Processing",
"statusProcessingCanceled": "Processing Canceled",
"statusProcessingComplete": "Processing Complete",
"statusRestoringFaces": "Restoring Faces",
@ -203,6 +205,81 @@
"incompatibleModel": "Incompatible base model:",
"noMatchingEmbedding": "No matching Embeddings"
},
"queue": {
"queue": "Queue",
"queueFront": "Add to Front of Queue",
"queueBack": "Add to Queue",
"queueCountPrediction": "Add {{predicted}} to Queue",
"queueMaxExceeded": "Max of {{max_queue_size}} exceeded, would skip {{skip}}",
"queuedCount": "{{pending}} Pending",
"queueTotal": "{{total}} Total",
"queueEmpty": "Queue Empty",
"enqueueing": "Queueing Batch",
"resume": "Resume",
"resumeTooltip": "Resume Processor",
"resumeSucceeded": "Processor Resumed",
"resumeFailed": "Problem Resuming Processor",
"pause": "Pause",
"pauseTooltip": "Pause Processor",
"pauseSucceeded": "Processor Paused",
"pauseFailed": "Problem Pausing Processor",
"cancel": "Cancel",
"cancelTooltip": "Cancel Current Item",
"cancelSucceeded": "Item Canceled",
"cancelFailed": "Problem Canceling Item",
"prune": "Prune",
"pruneTooltip": "Prune {{item_count}} Completed Items",
"pruneSucceeded": "Pruned {{item_count}} Completed Items from Queue",
"pruneFailed": "Problem Pruning Queue",
"clear": "Clear",
"clearTooltip": "Cancel and Clear All Items",
"clearSucceeded": "Queue Cleared",
"clearFailed": "Problem Clearing Queue",
"cancelBatch": "Cancel Batch",
"cancelItem": "Cancel Item",
"cancelBatchSucceeded": "Batch Canceled",
"cancelBatchFailed": "Problem Canceling Batch",
"clearQueueAlertDialog": "Clearing the queue immediately cancels any processing items and clears the queue entirely.",
"clearQueueAlertDialog2": "Are you sure you want to clear the queue?",
"current": "Current",
"next": "Next",
"status": "Status",
"total": "Total",
"pending": "Pending",
"in_progress": "In Progress",
"completed": "Completed",
"failed": "Failed",
"canceled": "Canceled",
"completedIn": "Completed in",
"batch": "Batch",
"item": "Item",
"session": "Session",
"batchValues": "Batch Values",
"notReady": "Unable to Queue",
"batchQueued": "Batch Queued",
"batchQueuedDesc": "Added {{item_count}} sessions to {{direction}} of queue",
"front": "front",
"back": "back",
"batchFailedToQueue": "Failed to Queue Batch",
"graphQueued": "Graph queued",
"graphFailedToQueue": "Failed to queue graph"
},
"invocationCache": {
"invocationCache": "Invocation Cache",
"cacheSize": "Cache Size",
"maxCacheSize": "Max Cache Size",
"hits": "Cache Hits",
"misses": "Cache Misses",
"clear": "Clear",
"clearSucceeded": "Invocation Cache Cleared",
"clearFailed": "Problem Clearing Invocation Cache",
"enable": "Enable",
"enableSucceeded": "Invocation Cache Enabled",
"enableFailed": "Problem Enabling Invocation Cache",
"disable": "Disable",
"disableSucceeded": "Invocation Cache Disabled",
"disableFailed": "Problem Disabling Invocation Cache"
},
"gallery": {
"allImagesLoaded": "All Images Loaded",
"assets": "Assets",
@ -641,7 +718,8 @@
"collectionItemDescription": "TODO",
"colorCodeEdges": "Color-Code Edges",
"colorCodeEdgesHelp": "Color-code edges according to their connected fields",
"colorCollectionDescription": "A collection of colors.",
"colorCollection": "A collection of colors.",
"colorCollectionDescription": "TODO",
"colorField": "Color",
"colorFieldDescription": "A RGBA color.",
"colorPolymorphic": "Color Polymorphic",
@ -688,7 +766,8 @@
"imageFieldDescription": "Images may be passed between nodes.",
"imagePolymorphic": "Image Polymorphic",
"imagePolymorphicDescription": "A collection of images.",
"inputFields": "Input Feilds",
"inputField": "Input Field",
"inputFields": "Input Fields",
"inputMayOnlyHaveOneConnection": "Input may only have one connection",
"inputNode": "Input Node",
"integer": "Integer",
@ -706,6 +785,7 @@
"latentsPolymorphicDescription": "Latents may be passed between nodes.",
"loadingNodes": "Loading Nodes...",
"loadWorkflow": "Load Workflow",
"noWorkflow": "No Workflow",
"loRAModelField": "LoRA",
"loRAModelFieldDescription": "TODO",
"mainModelField": "Model",
@ -727,14 +807,15 @@
"noImageFoundState": "No initial image found in state",
"noMatchingNodes": "No matching nodes",
"noNodeSelected": "No node selected",
"noOpacity": "Node Opacity",
"nodeOpacity": "Node Opacity",
"noOutputRecorded": "No outputs recorded",
"noOutputSchemaName": "No output schema name found in ref object",
"notes": "Notes",
"notesDescription": "Add notes about your workflow",
"oNNXModelField": "ONNX Model",
"oNNXModelFieldDescription": "ONNX model field.",
"outputFields": "Output Feilds",
"outputField": "Output Field",
"outputFields": "Output Fields",
"outputNode": "Output node",
"outputSchemaNotFound": "Output schema not found",
"pickOne": "Pick One",
@ -783,6 +864,7 @@
"unknownNode": "Unknown Node",
"unknownTemplate": "Unknown Template",
"unkownInvocation": "Unknown Invocation type",
"updateNode": "Update Node",
"updateApp": "Update App",
"vaeField": "Vae",
"vaeFieldDescription": "Vae submodel.",
@ -819,6 +901,7 @@
},
"cfgScale": "CFG Scale",
"clipSkip": "CLIP Skip",
"clipSkipWithLayerCount": "CLIP Skip {{layerCount}}",
"closeViewer": "Close Viewer",
"codeformerFidelity": "Fidelity",
"coherenceMode": "Mode",
@ -857,6 +940,7 @@
"noInitialImageSelected": "No initial image selected",
"noModelForControlNet": "ControlNet {{index}} has no model selected.",
"noModelSelected": "No model selected",
"noPrompts": "No prompts generated",
"noNodesInGraph": "No nodes in graph",
"readyToInvoke": "Ready to Invoke",
"systemBusy": "System busy",
@ -875,7 +959,12 @@
"perlinNoise": "Perlin Noise",
"positivePromptPlaceholder": "Positive Prompt",
"randomizeSeed": "Randomize Seed",
"manualSeed": "Manual Seed",
"randomSeed": "Random Seed",
"restoreFaces": "Restore Faces",
"iterations": "Iterations",
"iterationsWithCount_one": "{{count}} Iteration",
"iterationsWithCount_other": "{{count}} Iterations",
"scale": "Scale",
"scaleBeforeProcessing": "Scale Before Processing",
"scaledHeight": "Scaled H",
@ -886,13 +975,17 @@
"seamlessTiling": "Seamless Tiling",
"seamlessXAxis": "X Axis",
"seamlessYAxis": "Y Axis",
"seamlessX": "Seamless X",
"seamlessY": "Seamless Y",
"seamlessX&Y": "Seamless X & Y",
"seamLowThreshold": "Low",
"seed": "Seed",
"seedWeights": "Seed Weights",
"imageActions": "Image Actions",
"sendTo": "Send to",
"sendToImg2Img": "Send to Image to Image",
"sendToUnifiedCanvas": "Send To Unified Canvas",
"showOptionsPanel": "Show Options Panel",
"showOptionsPanel": "Show Side Panel (O or T)",
"showPreview": "Show Preview",
"shuffle": "Shuffle Seed",
"steps": "Steps",
@ -901,11 +994,13 @@
"tileSize": "Tile Size",
"toggleLoopback": "Toggle Loopback",
"type": "Type",
"upscale": "Upscale",
"upscale": "Upscale (Shift + U)",
"upscaleImage": "Upscale Image",
"upscaling": "Upscaling",
"useAll": "Use All",
"useCpuNoise": "Use CPU Noise",
"cpuNoise": "CPU Noise",
"gpuNoise": "GPU Noise",
"useInitImg": "Use Initial Image",
"usePrompt": "Use Prompt",
"useSeed": "Use Seed",
@ -914,11 +1009,20 @@
"vSymmetryStep": "V Symmetry Step",
"width": "Width"
},
"prompt": {
"dynamicPrompts": {
"combinatorial": "Combinatorial Generation",
"dynamicPrompts": "Dynamic Prompts",
"enableDynamicPrompts": "Enable Dynamic Prompts",
"maxPrompts": "Max Prompts"
"maxPrompts": "Max Prompts",
"promptsWithCount_one": "{{count}} Prompt",
"promptsWithCount_other": "{{count}} Prompts",
"seedBehaviour": {
"label": "Seed Behaviour",
"perIterationLabel": "Seed per Iteration",
"perIterationDesc": "Use a different seed for each iteration",
"perPromptLabel": "Seed per Prompt",
"perPromptDesc": "Use a different seed for each prompt"
}
},
"sdxl": {
"cfgScale": "CFG Scale",
@ -1066,6 +1170,136 @@
"variations": "Try a variation with a value between 0.1 and 1.0 to change the result for a given seed. Interesting variations of the seed are between 0.1 and 0.3."
}
},
"popovers": {
"clipSkip": {
"heading": "CLIP Skip",
"paragraph": "Choose how many layers of the CLIP model to skip. Certain models are better suited to be used with CLIP Skip."
},
"compositingBlur": {
"heading": "Blur",
"paragraph": "The blur radius of the mask."
},
"compositingBlurMethod": {
"heading": "Blur Method",
"paragraph": "The method of blur applied to the masked area."
},
"compositingCoherencePass": {
"heading": "Coherence Pass",
"paragraph": "Composite the Inpainted/Outpainted images."
},
"compositingCoherenceMode": {
"heading": "Mode",
"paragraph": "The mode of the Coherence Pass."
},
"compositingCoherenceSteps": {
"heading": "Steps",
"paragraph": "Number of steps in the Coherence Pass. Similar to Denoising Steps."
},
"compositingStrength": {
"heading": "Strength",
"paragraph": "Amount of noise added for the Coherence Pass. Similar to Denoising Strength."
},
"compositingMaskAdjustments": {
"heading": "Mask Adjustments",
"paragraph": "Adjust the mask."
},
"controlNetBeginEnd": {
"heading": "Begin / End Step Percentage",
"paragraph": "Which parts of the denoising process will have the ControlNet applied. ControlNets applied at the start of the process guide composition, and ControlNets applied at the end guide details."
},
"controlNetControlMode": {
"heading": "Control Mode",
"paragraph": "Lends more weight to either the prompt or ControlNet."
},
"controlNetResizeMode": {
"heading": "Resize Mode",
"paragraph": "How the ControlNet image will be fit to the image generation Ratio"
},
"controlNetToggle": {
"heading": "Enable ControlNet",
"paragraph": "ControlNets provide guidance to the generation process, helping create images with controlled composition, structure, or style, depending on the model selected."
},
"controlNetWeight": {
"heading": "Weight",
"paragraph": "How strongly the ControlNet will impact the generated image."
},
"dynamicPromptsToggle": {
"heading": "Enable Dynamic Prompts",
"paragraph": "Dynamic prompts allow multiple options within a prompt. Dynamic prompts can be used by: {option1|option2|option3}. Combinations of prompts will be randomly generated until the “Images” number has been reached."
},
"dynamicPromptsCombinatorial": {
"heading": "Combinatorial Generation",
"paragraph": "Generate an image for every possible combination of Dynamic Prompts until the Max Prompts is reached."
},
"infillMethod": {
"heading": "Infill Method",
"paragraph": "Method to infill the selected area."
},
"lora": {
"heading": "LoRA Weight",
"paragraph": "Weight of the LoRA. Higher weight will lead to larger impacts on the final image."
},
"noiseEnable": {
"heading": "Enable Noise Settings",
"paragraph": "Advanced control over noise generation."
},
"noiseUseCPU": {
"heading": "Use CPU Noise",
"paragraph": "Uses the CPU to generate random noise."
},
"paramCFGScale": {
"heading": "CFG Scale",
"paragraph": "Controls how much your prompt influences the generation process."
},
"paramDenoisingStrength": {
"heading": "Denoising Strength",
"paragraph": "How much noise is added to the input image. 0 will result in an identical image, while 1 will result in a completely new image."
},
"paramIterations": {
"heading": "Iterations",
"paragraph": "The number of images to generate. If Dynamic Prompts is enabled, each of the prompts will be generated this many times."
},
"paramModel": {
"heading": "Model",
"paragraph": "Model used for the denoising steps. Different models are trained to specialize in producing different aesthetic results and content."
},
"paramNegativeConditioning": {
"heading": "Negative Prompt",
"paragraph": "The generation process avoids the concepts in the negative prompt. Use this to exclude qualities or objects from the output. Supports Compel syntax and embeddings."
},
"paramPositiveConditioning": {
"heading": "Positive Prompt",
"paragraph": "Guides the generation process. You may use any words or phrases. Supports Compel and Dynamic Prompts syntaxes and embeddings."
},
"paramRatio": {
"heading": "Ratio",
"paragraph": "The ratio of the dimensions of the image generated. An image size (in number of pixels) equivalent to 512x512 is recommended for SD1.5 models and a size equivalent to 1024x1024 is recommended for SDXL models."
},
"paramScheduler": {
"heading": "Scheduler",
"paragraph": "Scheduler defines how to iteratively add noise to an image or how to update a sample based on a model's output."
},
"paramSeed": {
"heading": "Seed",
"paragraph": "Controls the starting noise used for generation. Disable “Random Seed” to produce identical results with the same generation settings."
},
"paramSteps": {
"heading": "Steps",
"paragraph": "Number of steps that will be performed in each generation. Higher step counts will typically create better images but will require more generation time."
},
"paramVAE": {
"heading": "VAE",
"paragraph": "Model used for translating AI output into the final image."
},
"paramVAEPrecision": {
"heading": "VAE Precision",
"paragraph": "The precision used during VAE encoding and decoding. Fp16/Half precision is more efficient, at the expense of minor image variations."
},
"scaleBeforeProcessing": {
"heading": "Scale Before Processing",
"paragraph": "Scales the selected area to the size best suited for the model before the image generation process."
}
},
"ui": {
"hideProgressImages": "Hide Progress Images",
"lockRatio": "Lock Ratio",

View File

@ -264,6 +264,22 @@
"graphQueued": "Graph queued",
"graphFailedToQueue": "Failed to queue graph"
},
"invocationCache": {
"invocationCache": "Invocation Cache",
"cacheSize": "Cache Size",
"maxCacheSize": "Max Cache Size",
"hits": "Cache Hits",
"misses": "Cache Misses",
"clear": "Clear",
"clearSucceeded": "Invocation Cache Cleared",
"clearFailed": "Problem Clearing Invocation Cache",
"enable": "Enable",
"enableSucceeded": "Invocation Cache Enabled",
"enableFailed": "Problem Enabling Invocation Cache",
"disable": "Disable",
"disableSucceeded": "Invocation Cache Disabled",
"disableFailed": "Problem Disabling Invocation Cache"
},
"gallery": {
"allImagesLoaded": "All Images Loaded",
"assets": "Assets",

View File

@ -21,7 +21,8 @@ export type AppFeature =
| 'multiselect'
| 'pauseQueue'
| 'resumeQueue'
| 'prependQueue';
| 'prependQueue'
| 'invocationCache';
/**
* A disable-able Stable Diffusion feature

View File

@ -45,6 +45,7 @@ const NodeEditorPanelGroup = () => {
<PanelGroup
ref={panelGroupRef}
id="workflow-panel-group"
autoSaveId="workflow-panel-group"
direction="vertical"
style={{ height: '100%', width: '100%' }}
storage={panelStorage}

View File

@ -1,5 +1,5 @@
import { createSlice, PayloadAction } from '@reduxjs/toolkit';
import { cloneDeep, forEach, isEqual, map, uniqBy } from 'lodash-es';
import { cloneDeep, forEach, isEqual, uniqBy } from 'lodash-es';
import {
addEdge,
applyEdgeChanges,
@ -19,7 +19,6 @@ import {
XYPosition,
} from 'reactflow';
import { receivedOpenAPISchema } from 'services/api/thunks/schema';
import { sessionCanceled, sessionInvoked } from 'services/api/thunks/session';
import { ImageField } from 'services/api/types';
import {
appSocketGeneratorProgress,
@ -869,28 +868,10 @@ const nodesSlice = createSlice({
node.progressImage = progress_image ?? null;
}
});
builder.addCase(sessionInvoked.fulfilled, (state) => {
forEach(state.nodeExecutionStates, (nes) => {
nes.status = NodeStatus.PENDING;
nes.error = null;
nes.progress = null;
nes.progressImage = null;
nes.outputs = [];
});
});
builder.addCase(sessionCanceled.fulfilled, (state) => {
map(state.nodeExecutionStates, (nes) => {
if (nes.status === NodeStatus.IN_PROGRESS) {
nes.status = NodeStatus.PENDING;
}
});
});
builder.addCase(appSocketQueueItemStatusChanged, (state, action) => {
if (
['completed', 'canceled', 'failed'].includes(action.payload.data.status)
) {
if (['in_progress'].includes(action.payload.data.status)) {
forEach(state.nodeExecutionStates, (nes) => {
nes.status = NodeStatus.PENDING;
nes.status = NodeStatus.IN_PROGRESS;
nes.error = null;
nes.progress = null;
nes.progressImage = null;

View File

@ -0,0 +1,22 @@
import IAIButton from 'common/components/IAIButton';
import { memo } from 'react';
import { useTranslation } from 'react-i18next';
import { useClearInvocationCache } from '../hooks/useClearInvocationCache';
const ClearInvocationCacheButton = () => {
const { t } = useTranslation();
const { clearInvocationCache, isDisabled, isLoading } =
useClearInvocationCache();
return (
<IAIButton
isDisabled={isDisabled}
isLoading={isLoading}
onClick={clearInvocationCache}
>
{t('invocationCache.clear')}
</IAIButton>
);
};
export default memo(ClearInvocationCacheButton);

View File

@ -0,0 +1,55 @@
import { ButtonGroup } from '@chakra-ui/react';
import { useAppSelector } from 'app/store/storeHooks';
import { memo } from 'react';
import { useTranslation } from 'react-i18next';
import { useGetInvocationCacheStatusQuery } from 'services/api/endpoints/appInfo';
import { useGetQueueStatusQuery } from 'services/api/endpoints/queue';
import ClearInvocationCacheButton from './ClearInvocationCacheButton';
import ToggleInvocationCacheButton from './ToggleInvocationCacheButton';
import StatusStatGroup from './common/StatusStatGroup';
import StatusStatItem from './common/StatusStatItem';
const InvocationCacheStatus = () => {
const { t } = useTranslation();
const isConnected = useAppSelector((state) => state.system.isConnected);
const { data: queueStatus } = useGetQueueStatusQuery(undefined);
const { data: cacheStatus } = useGetInvocationCacheStatusQuery(undefined, {
pollingInterval:
isConnected &&
queueStatus?.processor.is_started &&
queueStatus?.queue.pending > 0
? 5000
: 0,
});
return (
<StatusStatGroup>
<StatusStatItem
isDisabled={!cacheStatus?.enabled}
label={t('invocationCache.cacheSize')}
value={cacheStatus?.size ?? 0}
/>
<StatusStatItem
isDisabled={!cacheStatus?.enabled}
label={t('invocationCache.hits')}
value={cacheStatus?.hits ?? 0}
/>
<StatusStatItem
isDisabled={!cacheStatus?.enabled}
label={t('invocationCache.misses')}
value={cacheStatus?.misses ?? 0}
/>
<StatusStatItem
isDisabled={!cacheStatus?.enabled}
label={t('invocationCache.maxCacheSize')}
value={cacheStatus?.max_size ?? 0}
/>
<ButtonGroup w={24} orientation="vertical" size="xs">
<ClearInvocationCacheButton />
<ToggleInvocationCacheButton />
</ButtonGroup>
</StatusStatGroup>
);
};
export default memo(InvocationCacheStatus);

View File

@ -1,38 +1,39 @@
import { Stat, StatGroup, StatLabel, StatNumber } from '@chakra-ui/react';
import { memo } from 'react';
import { useTranslation } from 'react-i18next';
import { useGetQueueStatusQuery } from 'services/api/endpoints/queue';
import StatusStatGroup from './common/StatusStatGroup';
import StatusStatItem from './common/StatusStatItem';
const QueueStatus = () => {
const { data: queueStatus } = useGetQueueStatusQuery();
const { t } = useTranslation();
return (
<StatGroup alignItems="center" justifyContent="center" w="full" h="full">
<Stat w={24}>
<StatLabel>{t('queue.in_progress')}</StatLabel>
<StatNumber>{queueStatus?.queue.in_progress ?? 0}</StatNumber>
</Stat>
<Stat w={24}>
<StatLabel>{t('queue.pending')}</StatLabel>
<StatNumber>{queueStatus?.queue.pending ?? 0}</StatNumber>
</Stat>
<Stat w={24}>
<StatLabel>{t('queue.completed')}</StatLabel>
<StatNumber>{queueStatus?.queue.completed ?? 0}</StatNumber>
</Stat>
<Stat w={24}>
<StatLabel>{t('queue.failed')}</StatLabel>
<StatNumber>{queueStatus?.queue.failed ?? 0}</StatNumber>
</Stat>
<Stat w={24}>
<StatLabel>{t('queue.canceled')}</StatLabel>
<StatNumber>{queueStatus?.queue.canceled ?? 0}</StatNumber>
</Stat>
<Stat w={24}>
<StatLabel>{t('queue.total')}</StatLabel>
<StatNumber>{queueStatus?.queue.total}</StatNumber>
</Stat>
</StatGroup>
<StatusStatGroup>
<StatusStatItem
label={t('queue.in_progress')}
value={queueStatus?.queue.in_progress ?? 0}
/>
<StatusStatItem
label={t('queue.pending')}
value={queueStatus?.queue.pending ?? 0}
/>
<StatusStatItem
label={t('queue.completed')}
value={queueStatus?.queue.completed ?? 0}
/>
<StatusStatItem
label={t('queue.failed')}
value={queueStatus?.queue.failed ?? 0}
/>
<StatusStatItem
label={t('queue.canceled')}
value={queueStatus?.queue.canceled ?? 0}
/>
<StatusStatItem
label={t('queue.total')}
value={queueStatus?.queue.total ?? 0}
/>
</StatusStatGroup>
);
};

View File

@ -1,16 +1,14 @@
import { Box, ButtonGroup, Flex } from '@chakra-ui/react';
import { Box, Flex } from '@chakra-ui/react';
import { memo } from 'react';
import ClearQueueButton from './ClearQueueButton';
import PauseProcessorButton from './PauseProcessorButton';
import PruneQueueButton from './PruneQueueButton';
import { useFeatureStatus } from '../../system/hooks/useFeatureStatus';
import InvocationCacheStatus from './InvocationCacheStatus';
import QueueList from './QueueList/QueueList';
import QueueStatus from './QueueStatus';
import ResumeProcessorButton from './ResumeProcessorButton';
import { useFeatureStatus } from '../../system/hooks/useFeatureStatus';
import QueueTabQueueControls from './QueueTabQueueControls';
const QueueTabContent = () => {
const isPauseEnabled = useFeatureStatus('pauseQueue').isFeatureEnabled;
const isResumeEnabled = useFeatureStatus('resumeQueue').isFeatureEnabled;
const isInvocationCacheEnabled =
useFeatureStatus('invocationCache').isFeatureEnabled;
return (
<Flex
@ -23,33 +21,9 @@ const QueueTabContent = () => {
gap={2}
>
<Flex gap={2} w="full">
<Flex layerStyle="second" borderRadius="base" p={2} gap={2}>
{isPauseEnabled || isResumeEnabled ? (
<ButtonGroup w={28} orientation="vertical" isAttached size="sm">
{isResumeEnabled ? <ResumeProcessorButton /> : <></>}
{isPauseEnabled ? <PauseProcessorButton /> : <></>}
</ButtonGroup>
) : (
<></>
)}
<ButtonGroup w={28} orientation="vertical" isAttached size="sm">
<PruneQueueButton />
<ClearQueueButton />
</ButtonGroup>
</Flex>
<Flex
layerStyle="second"
borderRadius="base"
flexDir="column"
py={2}
px={3}
gap={2}
>
<QueueStatus />
</Flex>
{/* <QueueStatusCard />
<CurrentQueueItemCard />
<NextQueueItemCard /> */}
<QueueTabQueueControls />
<QueueStatus />
{isInvocationCacheEnabled && <InvocationCacheStatus />}
</Flex>
<Box layerStyle="second" p={2} borderRadius="base" w="full" h="full">
<QueueList />

View File

@ -0,0 +1,30 @@
import { ButtonGroup, Flex } from '@chakra-ui/react';
import { useFeatureStatus } from 'features/system/hooks/useFeatureStatus';
import { memo } from 'react';
import ClearQueueButton from './ClearQueueButton';
import PauseProcessorButton from './PauseProcessorButton';
import PruneQueueButton from './PruneQueueButton';
import ResumeProcessorButton from './ResumeProcessorButton';
const QueueTabQueueControls = () => {
const isPauseEnabled = useFeatureStatus('pauseQueue').isFeatureEnabled;
const isResumeEnabled = useFeatureStatus('resumeQueue').isFeatureEnabled;
return (
<Flex layerStyle="second" borderRadius="base" p={2} gap={2}>
{isPauseEnabled || isResumeEnabled ? (
<ButtonGroup w={28} orientation="vertical" isAttached size="sm">
{isResumeEnabled ? <ResumeProcessorButton /> : <></>}
{isPauseEnabled ? <PauseProcessorButton /> : <></>}
</ButtonGroup>
) : (
<></>
)}
<ButtonGroup w={28} orientation="vertical" isAttached size="sm">
<PruneQueueButton />
<ClearQueueButton />
</ButtonGroup>
</Flex>
);
};
export default memo(QueueTabQueueControls);

View File

@ -0,0 +1,47 @@
import IAIButton from 'common/components/IAIButton';
import { memo } from 'react';
import { useTranslation } from 'react-i18next';
import { useGetInvocationCacheStatusQuery } from 'services/api/endpoints/appInfo';
import { useDisableInvocationCache } from '../hooks/useDisableInvocationCache';
import { useEnableInvocationCache } from '../hooks/useEnableInvocationCache';
const ToggleInvocationCacheButton = () => {
const { t } = useTranslation();
const { data: cacheStatus } = useGetInvocationCacheStatusQuery();
const {
enableInvocationCache,
isDisabled: isEnableDisabled,
isLoading: isEnableLoading,
} = useEnableInvocationCache();
const {
disableInvocationCache,
isDisabled: isDisableDisabled,
isLoading: isDisableLoading,
} = useDisableInvocationCache();
if (cacheStatus?.enabled) {
return (
<IAIButton
isDisabled={isDisableDisabled}
isLoading={isDisableLoading}
onClick={disableInvocationCache}
>
{t('invocationCache.disable')}
</IAIButton>
);
}
return (
<IAIButton
isDisabled={isEnableDisabled}
isLoading={isEnableLoading}
onClick={enableInvocationCache}
>
{t('invocationCache.enable')}
</IAIButton>
);
};
export default memo(ToggleInvocationCacheButton);

View File

@ -1,27 +0,0 @@
import { ButtonGroup, ButtonGroupProps, Flex } from '@chakra-ui/react';
import { memo } from 'react';
import ClearQueueButton from './ClearQueueButton';
import PauseProcessorButton from './PauseProcessorButton';
import PruneQueueButton from './PruneQueueButton';
import ResumeProcessorButton from './ResumeProcessorButton';
type Props = ButtonGroupProps & {
asIconButtons?: boolean;
};
const VerticalQueueControls = ({ asIconButtons, ...rest }: Props) => {
return (
<Flex flexDir="column" gap={2}>
<ButtonGroup w="full" isAttached {...rest}>
<ResumeProcessorButton asIconButton={asIconButtons} />
<PauseProcessorButton asIconButton={asIconButtons} />
</ButtonGroup>
<ButtonGroup w="full" isAttached {...rest}>
<PruneQueueButton asIconButton={asIconButtons} />
<ClearQueueButton asIconButton={asIconButtons} />
</ButtonGroup>
</Flex>
);
};
export default memo(VerticalQueueControls);

View File

@ -0,0 +1,22 @@
import { StatGroup, StatGroupProps } from '@chakra-ui/react';
import { memo } from 'react';
const StatusStatGroup = ({ children, ...rest }: StatGroupProps) => (
<StatGroup
alignItems="center"
justifyContent="center"
w="full"
h="full"
layerStyle="second"
borderRadius="base"
py={2}
px={3}
gap={6}
flexWrap="nowrap"
{...rest}
>
{children}
</StatGroup>
);
export default memo(StatusStatGroup);

View File

@ -0,0 +1,47 @@
import {
ChakraProps,
Stat,
StatLabel,
StatNumber,
StatProps,
} from '@chakra-ui/react';
import { memo } from 'react';
const sx: ChakraProps['sx'] = {
'&[aria-disabled="true"]': {
color: 'base.400',
_dark: {
color: 'base.500',
},
},
};
type Props = Omit<StatProps, 'children'> & {
label: string;
value: string | number;
isDisabled?: boolean;
};
const StatusStatItem = ({
label,
value,
isDisabled = false,
...rest
}: Props) => (
<Stat
flexGrow={1}
textOverflow="ellipsis"
overflow="hidden"
whiteSpace="nowrap"
aria-disabled={isDisabled}
sx={sx}
{...rest}
>
<StatLabel textOverflow="ellipsis" overflow="hidden" whiteSpace="nowrap">
{label}
</StatLabel>
<StatNumber>{value}</StatNumber>
</Stat>
);
export default memo(StatusStatItem);

View File

@ -0,0 +1,48 @@
import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
import { addToast } from 'features/system/store/systemSlice';
import { useCallback, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import {
useClearInvocationCacheMutation,
useGetInvocationCacheStatusQuery,
} from 'services/api/endpoints/appInfo';
export const useClearInvocationCache = () => {
const { t } = useTranslation();
const dispatch = useAppDispatch();
const { data: cacheStatus } = useGetInvocationCacheStatusQuery();
const isConnected = useAppSelector((state) => state.system.isConnected);
const [trigger, { isLoading }] = useClearInvocationCacheMutation({
fixedCacheKey: 'clearInvocationCache',
});
const isDisabled = useMemo(
() => !cacheStatus?.size || !isConnected,
[cacheStatus?.size, isConnected]
);
const clearInvocationCache = useCallback(async () => {
if (isDisabled) {
return;
}
try {
await trigger().unwrap();
dispatch(
addToast({
title: t('invocationCache.clearSucceeded'),
status: 'success',
})
);
} catch {
dispatch(
addToast({
title: t('invocationCache.clearFailed'),
status: 'error',
})
);
}
}, [isDisabled, trigger, dispatch, t]);
return { clearInvocationCache, isLoading, cacheStatus, isDisabled };
};

View File

@ -0,0 +1,48 @@
import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
import { addToast } from 'features/system/store/systemSlice';
import { useCallback, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import {
useDisableInvocationCacheMutation,
useGetInvocationCacheStatusQuery,
} from 'services/api/endpoints/appInfo';
export const useDisableInvocationCache = () => {
const { t } = useTranslation();
const dispatch = useAppDispatch();
const { data: cacheStatus } = useGetInvocationCacheStatusQuery();
const isConnected = useAppSelector((state) => state.system.isConnected);
const [trigger, { isLoading }] = useDisableInvocationCacheMutation({
fixedCacheKey: 'disableInvocationCache',
});
const isDisabled = useMemo(
() => !cacheStatus?.enabled || !isConnected || cacheStatus?.max_size === 0,
[cacheStatus?.enabled, cacheStatus?.max_size, isConnected]
);
const disableInvocationCache = useCallback(async () => {
if (isDisabled) {
return;
}
try {
await trigger().unwrap();
dispatch(
addToast({
title: t('invocationCache.disableSucceeded'),
status: 'success',
})
);
} catch {
dispatch(
addToast({
title: t('invocationCache.disableFailed'),
status: 'error',
})
);
}
}, [isDisabled, trigger, dispatch, t]);
return { disableInvocationCache, isLoading, cacheStatus, isDisabled };
};

View File

@ -0,0 +1,48 @@
import { useAppDispatch, useAppSelector } from 'app/store/storeHooks';
import { addToast } from 'features/system/store/systemSlice';
import { useCallback, useMemo } from 'react';
import { useTranslation } from 'react-i18next';
import {
useEnableInvocationCacheMutation,
useGetInvocationCacheStatusQuery,
} from 'services/api/endpoints/appInfo';
export const useEnableInvocationCache = () => {
const { t } = useTranslation();
const dispatch = useAppDispatch();
const { data: cacheStatus } = useGetInvocationCacheStatusQuery();
const isConnected = useAppSelector((state) => state.system.isConnected);
const [trigger, { isLoading }] = useEnableInvocationCacheMutation({
fixedCacheKey: 'enableInvocationCache',
});
const isDisabled = useMemo(
() => cacheStatus?.enabled || !isConnected || cacheStatus?.max_size === 0,
[cacheStatus?.enabled, cacheStatus?.max_size, isConnected]
);
const enableInvocationCache = useCallback(async () => {
if (isDisabled) {
return;
}
try {
await trigger().unwrap();
dispatch(
addToast({
title: t('invocationCache.enableSucceeded'),
status: 'success',
})
);
} catch {
dispatch(
addToast({
title: t('invocationCache.enableFailed'),
status: 'error',
})
);
}
}, [isDisabled, trigger, dispatch, t]);
return { enableInvocationCache, isLoading, cacheStatus, isDisabled };
};

View File

@ -1,4 +1,5 @@
import { api } from '..';
import { paths } from '../schema';
import { AppConfig, AppVersion } from '../types';
export const appInfoApi = api.injectEndpoints({
@ -19,7 +20,45 @@ export const appInfoApi = api.injectEndpoints({
providesTags: ['AppConfig'],
keepUnusedDataFor: 86400000, // 1 day
}),
getInvocationCacheStatus: build.query<
paths['/api/v1/app/invocation_cache/status']['get']['responses']['200']['content']['application/json'],
void
>({
query: () => ({
url: `app/invocation_cache/status`,
method: 'GET',
}),
providesTags: ['InvocationCacheStatus'],
}),
clearInvocationCache: build.mutation<void, void>({
query: () => ({
url: `app/invocation_cache`,
method: 'DELETE',
}),
invalidatesTags: ['InvocationCacheStatus'],
}),
enableInvocationCache: build.mutation<void, void>({
query: () => ({
url: `app/invocation_cache/enable`,
method: 'PUT',
}),
invalidatesTags: ['InvocationCacheStatus'],
}),
disableInvocationCache: build.mutation<void, void>({
query: () => ({
url: `app/invocation_cache/disable`,
method: 'PUT',
}),
invalidatesTags: ['InvocationCacheStatus'],
}),
}),
});
export const { useGetAppVersionQuery, useGetAppConfigQuery } = appInfoApi;
export const {
useGetAppVersionQuery,
useGetAppConfigQuery,
useClearInvocationCacheMutation,
useDisableInvocationCacheMutation,
useEnableInvocationCacheMutation,
useGetInvocationCacheStatusQuery,
} = appInfoApi;

View File

@ -24,6 +24,7 @@ export const tagTypes = [
'SessionQueueStatus',
'SessionProcessorStatus',
'BatchStatus',
'InvocationCacheStatus',
];
export type ApiTagDescription = TagDescription<(typeof tagTypes)[number]>;
export const LIST_TAG = 'LIST';

View File

@ -317,6 +317,34 @@ export type paths = {
*/
post: operations["set_log_level"];
};
"/api/v1/app/invocation_cache": {
/**
* Clear Invocation Cache
* @description Clears the invocation cache
*/
delete: operations["clear_invocation_cache"];
};
"/api/v1/app/invocation_cache/enable": {
/**
* Enable Invocation Cache
* @description Enables the invocation cache
*/
put: operations["enable_invocation_cache"];
};
"/api/v1/app/invocation_cache/disable": {
/**
* Disable Invocation Cache
* @description Disables the invocation cache
*/
put: operations["disable_invocation_cache"];
};
"/api/v1/app/invocation_cache/status": {
/**
* Get Invocation Cache Status
* @description Gets the status of the invocation cache
*/
get: operations["get_invocation_cache_status"];
};
"/api/v1/queue/{queue_id}/enqueue_graph": {
/**
* Enqueue Graph
@ -1259,11 +1287,6 @@ export type components = {
* @default true
*/
use_cache?: boolean;
/**
* CLIP
* @description CLIP (tokenizer, text encoder, LoRAs) and skipped layer count
*/
clip?: components["schemas"]["ClipField"];
/**
* Skipped Layers
* @description Number of layers to skip in text encoder
@ -1276,6 +1299,11 @@ export type components = {
* @enum {string}
*/
type: "clip_skip";
/**
* CLIP
* @description CLIP (tokenizer, text encoder, LoRAs) and skipped layer count
*/
clip?: components["schemas"]["ClipField"];
};
/**
* ClipSkipInvocationOutput
@ -2020,7 +2048,7 @@ export type components = {
* Clip Skip
* @description The number of skipped CLIP layers
*/
clip_skip: number;
clip_skip?: number;
/**
* Model
* @description The main model used for inference
@ -2309,10 +2337,7 @@ export type components = {
* @enum {string}
*/
scheduler?: "ddim" | "ddpm" | "deis" | "lms" | "lms_k" | "pndm" | "heun" | "heun_k" | "euler" | "euler_k" | "euler_a" | "kdpm_2" | "kdpm_2_a" | "dpmpp_2s" | "dpmpp_2s_k" | "dpmpp_2m" | "dpmpp_2m_k" | "dpmpp_2m_sde" | "dpmpp_2m_sde_k" | "dpmpp_sde" | "dpmpp_sde_k" | "unipc";
/**
* Control
* @description ControlNet(s) to apply
*/
/** Control */
control?: components["schemas"]["ControlField"] | components["schemas"]["ControlField"][];
/**
* IP-Adapter
@ -4718,6 +4743,34 @@ export type components = {
*/
type: "integer_output";
};
/** InvocationCacheStatus */
InvocationCacheStatus: {
/**
* Size
* @description The current size of the invocation cache
*/
size: number;
/**
* Hits
* @description The number of cache hits
*/
hits: number;
/**
* Misses
* @description The number of cache misses
*/
misses: number;
/**
* Enabled
* @description Whether the invocation cache is enabled
*/
enabled: boolean;
/**
* Max Size
* @description The maximum size of the invocation cache
*/
max_size: number;
};
/**
* IterateInvocation
* @description Iterates over a list of items
@ -7497,11 +7550,6 @@ export type components = {
* @default false
*/
use_cache?: boolean;
/**
* Image
* @description The image to load
*/
image?: components["schemas"]["ImageField"];
/**
* Metadata
* @description Optional core metadata to be written to image
@ -7513,6 +7561,11 @@ export type components = {
* @enum {string}
*/
type: "save_image";
/**
* Image
* @description The image to load
*/
image?: components["schemas"]["ImageField"];
};
/**
* Scale Latents
@ -9042,18 +9095,6 @@ export type components = {
/** Ui Order */
ui_order?: number;
};
/**
* IPAdapterModelFormat
* @description An enumeration.
* @enum {string}
*/
IPAdapterModelFormat: "invokeai";
/**
* ControlNetModelFormat
* @description An enumeration.
* @enum {string}
*/
ControlNetModelFormat: "checkpoint" | "diffusers";
/**
* StableDiffusionOnnxModelFormat
* @description An enumeration.
@ -9066,24 +9107,36 @@ export type components = {
* @enum {string}
*/
StableDiffusion2ModelFormat: "checkpoint" | "diffusers";
/**
* StableDiffusion1ModelFormat
* @description An enumeration.
* @enum {string}
*/
StableDiffusion1ModelFormat: "checkpoint" | "diffusers";
/**
* CLIPVisionModelFormat
* @description An enumeration.
* @enum {string}
*/
CLIPVisionModelFormat: "diffusers";
/**
* StableDiffusion1ModelFormat
* @description An enumeration.
* @enum {string}
*/
StableDiffusion1ModelFormat: "checkpoint" | "diffusers";
/**
* StableDiffusionXLModelFormat
* @description An enumeration.
* @enum {string}
*/
StableDiffusionXLModelFormat: "checkpoint" | "diffusers";
/**
* IPAdapterModelFormat
* @description An enumeration.
* @enum {string}
*/
IPAdapterModelFormat: "invokeai";
/**
* ControlNetModelFormat
* @description An enumeration.
* @enum {string}
*/
ControlNetModelFormat: "checkpoint" | "diffusers";
};
responses: never;
parameters: never;
@ -10505,6 +10558,62 @@ export type operations = {
};
};
};
/**
* Clear Invocation Cache
* @description Clears the invocation cache
*/
clear_invocation_cache: {
responses: {
/** @description The operation was successful */
200: {
content: {
"application/json": unknown;
};
};
};
};
/**
* Enable Invocation Cache
* @description Enables the invocation cache
*/
enable_invocation_cache: {
responses: {
/** @description The operation was successful */
200: {
content: {
"application/json": unknown;
};
};
};
};
/**
* Disable Invocation Cache
* @description Disables the invocation cache
*/
disable_invocation_cache: {
responses: {
/** @description The operation was successful */
200: {
content: {
"application/json": unknown;
};
};
};
};
/**
* Get Invocation Cache Status
* @description Gets the status of the invocation cache
*/
get_invocation_cache_status: {
responses: {
/** @description Successful Response */
200: {
content: {
"application/json": components["schemas"]["InvocationCacheStatus"];
};
};
};
};
/**
* Enqueue Graph
* @description Enqueues a graph for single execution.

View File

@ -1 +1 @@
__version__ = "3.1.1"
__version__ = "3.2.0rc2"