Mirror of https://github.com/invoke-ai/InvokeAI (synced 2024-08-30 20:32:17 +00:00)

feat: remove enqueue_graph routes/methods (#4922)

This route is entirely extraneous: it is almost identical to `enqueue_batch`, which can do everything it does.

This commit is contained in:
parent 58a0709c1e
commit 284a257c25
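
The removed method was thin sugar over the surviving one, so the migration is mechanical. A minimal sketch, assuming (as the diffs below suggest) that `Batch` is importable from `session_queue_common`, that the service is reachable as `session_queue`, and that the queue id `"default"` is a placeholder:

    # Before this commit (removed):
    result = session_queue.enqueue_graph(queue_id="default", graph=graph, prepend=False)

    # After: wrap the graph in a one-graph Batch and enqueue that instead.
    result = session_queue.enqueue_batch(queue_id="default", batch=Batch(graph=graph), prepend=False)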
@@ -12,13 +12,11 @@ from invokeai.app.services.session_queue.session_queue_common import (
     CancelByBatchIDsResult,
     ClearResult,
     EnqueueBatchResult,
-    EnqueueGraphResult,
     PruneResult,
     SessionQueueItem,
     SessionQueueItemDTO,
     SessionQueueStatus,
 )
-from invokeai.app.services.shared.graph import Graph
 from invokeai.app.services.shared.pagination import CursorPaginatedResults
 
 from ..dependencies import ApiDependencies
@@ -33,23 +31,6 @@ class SessionQueueAndProcessorStatus(BaseModel):
     processor: SessionProcessorStatus
 
 
-@session_queue_router.post(
-    "/{queue_id}/enqueue_graph",
-    operation_id="enqueue_graph",
-    responses={
-        201: {"model": EnqueueGraphResult},
-    },
-)
-async def enqueue_graph(
-    queue_id: str = Path(description="The queue id to perform this operation on"),
-    graph: Graph = Body(description="The graph to enqueue"),
-    prepend: bool = Body(default=False, description="Whether or not to prepend this batch in the queue"),
-) -> EnqueueGraphResult:
-    """Enqueues a graph for single execution."""
-
-    return ApiDependencies.invoker.services.session_queue.enqueue_graph(queue_id=queue_id, graph=graph, prepend=prepend)
-
-
 @session_queue_router.post(
     "/{queue_id}/enqueue_batch",
     operation_id="enqueue_batch",
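For HTTP clients of the removed POST /api/v1/queue/{queue_id}/enqueue_graph route, the equivalent call goes through the surviving enqueue_batch route. A hedged sketch using requests; the host, port, and queue id are placeholder assumptions, and the body shape mirrors the frontend `BatchConfig` used later in this diff:

    import requests

    graph = {"nodes": {}, "edges": []}  # placeholder: a real Graph payload goes here

    resp = requests.post(
        "http://localhost:9090/api/v1/queue/default/enqueue_batch",  # host/port/queue id assumed
        json={"prepend": False, "batch": {"graph": graph, "runs": 1}},
    )
    resp.raise_for_status()
    result = resp.json()  # an EnqueueBatchResult: enqueued, requested, batch, priority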
@@ -9,7 +9,6 @@ from invokeai.app.services.session_queue.session_queue_common import (
     CancelByQueueIDResult,
     ClearResult,
     EnqueueBatchResult,
-    EnqueueGraphResult,
     IsEmptyResult,
     IsFullResult,
     PruneResult,
@@ -17,7 +16,6 @@ from invokeai.app.services.session_queue.session_queue_common import (
     SessionQueueItemDTO,
     SessionQueueStatus,
 )
-from invokeai.app.services.shared.graph import Graph
 from invokeai.app.services.shared.pagination import CursorPaginatedResults
 
 
@@ -29,11 +27,6 @@ class SessionQueueBase(ABC):
         """Dequeues the next session queue item."""
         pass
 
-    @abstractmethod
-    def enqueue_graph(self, queue_id: str, graph: Graph, prepend: bool) -> EnqueueGraphResult:
-        """Enqueues a single graph for execution."""
-        pass
-
     @abstractmethod
     def enqueue_batch(self, queue_id: str, batch: Batch, prepend: bool) -> EnqueueBatchResult:
         """Enqueues all permutations of a batch for execution."""
@@ -276,14 +276,6 @@ class EnqueueBatchResult(BaseModel):
     priority: int = Field(description="The priority of the enqueued batch")
 
 
-class EnqueueGraphResult(BaseModel):
-    enqueued: int = Field(description="The total number of queue items enqueued")
-    requested: int = Field(description="The total number of queue items requested to be enqueued")
-    batch: Batch = Field(description="The batch that was enqueued")
-    priority: int = Field(description="The priority of the enqueued batch")
-    queue_item: SessionQueueItemDTO = Field(description="The queue item that was enqueued")
-
-
 class ClearResult(BaseModel):
     """Result of clearing the session queue"""
 
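The removed result model differed from `EnqueueBatchResult` only by the extra `queue_item` field; its other fields were splatted straight from an `EnqueueBatchResult`, as the SQLite diff below shows. Code that read the shared fields ports over unchanged. A short sketch, under the same assumptions as above:

    result = session_queue.enqueue_batch(queue_id="default", batch=Batch(graph=graph), prepend=False)
    # enqueued, requested, batch, and priority carry over; only queue_item is gone.
    print(result.enqueued, result.requested, result.priority, result.batch.batch_id)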
@@ -17,7 +17,6 @@ from invokeai.app.services.session_queue.session_queue_common import (
     CancelByQueueIDResult,
     ClearResult,
     EnqueueBatchResult,
-    EnqueueGraphResult,
     IsEmptyResult,
     IsFullResult,
     PruneResult,
@@ -28,7 +27,6 @@ from invokeai.app.services.session_queue.session_queue_common import (
     calc_session_count,
     prepare_values_to_insert,
 )
-from invokeai.app.services.shared.graph import Graph
 from invokeai.app.services.shared.pagination import CursorPaginatedResults
 from invokeai.app.services.shared.sqlite import SqliteDatabase
 
@@ -255,32 +253,6 @@ class SqliteSessionQueue(SessionQueueBase):
         )
         return cast(Union[int, None], self.__cursor.fetchone()[0]) or 0
 
-    def enqueue_graph(self, queue_id: str, graph: Graph, prepend: bool) -> EnqueueGraphResult:
-        enqueue_result = self.enqueue_batch(queue_id=queue_id, batch=Batch(graph=graph), prepend=prepend)
-        try:
-            self.__lock.acquire()
-            self.__cursor.execute(
-                """--sql
-                SELECT *
-                FROM session_queue
-                WHERE queue_id = ?
-                AND batch_id = ?
-                """,
-                (queue_id, enqueue_result.batch.batch_id),
-            )
-            result = cast(Union[sqlite3.Row, None], self.__cursor.fetchone())
-        except Exception:
-            self.__conn.rollback()
-            raise
-        finally:
-            self.__lock.release()
-        if result is None:
-            raise SessionQueueItemNotFoundError(f"No queue item with batch id {enqueue_result.batch.batch_id}")
-        return EnqueueGraphResult(
-            **enqueue_result.model_dump(),
-            queue_item=SessionQueueItemDTO.queue_item_dto_from_dict(dict(result)),
-        )
-
     def enqueue_batch(self, queue_id: str, batch: Batch, prepend: bool) -> EnqueueBatchResult:
         try:
             self.__lock.acquire()
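Callers that relied on the removed method returning the enqueued `queue_item` can recover it from the `batch_id` on the `EnqueueBatchResult`, exactly as the deleted code did. A standalone sqlite3 sketch of that lookup, assuming direct access to the `session_queue` table named in the removed query (and omitting the service's lock handling):

    import sqlite3

    def queue_items_for_batch(conn: sqlite3.Connection, queue_id: str, batch_id: str) -> list[sqlite3.Row]:
        # Same SELECT the removed enqueue_graph used internally.
        conn.row_factory = sqlite3.Row
        cursor = conn.execute(
            "SELECT * FROM session_queue WHERE queue_id = ? AND batch_id = ?",
            (queue_id, batch_id),
        )
        return cursor.fetchall()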
@@ -1,15 +1,9 @@
-import { isAnyOf } from '@reduxjs/toolkit';
 import { queueApi } from 'services/api/endpoints/queue';
 import { startAppListening } from '..';
 
-const matcher = isAnyOf(
-  queueApi.endpoints.enqueueBatch.matchFulfilled,
-  queueApi.endpoints.enqueueGraph.matchFulfilled
-);
-
 export const addAnyEnqueuedListener = () => {
   startAppListening({
-    matcher,
+    matcher: queueApi.endpoints.enqueueBatch.matchFulfilled,
     effect: async (_, { dispatch, getState }) => {
       const { data } = queueApi.endpoints.getQueueStatus.select()(getState());
 
@@ -1,22 +1,22 @@
 import { logger } from 'app/logging/logger';
 import { parseify } from 'common/util/serialize';
+import { controlAdapterImageProcessed } from 'features/controlAdapters/store/actions';
 import {
-  pendingControlImagesCleared,
   controlAdapterImageChanged,
-  selectControlAdapterById,
   controlAdapterProcessedImageChanged,
+  pendingControlImagesCleared,
+  selectControlAdapterById,
 } from 'features/controlAdapters/store/controlAdaptersSlice';
+import { isControlNetOrT2IAdapter } from 'features/controlAdapters/store/types';
 import { SAVE_IMAGE } from 'features/nodes/util/graphBuilders/constants';
 import { addToast } from 'features/system/store/systemSlice';
 import { t } from 'i18next';
 import { imagesApi } from 'services/api/endpoints/images';
 import { queueApi } from 'services/api/endpoints/queue';
 import { isImageOutput } from 'services/api/guards';
-import { Graph, ImageDTO } from 'services/api/types';
+import { BatchConfig, ImageDTO } from 'services/api/types';
 import { socketInvocationComplete } from 'services/events/actions';
 import { startAppListening } from '..';
-import { controlAdapterImageProcessed } from 'features/controlAdapters/store/actions';
-import { isControlNetOrT2IAdapter } from 'features/controlAdapters/store/types';
 
 export const addControlNetImageProcessedListener = () => {
   startAppListening({
@@ -37,41 +37,46 @@ export const addControlNetImageProcessedListener = () => {
 
-      // ControlNet one-off procressing graph is just the processor node, no edges.
-      // Also we need to grab the image.
-      const graph: Graph = {
-        nodes: {
-          [ca.processorNode.id]: {
-            ...ca.processorNode,
-            is_intermediate: true,
-            image: { image_name: ca.controlImage },
-          },
-          [SAVE_IMAGE]: {
-            id: SAVE_IMAGE,
-            type: 'save_image',
-            is_intermediate: true,
-            use_cache: false,
+      const enqueueBatchArg: BatchConfig = {
+        prepend: true,
+        batch: {
+          graph: {
+            nodes: {
+              [ca.processorNode.id]: {
+                ...ca.processorNode,
+                is_intermediate: true,
+                image: { image_name: ca.controlImage },
+              },
+              [SAVE_IMAGE]: {
+                id: SAVE_IMAGE,
+                type: 'save_image',
+                is_intermediate: true,
+                use_cache: false,
+              },
+            },
+            edges: [
+              {
+                source: {
+                  node_id: ca.processorNode.id,
+                  field: 'image',
+                },
+                destination: {
+                  node_id: SAVE_IMAGE,
+                  field: 'image',
+                },
+              },
+            ],
           },
+          runs: 1,
         },
-        edges: [
-          {
-            source: {
-              node_id: ca.processorNode.id,
-              field: 'image',
-            },
-            destination: {
-              node_id: SAVE_IMAGE,
-              field: 'image',
-            },
-          },
-        ],
       };
 
       try {
        const req = dispatch(
-          queueApi.endpoints.enqueueGraph.initiate(
-            { graph, prepend: true },
-            {
-              fixedCacheKey: 'enqueueGraph',
-            }
-          )
+          queueApi.endpoints.enqueueBatch.initiate(enqueueBatchArg, {
+            fixedCacheKey: 'enqueueBatch',
+          })
        );
        const enqueueResult = await req.unwrap();
        req.reset();
@@ -83,8 +88,8 @@ export const addControlNetImageProcessedListener = () => {
        const [invocationCompleteAction] = await take(
          (action): action is ReturnType<typeof socketInvocationComplete> =>
            socketInvocationComplete.match(action) &&
-            action.payload.data.graph_execution_state_id ===
-              enqueueResult.queue_item.session_id &&
+            action.payload.data.queue_batch_id ===
+              enqueueResult.batch.batch_id &&
            action.payload.data.source_node_id === SAVE_IMAGE
        );
 
@@ -116,7 +121,10 @@ export const addControlNetImageProcessedListener = () => {
          );
        }
      } catch (error) {
-        log.error({ graph: parseify(graph) }, t('queue.graphFailedToQueue'));
+        log.error(
+          { enqueueBatchArg: parseify(enqueueBatchArg) },
+          t('queue.graphFailedToQueue')
+        );
 
        // handle usage-related errors
        if (error instanceof Object) {
@@ -6,7 +6,7 @@ import { addToast } from 'features/system/store/systemSlice';
 import { t } from 'i18next';
 import { queueApi } from 'services/api/endpoints/queue';
 import { startAppListening } from '..';
-import { ImageDTO } from 'services/api/types';
+import { BatchConfig, ImageDTO } from 'services/api/types';
 import { createIsAllowedToUpscaleSelector } from 'features/parameters/hooks/useIsAllowedToUpscale';
 
 export const upscaleRequested = createAction<{ imageDTO: ImageDTO }>(
@@ -44,20 +44,23 @@ export const addUpscaleRequestedListener = () => {
      const { esrganModelName } = state.postprocessing;
      const { autoAddBoardId } = state.gallery;
 
-      const graph = buildAdHocUpscaleGraph({
-        image_name,
-        esrganModelName,
-        autoAddBoardId,
-      });
+      const enqueueBatchArg: BatchConfig = {
+        prepend: true,
+        batch: {
+          graph: buildAdHocUpscaleGraph({
+            image_name,
+            esrganModelName,
+            autoAddBoardId,
+          }),
+          runs: 1,
+        },
+      };
 
      try {
        const req = dispatch(
-          queueApi.endpoints.enqueueGraph.initiate(
-            { graph, prepend: true },
-            {
-              fixedCacheKey: 'enqueueGraph',
-            }
-          )
+          queueApi.endpoints.enqueueBatch.initiate(enqueueBatchArg, {
+            fixedCacheKey: 'enqueueBatch',
+          })
        );
 
        const enqueueResult = await req.unwrap();
@@ -67,7 +70,10 @@ export const addUpscaleRequestedListener = () => {
          t('queue.graphQueued')
        );
      } catch (error) {
-        log.error({ graph: parseify(graph) }, t('queue.graphFailedToQueue'));
+        log.error(
+          { enqueueBatchArg: parseify(enqueueBatchArg) },
+          t('queue.graphFailedToQueue')
+        );
 
        // handle usage-related errors
        if (error instanceof Object) {
@@ -3,7 +3,6 @@ import {
   // useCancelByBatchIdsMutation,
   useClearQueueMutation,
   useEnqueueBatchMutation,
-  useEnqueueGraphMutation,
   usePruneQueueMutation,
   useResumeProcessorMutation,
   usePauseProcessorMutation,
@@ -14,10 +13,6 @@ export const useIsQueueMutationInProgress = () => {
     useEnqueueBatchMutation({
       fixedCacheKey: 'enqueueBatch',
     });
-  const [_triggerEnqueueGraph, { isLoading: isLoadingEnqueueGraph }] =
-    useEnqueueGraphMutation({
-      fixedCacheKey: 'enqueueGraph',
-    });
   const [_triggerResumeProcessor, { isLoading: isLoadingResumeProcessor }] =
     useResumeProcessorMutation({
       fixedCacheKey: 'resumeProcessor',
@@ -44,7 +39,6 @@ export const useIsQueueMutationInProgress = () => {
   // });
   return (
     isLoadingEnqueueBatch ||
-    isLoadingEnqueueGraph ||
     isLoadingResumeProcessor ||
     isLoadingPauseProcessor ||
     isLoadingCancelQueue ||
@@ -83,30 +83,6 @@ export const queueApi = api.injectEndpoints({
         }
       },
     }),
-    enqueueGraph: build.mutation<
-      paths['/api/v1/queue/{queue_id}/enqueue_graph']['post']['responses']['201']['content']['application/json'],
-      paths['/api/v1/queue/{queue_id}/enqueue_graph']['post']['requestBody']['content']['application/json']
-    >({
-      query: (arg) => ({
-        url: `queue/${$queueId.get()}/enqueue_graph`,
-        body: arg,
-        method: 'POST',
-      }),
-      invalidatesTags: [
-        'SessionQueueStatus',
-        'CurrentSessionQueueItem',
-        'NextSessionQueueItem',
-      ],
-      onQueryStarted: async (arg, api) => {
-        const { dispatch, queryFulfilled } = api;
-        try {
-          await queryFulfilled;
-          resetListQueryData(dispatch);
-        } catch {
-          // no-op
-        }
-      },
-    }),
     resumeProcessor: build.mutation<
       paths['/api/v1/queue/{queue_id}/processor/resume']['put']['responses']['200']['content']['application/json'],
       void
@@ -341,7 +317,6 @@ export const queueApi = api.injectEndpoints({
 
 export const {
   useCancelByBatchIdsMutation,
-  useEnqueueGraphMutation,
   useEnqueueBatchMutation,
   usePauseProcessorMutation,
   useResumeProcessorMutation,
invokeai/frontend/web/src/services/api/schema.d.ts (vendored, 160 lines changed)
File diff suppressed because one or more lines are too long
@@ -26,7 +26,6 @@ export type BatchConfig =
   paths['/api/v1/queue/{queue_id}/enqueue_batch']['post']['requestBody']['content']['application/json'];
 
 export type EnqueueBatchResult = components['schemas']['EnqueueBatchResult'];
-export type EnqueueGraphResult = components['schemas']['EnqueueGraphResult'];
 
 /**
  * This is an unsafe type; the object inside is not guaranteed to be valid.