diff --git a/invokeai/app/services/session_processor/session_processor_default.py b/invokeai/app/services/session_processor/session_processor_default.py
index f235544405..e49f79bcf3 100644
--- a/invokeai/app/services/session_processor/session_processor_default.py
+++ b/invokeai/app/services/session_processor/session_processor_default.py
@@ -139,7 +139,7 @@ class DefaultSessionProcessor(SessionProcessorBase):
                         # Loop over invocations until the session is complete or canceled
                         while invocation is not None and not cancel_event.is_set():
                             # get the source node id to provide to clients (the prepared node id is not as useful)
-                            source_node_id = self._queue_item.session.prepared_source_mapping[invocation.id]
+                            source_invocation_id = self._queue_item.session.prepared_source_mapping[invocation.id]
 
                             # Send starting event
                             self._invoker.services.events.emit_invocation_started(
@@ -148,7 +148,7 @@ class DefaultSessionProcessor(SessionProcessorBase):
                                 queue_id=self._queue_item.queue_id,
                                 graph_execution_state_id=self._queue_item.session_id,
                                 node=invocation.model_dump(),
-                                source_node_id=source_node_id,
+                                source_node_id=source_invocation_id,
                             )
 
                             # Innermost processor try block; any unhandled exception is an invocation error & will fail the graph
@@ -159,12 +159,8 @@ class DefaultSessionProcessor(SessionProcessorBase):
                                     # Build invocation context (the node-facing API)
                                     context_data = InvocationContextData(
                                         invocation=invocation,
-                                        source_node_id=source_node_id,
-                                        session_id=self._queue_item.session.id,
-                                        workflow=self._queue_item.workflow,
-                                        queue_id=self._queue_item.queue_id,
-                                        queue_item_id=self._queue_item.item_id,
-                                        batch_id=self._queue_item.batch_id,
+                                        source_invocation_id=source_invocation_id,
+                                        queue_item=self._queue_item,
                                     )
                                     context = build_invocation_context(
                                         context_data=context_data,
@@ -187,7 +183,7 @@ class DefaultSessionProcessor(SessionProcessorBase):
                                         queue_id=self._queue_item.queue_id,
                                         graph_execution_state_id=self._queue_item.session.id,
                                         node=invocation.model_dump(),
-                                        source_node_id=source_node_id,
+                                        source_node_id=source_invocation_id,
                                         result=outputs.model_dump(),
                                     )
 
@@ -224,7 +220,7 @@ class DefaultSessionProcessor(SessionProcessorBase):
                                     queue_id=self._queue_item.queue_id,
                                     graph_execution_state_id=self._queue_item.session.id,
                                     node=invocation.model_dump(),
-                                    source_node_id=source_node_id,
+                                    source_node_id=source_invocation_id,
                                     error_type=e.__class__.__name__,
                                     error=error,
                                 )
diff --git a/invokeai/app/services/shared/invocation_context.py b/invokeai/app/services/shared/invocation_context.py
index 6b6379dc5d..6b314d10bf 100644
--- a/invokeai/app/services/shared/invocation_context.py
+++ b/invokeai/app/services/shared/invocation_context.py
@@ -13,7 +13,6 @@ from invokeai.app.services.config.config_default import InvokeAIAppConfig
 from invokeai.app.services.image_records.image_records_common import ImageCategory, ResourceOrigin
 from invokeai.app.services.images.images_common import ImageDTO
 from invokeai.app.services.invocation_services import InvocationServices
-from invokeai.app.services.workflow_records.workflow_records_common import WorkflowWithoutID
 from invokeai.app.util.step_callback import stable_diffusion_step_callback
 from invokeai.backend.model_manager.config import AnyModelConfig, BaseModelType, ModelFormat, ModelType, SubModelType
 from invokeai.backend.model_manager.load.load_base import LoadedModel
@@ -23,6 +22,7 @@ from invokeai.backend.stable_diffusion.diffusion.conditioning_data import Condit
 
 if TYPE_CHECKING:
     from invokeai.app.invocations.baseinvocation import BaseInvocation
+    from invokeai.app.services.session_queue.session_queue_common import SessionQueueItem
 
 """
 The InvocationContext provides access to various services and data about the current invocation.
@@ -49,20 +49,12 @@ Note: The docstrings are in weird places, but that's where they must be to get I
 
 @dataclass
 class InvocationContextData:
+    queue_item: "SessionQueueItem"
+    """The queue item that is being executed."""
     invocation: "BaseInvocation"
     """The invocation that is being executed."""
-    session_id: str
-    """The session that is being executed."""
-    queue_id: str
-    """The queue in which the session is being executed."""
-    source_node_id: str
-    """The ID of the node from which the currently executing invocation was prepared."""
-    queue_item_id: int
-    """The ID of the queue item that is being executed."""
-    batch_id: str
-    """The ID of the batch that is being executed."""
-    workflow: Optional[WorkflowWithoutID] = None
-    """The workflow associated with this queue item, if any."""
+    source_invocation_id: str
+    """The ID of the invocation from which the currently executing invocation was prepared."""
 
 
 class InvocationContextInterface:
@@ -191,8 +183,8 @@ class ImagesInterface(InvocationContextInterface):
             board_id=board_id_,
             metadata=metadata_,
             image_origin=ResourceOrigin.INTERNAL,
-            workflow=self._context_data.workflow,
-            session_id=self._context_data.session_id,
+            workflow=self._context_data.queue_item.workflow,
+            session_id=self._context_data.queue_item.session_id,
             node_id=self._context_data.invocation.id,
         )
 
diff --git a/invokeai/app/util/step_callback.py b/invokeai/app/util/step_callback.py
index 9c9f5254a4..8cb59f5b3a 100644
--- a/invokeai/app/util/step_callback.py
+++ b/invokeai/app/util/step_callback.py
@@ -114,12 +114,12 @@ def stable_diffusion_step_callback(
     dataURL = image_to_dataURL(image, image_format="JPEG")
 
     events.emit_generator_progress(
-        queue_id=context_data.queue_id,
-        queue_item_id=context_data.queue_item_id,
-        queue_batch_id=context_data.batch_id,
-        graph_execution_state_id=context_data.session_id,
+        queue_id=context_data.queue_item.queue_id,
+        queue_item_id=context_data.queue_item.item_id,
+        queue_batch_id=context_data.queue_item.batch_id,
+        graph_execution_state_id=context_data.queue_item.session_id,
         node_id=context_data.invocation.id,
-        source_node_id=context_data.source_node_id,
+        source_node_id=context_data.source_invocation_id,
         progress_image=ProgressImage(width=width, height=height, dataURL=dataURL),
         step=intermediate_state.step,
         order=intermediate_state.order,