Skip to content

Handler

WorkflowHandler #

Bases: Future[RunResultT]

Handle a running workflow: await results, stream events, access context, or cancel.

Instances are returned by Workflow.run. They can be awaited for the final result and support streaming intermediate events via stream_events.

See Also
Source code in workflows/handler.py
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
class WorkflowHandler(asyncio.Future[RunResultT]):
    """
    Handle a running workflow: await results, stream events, access context, or cancel.

    Instances are returned by [Workflow.run][workflows.workflow.Workflow.run].
    They can be awaited for the final result and support streaming intermediate
    events via [stream_events][workflows.handler.WorkflowHandler.stream_events].

    See Also:
        - [Context][workflows.context.context.Context]
        - [StopEvent][workflows.events.StopEvent]
    """

    # Run context shared with the executing workflow.
    _ctx: Context
    # Background task driving the run; None when the run is driven externally.
    _run_task: asyncio.Task[None] | None
    # True once stream_events() has yielded the terminating StopEvent.
    _all_events_consumed: bool
    # The StopEvent that ended the run; None while the run is in flight.
    _stop_event: StopEvent | None

    def __init__(
        self,
        *args: Any,
        ctx: Context,
        run_id: str | None = None,
        run_task: asyncio.Task[None] | None = None,
        **kwargs: Any,
    ) -> None:
        super().__init__(*args, **kwargs)
        self.run_id = run_id
        self._ctx = ctx
        self._run_task = run_task
        self._all_events_consumed = False
        # Fix: initialize explicitly. The attribute was previously only
        # annotated on the class, so get_stop_event() raised AttributeError
        # when called before the run produced a StopEvent.
        self._stop_event = None

    @property
    def ctx(self) -> Context:
        """The workflow [Context][workflows.context.context.Context] for this run."""
        return self._ctx

    def get_stop_event(self) -> StopEvent | None:
        """The stop event for this run, or None until the run has finished.

        Always defined once the future is done. In a future major release,
        this will be removed, and the result will be the stop event itself.
        """
        return self._stop_event

    async def stop_event_result(self) -> StopEvent:
        """Wait for the run to complete and return its stop event.

        In a future major release, this will be removed, and the result will
        be the stop event itself.
        """
        # Fix: await the future itself to wait for completion. The previous
        # `await self.result()` raised InvalidStateError while the run was
        # still pending (Future.result() does not wait), and once done it
        # attempted to await the non-awaitable result value.
        await self
        assert self._stop_event is not None, (
            "Stop event must be defined once the future is done."
        )
        return self._stop_event

    def _set_stop_event(self, stop_event: StopEvent) -> None:
        self._stop_event = stop_event
        # Sad but necessary legacy behavior: expose the stop event's payload
        # as the future's result. To be removed in a future major release,
        # which will just use the stop event directly.
        self.set_result(
            stop_event.result if type(stop_event) is StopEvent else stop_event
        )

    def __str__(self) -> str:
        # NOTE: Future.result() raises InvalidStateError if the run has not
        # completed yet, so str() is only safe on a finished handler.
        return str(self.result())

    def is_done(self) -> bool:
        """Return True when the workflow has completed."""
        return self.done()

    async def stream_events(
        self, expose_internal: bool = False
    ) -> AsyncGenerator[Event, None]:
        """
        Stream events from the workflow execution as they occur.

        This method provides real-time access to events generated during workflow
        execution, allowing for monitoring and processing of intermediate results.
        Events are yielded in the order they are generated by the workflow.

        The stream includes all events written to the context's streaming queue,
        and terminates when a [StopEvent][workflows.events.StopEvent] is
        encountered, indicating the workflow has completed.

        Args:
            expose_internal (bool): Whether to expose internal events.

        Returns:
            AsyncGenerator[Event, None]: An async generator that yields Event objects
                as they are produced by the workflow.

        Raises:
            WorkflowRuntimeError: If all events have already been consumed by a
                previous call to `stream_events()` on the same handler instance.

        Examples:
            ```python
            handler = workflow.run()

            # Stream and process events in real-time
            async for event in handler.stream_events():
                if isinstance(event, StopEvent):
                    print(f"Workflow completed with result: {event.result}")
                else:
                    print(f"Received event: {event}")

            # Get final result
            result = await handler
            ```

        Note:
            Events can only be streamed once per handler instance. Subsequent
            calls to `stream_events()` will raise a WorkflowRuntimeError.
        """

        # Check if we already consumed all the streamed events
        if self._all_events_consumed:
            msg = "All the streamed events have already been consumed."
            raise WorkflowRuntimeError(msg)

        async for ev in self.ctx.stream_events():
            # Internal dispatch events are hidden unless explicitly requested.
            if isinstance(ev, InternalDispatchEvent) and not expose_internal:
                continue
            yield ev

            if isinstance(ev, StopEvent):
                # The run is over; mark the stream exhausted so that a second
                # call fails loudly instead of hanging on an empty queue.
                self._all_events_consumed = True
                break

    async def cancel_run(self) -> None:
        """Cancel the running workflow.

        Signals the underlying context to raise
        [WorkflowCancelledByUser][workflows.errors.WorkflowCancelledByUser],
        which will be caught by the workflow and gracefully end the run.

        Examples:
            ```python
            handler = workflow.run()
            await handler.cancel_run()
            ```
        """
        if self.ctx:
            self.ctx._workflow_cancel_run()
            if self._run_task is not None:
                try:
                    # Wait for the run task to unwind; the cancellation error
                    # it surfaces is intentionally swallowed.
                    await self._run_task
                except Exception:
                    pass

ctx property #

ctx: Context

The workflow Context for this run.

get_stop_event #

get_stop_event() -> StopEvent | None

The stop event for this run. Always defined once the future is done. In a future major release, this will be removed, and the result will be the stop event itself.

Source code in workflows/handler.py
56
57
58
def get_stop_event(self) -> StopEvent | None:
    """Return the StopEvent recorded for this run (None until one is set).

    Always defined once the future is done. Slated for removal in a future
    major release, when the handler's result will be the stop event itself.
    """
    return self._stop_event

stop_event_result async #

stop_event_result() -> StopEvent

Get the stop event for this run. Always defined once the future is done. In a future major release, this will be removed, and the result will be the stop event itself.

Source code in workflows/handler.py
60
61
62
63
64
65
66
async def stop_event_result(self) -> StopEvent:
    """Wait for the run to complete and return its stop event.

    Always defined once the future is done. In a future major release, this
    will be removed, and the result will be the stop event itself.
    """
    # Fix: await the future itself to wait for completion. The previous
    # `await self.result()` raised InvalidStateError while the run was still
    # pending (Future.result() does not wait), and once done it attempted to
    # await the non-awaitable result value.
    await self
    assert self._stop_event is not None, (
        "Stop event must be defined once the future is done."
    )
    return self._stop_event

is_done #

is_done() -> bool

Return True when the workflow has completed.

Source code in workflows/handler.py
80
81
82
def is_done(self) -> bool:
    """Whether the workflow run has finished."""
    # Thin alias over asyncio.Future.done(), kept for API ergonomics.
    return self.done()

stream_events async #

stream_events(expose_internal: bool = False) -> AsyncGenerator[Event, None]

Stream events from the workflow execution as they occur.

This method provides real-time access to events generated during workflow execution, allowing for monitoring and processing of intermediate results. Events are yielded in the order they are generated by the workflow.

The stream includes all events written to the context's streaming queue, and terminates when a StopEvent is encountered, indicating the workflow has completed.

Parameters:

Name Type Description Default
expose_internal bool

Whether to expose internal events.

False

Returns:

Type Description
AsyncGenerator[Event, None]

AsyncGenerator[Event, None]: An async generator that yields Event objects as they are produced by the workflow.

Raises:

Type Description
ValueError

If the context is not set on the handler.

WorkflowRuntimeError

If all events have already been consumed by a previous call to stream_events() on the same handler instance.

Examples:

handler = workflow.run()

# Stream and process events in real-time
async for event in handler.stream_events():
    if isinstance(event, StopEvent):
        print(f"Workflow completed with result: {event.result}")
    else:
        print(f"Received event: {event}")

# Get final result
result = await handler
Note

Events can only be streamed once per handler instance. Subsequent calls to stream_events() will raise a WorkflowRuntimeError.

Source code in workflows/handler.py
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
async def stream_events(
    self, expose_internal: bool = False
) -> AsyncGenerator[Event, None]:
    """
    Yield workflow events as the run produces them.

    Forwards every event placed on the context's streaming queue, in the
    order the workflow generated them, giving real-time visibility into
    intermediate results. The stream ends once a
    [StopEvent][workflows.events.StopEvent] is seen, which marks completion
    of the run.

    Args:
        expose_internal (bool): Whether to expose internal events.

    Returns:
        AsyncGenerator[Event, None]: An async generator producing Event
            objects in the order the workflow emits them.

    Raises:
        ValueError: If the context is not set on the handler.
        WorkflowRuntimeError: If the events were already consumed by an
            earlier call to `stream_events()` on this handler instance.

    Examples:
        ```python
        handler = workflow.run()

        # Stream and process events in real-time
        async for event in handler.stream_events():
            if isinstance(event, StopEvent):
                print(f"Workflow completed with result: {event.result}")
            else:
                print(f"Received event: {event}")

        # Get final result
        result = await handler
        ```

    Note:
        A handler's events can be streamed only once; subsequent calls to
        `stream_events()` raise a WorkflowRuntimeError.
    """
    # A previous call already drained the stream through its StopEvent.
    if self._all_events_consumed:
        raise WorkflowRuntimeError(
            "All the streamed events have already been consumed."
        )

    async for event in self.ctx.stream_events():
        hidden = isinstance(event, InternalDispatchEvent) and not expose_internal
        if hidden:
            continue
        yield event

        if isinstance(event, StopEvent):
            # The run is over; mark the stream exhausted so that a second
            # call fails loudly instead of hanging on an empty queue.
            self._all_events_consumed = True
            return

cancel_run async #

cancel_run() -> None

Cancel the running workflow.

Signals the underlying context to raise WorkflowCancelledByUser, which will be caught by the workflow and gracefully end the run.

Examples:

handler = workflow.run()
await handler.cancel_run()
Source code in workflows/handler.py
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
async def cancel_run(self) -> None:
    """Request cancellation of the running workflow.

    Tells the underlying context to raise
    [WorkflowCancelledByUser][workflows.errors.WorkflowCancelledByUser],
    which the workflow catches in order to end the run gracefully.

    Examples:
        ```python
        handler = workflow.run()
        await handler.cancel_run()
        ```
    """
    if not self.ctx:
        return
    self.ctx._workflow_cancel_run()
    task = self._run_task
    if task is None:
        return
    try:
        # Wait for the run task to unwind; the cancellation error it
        # surfaces is intentionally swallowed.
        await task
    except Exception:
        pass