diff --git a/pyproject.toml b/pyproject.toml index 9e0987b6b..1168373ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,7 @@ opentelemetry = ["opentelemetry-api>=1.11.1,<2", "opentelemetry-sdk>=1.11.1,<2"] pydantic = ["pydantic>=2.0.0,<3"] openai-agents = ["openai-agents>=0.14.0", "mcp>=1.9.4, <2"] google-adk = ["google-adk>=1.27.0,<2"] +langgraph = ["langgraph>=1.1.0"] langsmith = ["langsmith>=0.7.0,<0.8"] lambda-worker-otel = [ "opentelemetry-api>=1.11.1,<2", @@ -79,6 +80,7 @@ dev = [ "pytest-rerunfailures>=16.1", "pytest-xdist>=3.6,<4", "moto[s3,server]>=5", + "langgraph>=1.1.0", "langsmith>=0.7.0,<0.8", "setuptools<82", "opentelemetry-exporter-otlp-proto-grpc>=1.11.1,<2", diff --git a/temporalio/contrib/langgraph/README.md b/temporalio/contrib/langgraph/README.md new file mode 100644 index 000000000..0189092a6 --- /dev/null +++ b/temporalio/contrib/langgraph/README.md @@ -0,0 +1,156 @@ +# LangGraph Plugin for Temporal Python SDK + +⚠️ **This package is currently at an experimental release stage.** ⚠️ + +This Temporal [Plugin](https://docs.temporal.io/develop/plugins-guide) allows you to run [LangGraph](https://www.langchain.com/langgraph) nodes and tasks as Temporal Activities, giving your AI workflows durable execution, automatic retries, and timeouts. It supports both the LangGraph Graph API (``StateGraph``) and Functional API (``@entrypoint`` / ``@task``). + +## Installation + +```sh +uv add temporalio[langgraph] +``` + +## Plugin Initialization + +### Graph API + +```python +from temporalio.contrib.langgraph import LangGraphPlugin + +plugin = LangGraphPlugin(graphs={"my-graph": graph}) +``` + +### Functional API + +```python +import datetime +from temporalio.contrib.langgraph import LangGraphPlugin + +plugin = LangGraphPlugin( + entrypoints={"my_entrypoint": my_entrypoint}, + tasks=[my_task], +) +``` + +## Checkpointer + +If your LangGraph code requires a checkpointer (for example, if you're using interrupts), use `InMemorySaver`. 
+Temporal handles durability, so third-party checkpointers (like PostgreSQL or Redis) are not needed. + +```python +import langgraph.checkpoint.memory +import typing + +from temporalio.contrib.langgraph import graph +from temporalio import workflow + +@workflow.defn +class MyWorkflow: + @workflow.run + async def run(self, input: str) -> typing.Any: + g = graph("my-graph").compile( + checkpointer=langgraph.checkpoint.memory.InMemorySaver(), + ) + + ... +``` + +## Activity Options + +Options are passed through to [`workflow.execute_activity()`](https://python.temporal.io/temporalio.workflow.html#execute_activity), which supports parameters like `start_to_close_timeout`, `retry_policy`, `schedule_to_close_timeout`, `heartbeat_timeout`, and more. + +### Graph API + +Pass Activity options as node `metadata` when calling `add_node`: + +```python +from datetime import timedelta +from temporalio.common import RetryPolicy + +g = StateGraph(State) +g.add_node("my_node", my_node, metadata={ + "start_to_close_timeout": timedelta(seconds=30), + "retry_policy": RetryPolicy(maximum_attempts=3), +}) +``` + +### Functional API + +Pass Activity options to the `LangGraphPlugin` constructor, keyed by task function name: + +```python +from datetime import timedelta +from temporalio.common import RetryPolicy +from temporalio.contrib.langgraph import LangGraphPlugin + +plugin = LangGraphPlugin( + entrypoints={"my_entrypoint": my_entrypoint}, + tasks=[my_task], + activity_options={ + "my_task": { + "start_to_close_timeout": timedelta(seconds=30), + "retry_policy": RetryPolicy(maximum_attempts=3), + }, + }, +) +``` + +### Runtime Context + +LangGraph's run-scoped context (`context_schema`) is reconstructed on the Activity side, so nodes and tasks can read from and write to `runtime.context`: + +```python +from langgraph.runtime import Runtime +from typing_extensions import TypedDict + +from temporalio.contrib.langgraph import graph + +class Context(TypedDict): + user_id: str + +async def 
my_node(state: State, runtime: Runtime[Context]) -> dict: + return {"user": runtime.context["user_id"]} + +# In the Workflow: +g = graph("my-graph").compile() +await g.ainvoke({...}, context=Context(user_id="alice")) +``` + +Your `context` object must be serializable by the configured Temporal payload converter, since it crosses the Activity boundary. + +## Stores are not supported + +LangGraph's `Store` (e.g. `InMemoryStore` passed via `graph.compile(store=...)` or `@entrypoint(store=...)`) isn't accessible inside Activity-wrapped nodes: the Store holds live state that can't cross the Activity boundary, and Activities may run on a different worker than the Workflow. If you pass a store, the plugin logs a warning on first use and `runtime.store` is `None` inside nodes. + +Use Workflow state for per-run memory, or an external database (Postgres/Redis/etc.) configured on each worker if you need shared memory across runs. + +## Running in the Workflow + +To run a node or task directly in the Workflow, set `execute_in` to `"workflow"`: + +```python +# Graph API +graph.add_node("my_node", my_node, metadata={"execute_in": "workflow"}) + +# Functional API +plugin = LangGraphPlugin( + tasks=[my_task], + activity_options={"my_task": {"execute_in": "workflow"}}, +) +``` + +## Running Tests + +Install dependencies: + +```sh +uv sync --all-extras +``` + +Run the test suite: + +```sh +uv run pytest tests/contrib/langgraph +``` + +Tests start a local Temporal dev server automatically — no external server needed. diff --git a/temporalio/contrib/langgraph/__init__.py b/temporalio/contrib/langgraph/__init__.py new file mode 100644 index 000000000..c12d459a6 --- /dev/null +++ b/temporalio/contrib/langgraph/__init__.py @@ -0,0 +1,25 @@ +"""LangGraph plugin for Temporal SDK. + +.. warning:: + This package is experimental and may change in future versions. + Use with caution in production environments. 
+ +This plugin runs `LangGraph `_ nodes +and tasks as Temporal Activities, giving your AI agent workflows durable +execution, automatic retries, and timeouts. It supports both the LangGraph Graph +API (``StateGraph``) and Functional API (``@entrypoint`` / ``@task``). +""" + +from temporalio.contrib.langgraph._plugin import ( + LangGraphPlugin, + cache, + entrypoint, + graph, +) + +__all__ = [ + "LangGraphPlugin", + "entrypoint", + "cache", + "graph", +] diff --git a/temporalio/contrib/langgraph/_activity.py b/temporalio/contrib/langgraph/_activity.py new file mode 100644 index 000000000..c9be05849 --- /dev/null +++ b/temporalio/contrib/langgraph/_activity.py @@ -0,0 +1,155 @@ +"""Activity wrappers for executing LangGraph nodes and tasks.""" + +from collections.abc import Awaitable +from dataclasses import dataclass +from inspect import iscoroutinefunction, signature +from typing import Any, Callable + +from langgraph.errors import GraphInterrupt +from langgraph.types import Command, Interrupt + +from temporalio import workflow +from temporalio.contrib.langgraph._langgraph_config import ( + get_langgraph_config, + set_langgraph_config, +) +from temporalio.contrib.langgraph._task_cache import ( + cache_key, + cache_lookup, + cache_put, +) + +# Per-run dedupe so we only warn once when a user passes a Store via +# graph.compile(store=...) / @entrypoint(store=...). Cleared by +# LangGraphInterceptor.execute_workflow on workflow exit. +_warned_store_runs: set[str] = set() + + +def clear_store_warning(run_id: str) -> None: + """Drop the store-warning dedupe entry for a workflow run.""" + _warned_store_runs.discard(run_id) + + +@dataclass +class ActivityInput: + """Input for a LangGraph activity, containing args, kwargs, and config.""" + + args: tuple[Any, ...] 
+ kwargs: dict[str, Any] + langgraph_config: dict[str, Any] + + +@dataclass +class ActivityOutput: + """Output from an Activity, containing result, command, or interrupts.""" + + result: Any = None + langgraph_command: Any = None + langgraph_interrupts: tuple[Interrupt] | None = None + + +def wrap_activity( + func: Callable, +) -> Callable[[ActivityInput], Awaitable[ActivityOutput]]: + """Wrap a function as a Temporal activity that handles LangGraph config and interrupts.""" + # Graph nodes declare `runtime: Runtime[Ctx]` in their signature; tasks + # don't and instead reach for Runtime via get_runtime(). We re-inject the + # reconstructed Runtime only when the user function asks. + accepts_runtime = "runtime" in signature(func).parameters + + async def wrapper(input: ActivityInput) -> ActivityOutput: + runtime = set_langgraph_config(input.langgraph_config) + kwargs = dict(input.kwargs) + if accepts_runtime: + kwargs["runtime"] = runtime + try: + if iscoroutinefunction(func): + result = await func(*input.args, **kwargs) + else: + result = func(*input.args, **kwargs) + if isinstance(result, Command): + return ActivityOutput(langgraph_command=result) + return ActivityOutput(result=result) + except GraphInterrupt as e: + return ActivityOutput(langgraph_interrupts=e.args[0]) + + return wrapper + + +def wrap_execute_activity( + afunc: Callable[[ActivityInput], Awaitable[ActivityOutput]], + task_id: str = "", + **execute_activity_kwargs: Any, +) -> Callable[..., Any]: + """Wrap an activity function to be called via workflow.execute_activity with caching.""" + + async def wrapper(*args: Any, **kwargs: Any) -> Any: + # LangGraph may inject a RunnableConfig as the 'config' kwarg. Strip it + # down to a serializable subset (metadata + tags) so it can cross the + # activity boundary; callbacks, stores, etc. aren't serializable. 
+ if "config" in kwargs: + orig = kwargs["config"] or {} + kwargs["config"] = { + "metadata": dict(orig.get("metadata") or {}), + "tags": list(orig.get("tags") or []), + } + + # LangGraph may inject a Runtime as the 'runtime' kwarg. It's + # reconstructed on the activity side from the serialized langgraph + # config, so drop the live Runtime from the kwargs that cross the + # activity boundary (it holds non-serializable stream_writer, store). + runtime = kwargs.pop("runtime", None) + run_id = workflow.info().run_id + if ( + getattr(runtime, "store", None) is not None + and run_id not in _warned_store_runs + ): + _warned_store_runs.add(run_id) + workflow.logger.warning( + "LangGraph Store passed via compile(store=...) / @entrypoint(store=...) " + "is not accessible inside activity-wrapped nodes and tasks: the Store " + "object isn't serializable across the activity boundary, and activities " + "may run on a different worker than the workflow. Use a backend-backed " + "store (Postgres/Redis) configured on each worker if you need shared " + "memory, or use workflow state for per-run memory." + ) + + langgraph_config = get_langgraph_config() + + # Check task result cache (for continue-as-new deduplication). + key = ( + cache_key(task_id, args, kwargs, langgraph_config.get("context")) + if task_id + else "" + ) + if task_id: + found, cached = cache_lookup(key) + if found: + return cached + + input = ActivityInput( + args=args, kwargs=kwargs, langgraph_config=langgraph_config + ) + output = await workflow.execute_activity( + afunc, input, **execute_activity_kwargs + ) + if output.langgraph_interrupts is not None: + raise GraphInterrupt(output.langgraph_interrupts) + + result = output.result + if output.langgraph_command is not None: + cmd = output.langgraph_command + result = Command( + graph=cmd["graph"], + update=cmd["update"], + resume=cmd["resume"], + goto=cmd["goto"], + ) + + # Store in cache for future continue-as-new cycles. 
+ if task_id: + cache_put(key, result) + + return result + + return wrapper diff --git a/temporalio/contrib/langgraph/_interceptor.py b/temporalio/contrib/langgraph/_interceptor.py new file mode 100644 index 000000000..fd583c052 --- /dev/null +++ b/temporalio/contrib/langgraph/_interceptor.py @@ -0,0 +1,61 @@ +"""Workflow interceptor that scopes LangGraph graphs/entrypoints to the workflow run.""" + +# pyright: reportMissingTypeStubs=false + +from __future__ import annotations + +from typing import Any + +from langgraph.graph import StateGraph +from langgraph.pregel import Pregel + +from temporalio import workflow +from temporalio.contrib.langgraph._activity import clear_store_warning +from temporalio.worker import ( + ExecuteWorkflowInput, + Interceptor, + WorkflowInboundInterceptor, + WorkflowInterceptorClassInput, + WorkflowOutboundInterceptor, +) + +_workflow_graphs: dict[str, dict[str, StateGraph[Any, Any, Any, Any]]] = {} +_workflow_entrypoints: dict[str, dict[str, Pregel[Any, Any, Any, Any]]] = {} + + +class LangGraphInterceptor(Interceptor): + """Interceptor that registers a workflow's graphs and entrypoints for the run.""" + + def __init__( + self, + graphs: dict[str, StateGraph[Any, Any, Any, Any]], + entrypoints: dict[str, Pregel[Any, Any, Any, Any]], + ) -> None: + """Initialize with the graphs and entrypoints to scope to each workflow run.""" + self._graphs = graphs + self._entrypoints = entrypoints + + def workflow_interceptor_class( + self, input: WorkflowInterceptorClassInput + ) -> type[WorkflowInboundInterceptor]: + """Return the inbound interceptor class used to scope graphs per run.""" + graphs = self._graphs + entrypoints = self._entrypoints + + class Inbound(WorkflowInboundInterceptor): + def init(self, outbound: WorkflowOutboundInterceptor) -> None: + run_id = outbound.info().run_id + _workflow_graphs[run_id] = graphs + _workflow_entrypoints[run_id] = entrypoints + super().init(outbound) + + async def execute_workflow(self, input: 
ExecuteWorkflowInput) -> Any: + try: + return await self.next.execute_workflow(input) + finally: + run_id = workflow.info().run_id + _workflow_graphs.pop(run_id, None) + _workflow_entrypoints.pop(run_id, None) + clear_store_warning(run_id) + + return Inbound diff --git a/temporalio/contrib/langgraph/_langgraph_config.py b/temporalio/contrib/langgraph/_langgraph_config.py new file mode 100644 index 000000000..23a2cd9ae --- /dev/null +++ b/temporalio/contrib/langgraph/_langgraph_config.py @@ -0,0 +1,83 @@ +"""LangGraph configuration management for Temporal workflows.""" + +# pyright: reportMissingTypeStubs=false + +from typing import Any + +from langchain_core.runnables.config import var_child_runnable_config +from langgraph._internal._constants import ( + CONFIG_KEY_CHECKPOINT_NS, + CONFIG_KEY_RUNTIME, + CONFIG_KEY_SCRATCHPAD, + CONFIG_KEY_SEND, +) +from langgraph.graph.state import RunnableConfig +from langgraph.pregel._algo import LazyAtomicCounter, PregelScratchpad +from langgraph.runtime import Runtime + + +def get_langgraph_config() -> dict[str, Any]: + """Get the current LangGraph runnable config as a serializable dict.""" + config = var_child_runnable_config.get() or {} + configurable = config.get("configurable") or {} + scratchpad = configurable.get(CONFIG_KEY_SCRATCHPAD) + runtime = configurable.get(CONFIG_KEY_RUNTIME) + + return { + "configurable": { + CONFIG_KEY_CHECKPOINT_NS: configurable.get(CONFIG_KEY_CHECKPOINT_NS), + CONFIG_KEY_SCRATCHPAD: { + "step": getattr(scratchpad, "step", 0), + "stop": getattr(scratchpad, "stop", 0), + "resume": list(getattr(scratchpad, "resume", [])), + "null_resume": scratchpad.get_null_resume() if scratchpad else None, + }, + }, + "context": getattr(runtime, "context", None), + } + + +def set_langgraph_config(config: dict[str, Any]) -> Runtime: + """Restore a LangGraph runnable config from a serialized dict. 
+ + Returns the reconstructed Runtime so callers can re-inject it into the + user function's kwargs without needing to know the configurable layout. + """ + configurable = config.get("configurable") or {} + scratchpad = configurable.get(CONFIG_KEY_SCRATCHPAD) or {} + null_resume_box = [scratchpad.get("null_resume")] + + def get_null_resume(consume: bool = False) -> Any: + val = null_resume_box[0] + if consume and val is not None: + null_resume_box[0] = None + return val + + runtime = Runtime( + context=config.get("context"), + stream_writer=lambda _: None, + ) + + var_child_runnable_config.set( + RunnableConfig( + { + "configurable": { + CONFIG_KEY_CHECKPOINT_NS: configurable.get( + CONFIG_KEY_CHECKPOINT_NS + ), + CONFIG_KEY_SCRATCHPAD: PregelScratchpad( + step=scratchpad.get("step", 0), + stop=scratchpad.get("stop", 0), + call_counter=LazyAtomicCounter(), + interrupt_counter=LazyAtomicCounter(), + get_null_resume=get_null_resume, + resume=list(scratchpad.get("resume", [])), + subgraph_counter=LazyAtomicCounter(), + ), + CONFIG_KEY_SEND: lambda _: None, + CONFIG_KEY_RUNTIME: runtime, + }, + } + ) + ) + return runtime diff --git a/temporalio/contrib/langgraph/_plugin.py b/temporalio/contrib/langgraph/_plugin.py new file mode 100644 index 000000000..e7cde6e56 --- /dev/null +++ b/temporalio/contrib/langgraph/_plugin.py @@ -0,0 +1,242 @@ +"""LangGraph plugin for running LangGraph nodes and tasks as Temporal activities.""" + +# pyright: reportMissingTypeStubs=false + +from __future__ import annotations + +import inspect +import sys +import warnings +from dataclasses import replace +from typing import Any, Callable + +from langgraph._internal._runnable import RunnableCallable +from langgraph.graph import StateGraph +from langgraph.pregel import Pregel + +from temporalio import activity, workflow +from temporalio.contrib.langgraph._activity import wrap_activity, wrap_execute_activity +from temporalio.contrib.langgraph._interceptor import ( + LangGraphInterceptor, + 
_workflow_entrypoints, + _workflow_graphs, +) +from temporalio.contrib.langgraph._task_cache import ( + get_task_cache, + set_task_cache, + task_id, +) +from temporalio.plugin import SimplePlugin +from temporalio.worker import WorkflowRunner +from temporalio.worker.workflow_sandbox import SandboxedWorkflowRunner + +_ACTIVITY_OPTION_KEYS: frozenset[str] = frozenset( + {"execute_in", *inspect.signature(workflow.execute_activity).parameters} +) + + +class LangGraphPlugin(SimplePlugin): + """LangGraph plugin for Temporal SDK. + + .. warning:: + This package is experimental and may change in future versions. + Use with caution in production environments. + + This plugin runs `LangGraph `_ nodes + and tasks as Temporal Activities, giving your AI agent workflows durable + execution, automatic retries, and timeouts. It supports both the LangGraph Graph + API (``StateGraph``) and Functional API (``@entrypoint`` / ``@task``). + """ + + def __init__( + self, + # Graph API + graphs: dict[str, StateGraph[Any, Any, Any, Any]] | None = None, + # Functional API + entrypoints: dict[str, Pregel[Any, Any, Any, Any]] | None = None, + tasks: list | None = None, + # TODO: Remove activity_options when we have support for @task(metadata=...) + activity_options: dict[str, dict[str, Any]] | None = None, + default_activity_options: dict[str, Any] | None = None, + ): + """Initialize the LangGraph plugin with graphs, entrypoints, and tasks.""" + if sys.version_info < (3, 11): + warnings.warn( # type: ignore[reportUnreachable] + "LangGraphPlugin requires Python >= 3.11 for full async support. " + "On older versions, the Functional API (@task/@entrypoint) and " + "interrupt() will not work because LangGraph relies on " + "contextvars propagation through asyncio.create_task(), which is " + "only available in Python 3.11+. 
See " + "https://reference.langchain.com/python/langgraph/config/get_store/", + stacklevel=2, + ) + + self.activities: list = [] + + # Graph API: Wrap graph nodes as Temporal Activities. + if graphs: + for graph_name, graph in graphs.items(): + for node_name, node in graph.nodes.items(): + if node.retry_policy: + raise ValueError( + f"Node {graph_name}.{node_name} has a LangGraph " + f"retry_policy set. Use Temporal activity options " + f"instead, e.g. pass retry_policy=RetryPolicy(...) " + f"via default_activity_options or in the node's " + f"metadata dict." + ) + runnable = node.runnable + if not isinstance(runnable, RunnableCallable): + raise ValueError(f"Node {node_name} must be a RunnableCallable") + user_func = runnable.afunc or runnable.func + if user_func is None: + raise ValueError(f"Node {node_name} must have a function") + # Keep 'config' (for metadata/tags) and 'runtime' (for + # context + store — reconstructed on the activity side). + # Drop writer/etc., which hold non-serializable objects + # that can't cross the activity boundary. + runnable.func_accepts = { + k: v + for k, v in runnable.func_accepts.items() + if k in ("config", "runtime") + } + # Split node.metadata into activity options vs. user + # metadata. Activity-option keys (timeouts, retry policy, + # etc.) become kwargs to workflow.execute_activity; user + # keys stay on node.metadata so LangGraph exposes them to + # the node function via config["metadata"]. + node_meta = node.metadata or {} + node_opts = { + k: v for k, v in node_meta.items() if k in _ACTIVITY_OPTION_KEYS + } + node.metadata = { + k: v + for k, v in node_meta.items() + if k not in _ACTIVITY_OPTION_KEYS + } + opts = {**(default_activity_options or {}), **node_opts} + # Route all LangGraph node calls through afunc so the async + # activity wrapper is always used. wrap_activity handles + # sync vs. async user functions inside the activity itself. 
+ runnable.afunc = self.execute( + f"{graph_name}.{node_name}", user_func, opts + ) + runnable.func = None + + # Functional API: Wrap @task functions as Temporal Activities. + if tasks: + for task in tasks: + name = task.func.__name__ + if task.retry_policy: + raise ValueError( + f"Task {name} has a LangGraph retry_policy set. " + f"Use Temporal activity options instead, e.g. pass " + f"retry_policy=RetryPolicy(...) via " + f"default_activity_options or activity_options[{name!r}]." + ) + opts = { + **(default_activity_options or {}), + **(activity_options or {}).get(name, {}), + } + + task.func = self.execute(task_id(task.func), task.func, opts) + task.func.__name__ = name + task.func.__qualname__ = getattr(task.func, "__qualname__", name) + + def workflow_runner(runner: WorkflowRunner | None) -> WorkflowRunner: + if not runner: + raise ValueError("No WorkflowRunner provided to the LangGraph plugin.") + if isinstance(runner, SandboxedWorkflowRunner): + return replace( + runner, + restrictions=runner.restrictions.with_passthrough_modules( + "langchain", + "langchain_core", + "langgraph", + "langsmith", + "numpy", # LangSmith uses numpy + ), + ) + return runner + + super().__init__( + "langchain.LangGraphPlugin", + activities=self.activities, + workflow_runner=workflow_runner, + interceptors=[LangGraphInterceptor(graphs or {}, entrypoints or {})], + ) + + def execute( + self, + activity_name: str, + func: Callable, + kwargs: dict[str, Any] | None = None, + ) -> Callable: + """Prepare a node or task to execute as an activity or inline in the workflow.""" + opts = kwargs or {} + execute_in = opts.pop("execute_in", "activity") + + if execute_in == "activity": + a = activity.defn(name=activity_name)(wrap_activity(func)) + self.activities.append(a) + return wrap_execute_activity(a, task_id=task_id(func), **opts) + elif execute_in == "workflow": + return func + else: + raise ValueError(f"Invalid execute_in value: {execute_in}") + + +def graph( + name: str, cache: dict[str, 
Any] | None = None +) -> StateGraph[Any, Any, Any, Any]: + """Retrieve a registered graph by name. + + Args: + name: Graph name as registered with LangGraphPlugin. + cache: Optional task result cache from a previous cache() call. + Restores cached results so previously-completed nodes are + not re-executed after continue-as-new. + """ + set_task_cache(cache or {}) + graphs = _workflow_graphs.get(workflow.info().run_id) + if graphs is None: + raise RuntimeError( + "graph() must be called from inside a workflow running under LangGraphPlugin" + ) + if name not in graphs: + raise KeyError(f"Graph {name!r} not found. Available graphs: {list(graphs)}") + return graphs[name] + + +def entrypoint( + name: str, cache: dict[str, Any] | None = None +) -> Pregel[Any, Any, Any, Any]: + """Retrieve a registered entrypoint by name. + + Args: + name: Entrypoint name as registered with Plugin. + cache: Optional task result cache from a previous cache() call. + Restores cached results so previously-completed tasks are + not re-executed after continue-as-new. + """ + set_task_cache(cache or {}) + entrypoints = _workflow_entrypoints.get(workflow.info().run_id) + if entrypoints is None: + raise RuntimeError( + "entrypoint() must be called from inside a workflow running under LangGraphPlugin" + ) + if name not in entrypoints: + raise KeyError( + f"Entrypoint {name!r} not found. Available entrypoints: {list(entrypoints)}" + ) + return entrypoints[name] + + +def cache() -> dict[str, Any] | None: + """Return the task result cache as a serializable dict. + + Returns a dict suitable for passing to entrypoint(name, cache=...) to + restore cached task results across continue-as-new boundaries. + Returns None if the cache is empty. 
+ """ + return get_task_cache() or None diff --git a/temporalio/contrib/langgraph/_task_cache.py b/temporalio/contrib/langgraph/_task_cache.py new file mode 100644 index 000000000..ab3e683d7 --- /dev/null +++ b/temporalio/contrib/langgraph/_task_cache.py @@ -0,0 +1,83 @@ +"""Task result cache for continue-as-new support. + +Caches task results by (module.qualname, args, kwargs) hash so that previously +completed tasks are not re-executed after a continue-as-new. The cache state +is a plain dict that can travel through workflow.continue_as_new(). +""" + +from __future__ import annotations + +from contextvars import ContextVar +from hashlib import sha256 +from json import dumps +from typing import Any + +_task_cache: ContextVar[dict[str, Any] | None] = ContextVar( + "_temporal_task_cache", default=None +) + + +def set_task_cache(cache: dict[str, Any] | None) -> None: + """Set the task result cache for the current context.""" + _task_cache.set(cache) + + +def get_task_cache() -> dict[str, Any] | None: + """Get the task result cache for the current context.""" + return _task_cache.get() + + +def task_id(func: Any) -> str: + """Return the fully-qualified module.qualname for a function. + + Raises ValueError for functions that cannot be identified unambiguously + (lambdas, closures, __main__ functions). + """ + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) or getattr(func, "__name__", None) + + if module is None or qualname is None: + raise ValueError( + f"Cannot identify task {func}: missing __module__ or __qualname__. " + "Tasks must be defined at module level." + ) + if module == "__main__": + raise ValueError( + f"Cannot identify task {qualname}: defined in __main__. " + "Tasks must be importable from a named module." + ) + if "" in qualname: + raise ValueError( + f"Cannot identify task {qualname}: closures/local functions are not supported. " + "Tasks must be defined at module level." 
+ ) + return f"{module}.{qualname}" + + +def cache_key( + task_id: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + context: Any = None, +) -> str: + """Build a cache key from the full task identifier, arguments, and runtime context.""" + try: + key_str = dumps([task_id, args, kwargs, context], sort_keys=True, default=str) + except (TypeError, ValueError): + key_str = repr([task_id, args, kwargs, context]) + return sha256(key_str.encode()).hexdigest()[:32] + + +def cache_lookup(key: str) -> tuple[bool, Any]: + """Return (True, value) if cached, (False, None) otherwise.""" + cache = _task_cache.get() + if cache is not None and key in cache: + return True, cache[key] + return False, None + + +def cache_put(key: str, value: Any) -> None: + """Store a value in the task result cache.""" + cache = _task_cache.get() + if cache is not None: + cache[key] = value diff --git a/temporalio/contrib/langsmith/_plugin.py b/temporalio/contrib/langsmith/_plugin.py index 6e9fba0ee..789c93414 100644 --- a/temporalio/contrib/langsmith/_plugin.py +++ b/temporalio/contrib/langsmith/_plugin.py @@ -9,6 +9,15 @@ import langsmith +# langsmith conditionally imports langchain_core when it is installed. +# Pre-import the lazily-loaded submodule so it is in sys.modules before the +# workflow sandbox starts; otherwise the sandbox's __getattr__-triggered +# import hits restrictions on concurrent.futures.ThreadPoolExecutor. 
+try: + import langchain_core.runnables.config # noqa: F401 # pyright: ignore[reportUnusedImport] +except ImportError: + pass + from temporalio.contrib.langsmith._interceptor import LangSmithInterceptor from temporalio.plugin import SimplePlugin from temporalio.worker import WorkflowRunner @@ -62,7 +71,8 @@ def workflow_runner(runner: WorkflowRunner | None) -> WorkflowRunner: return dataclasses.replace( runner, restrictions=runner.restrictions.with_passthrough_modules( - "langsmith" + "langsmith", + "langchain_core", ), ) return runner diff --git a/tests/contrib/langgraph/__init__.py b/tests/contrib/langgraph/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/contrib/langgraph/e2e_functional_entrypoints.py b/tests/contrib/langgraph/e2e_functional_entrypoints.py new file mode 100644 index 000000000..7f16abe8d --- /dev/null +++ b/tests/contrib/langgraph/e2e_functional_entrypoints.py @@ -0,0 +1,144 @@ +"""Functional API entrypoint definitions for E2E tests. + +These define @task and @entrypoint functions used in functional API E2E tests. 
+""" + +from __future__ import annotations + +import asyncio + +import langgraph.types +from langgraph.func import entrypoint, task # pyright: ignore[reportMissingTypeStubs] + + +@task +def double_value(x: int) -> int: + return x * 2 + + +@task +def add_ten(x: int) -> int: + return x + 10 + + +@entrypoint() +async def simple_functional_entrypoint(value: int) -> dict: + doubled = await double_value(value) + result = await add_ten(doubled) + return {"result": result} + + +# Track task execution count for continue-as-new testing +_task_execution_counts: dict[str, int] = {} + + +def get_task_execution_counts() -> dict[str, int]: + return _task_execution_counts.copy() + + +def reset_task_execution_counts() -> None: + _task_execution_counts.clear() + + +@task +def expensive_task_a(x: int) -> int: + _task_execution_counts["task_a"] = _task_execution_counts.get("task_a", 0) + 1 + return x * 3 + + +@task +def expensive_task_b(x: int) -> int: + _task_execution_counts["task_b"] = _task_execution_counts.get("task_b", 0) + 1 + return x + 100 + + +@task +def expensive_task_c(x: int) -> int: + _task_execution_counts["task_c"] = _task_execution_counts.get("task_c", 0) + 1 + return x * 2 + + +@entrypoint() +async def continue_as_new_entrypoint(value: int) -> dict: + """For input 10: 10 * 3 = 30 -> 30 + 100 = 130 -> 130 * 2 = 260""" + result_a = await expensive_task_a(value) + result_b = await expensive_task_b(result_a) + result_c = await expensive_task_c(result_b) + return {"result": result_c} + + +@task +def step_1(x: int) -> int: + _task_execution_counts["step_1"] = _task_execution_counts.get("step_1", 0) + 1 + return x * 2 + + +@task +def step_2(x: int) -> int: + _task_execution_counts["step_2"] = _task_execution_counts.get("step_2", 0) + 1 + return x + 5 + + +@task +def step_3(x: int) -> int: + _task_execution_counts["step_3"] = _task_execution_counts.get("step_3", 0) + 1 + return x * 3 + + +@task +def step_4(x: int) -> int: + _task_execution_counts["step_4"] = 
_task_execution_counts.get("step_4", 0) + 1 + return x - 10 + + +@task +def step_5(x: int) -> int: + _task_execution_counts["step_5"] = _task_execution_counts.get("step_5", 0) + 1 + return x + 100 + + +@entrypoint() +async def partial_execution_entrypoint(input_data: dict) -> dict: + """For value=10, all 5 tasks: 10*2=20 -> +5=25 -> *3=75 -> -10=65 -> +100=165""" + value = input_data["value"] + stop_after = input_data.get("stop_after", 5) + + result = value + result = await step_1(result) + if stop_after == 1: + return {"result": result, "completed_tasks": 1} + result = await step_2(result) + if stop_after == 2: + return {"result": result, "completed_tasks": 2} + result = await step_3(result) + if stop_after == 3: + return {"result": result, "completed_tasks": 3} + result = await step_4(result) + if stop_after == 4: + return {"result": result, "completed_tasks": 4} + result = await step_5(result) + return {"result": result, "completed_tasks": 5} + + +@task +def ask_human(question: str) -> str: + return langgraph.types.interrupt(question) + + +@entrypoint() +async def interrupt_entrypoint(value: str) -> dict: + """Entrypoint that interrupts for human input, then returns the answer.""" + answer = await ask_human("Do you approve?") + return {"input": value, "answer": answer} + + +@task +async def slow_task(x: int) -> int: + await asyncio.sleep(1) + return x + + +@entrypoint() +async def slow_entrypoint(value: int) -> dict: + result = await slow_task(value) + return {"result": result} diff --git a/tests/contrib/langgraph/e2e_functional_workflows.py b/tests/contrib/langgraph/e2e_functional_workflows.py new file mode 100644 index 000000000..d355bdb28 --- /dev/null +++ b/tests/contrib/langgraph/e2e_functional_workflows.py @@ -0,0 +1,97 @@ +"""Workflow definitions for Functional API E2E tests.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any + +from temporalio import workflow +from temporalio.contrib.langgraph import 
@workflow.defn
class SimpleFunctionalE2EWorkflow:
    # Minimal workflow wrapping a plugin-registered entrypoint by name.
    def __init__(self) -> None:
        self.app = entrypoint("e2e_simple_functional")

    @workflow.run
    async def run(self, input_value: int) -> dict:
        return await self.app.ainvoke(input_value)


@workflow.defn
class SlowFunctionalWorkflow:
    def __init__(self) -> None:
        self.app = entrypoint("e2e_slow_functional")

    @workflow.run
    async def run(self, input_value: int) -> dict:
        return await self.app.ainvoke(input_value)


@dataclass
class ContinueAsNewInput:
    value: int
    cache: dict[str, Any] | None = None  # task-result cache carried across runs
    task_a_done: bool = False
    task_b_done: bool = False


@workflow.defn
class ContinueAsNewFunctionalWorkflow:
    """Continues-as-new after each phase, passing cache for task deduplication."""

    @workflow.run
    async def run(self, input_data: ContinueAsNewInput) -> dict[str, Any]:
        app = entrypoint("e2e_continue_as_new_functional", cache=input_data.cache)

        result = await app.ainvoke(input_data.value)

        if not input_data.task_a_done:
            # cache() captures completed task results so the next run skips them.
            workflow.continue_as_new(
                ContinueAsNewInput(
                    value=input_data.value,
                    cache=cache(),
                    task_a_done=True,
                )
            )

        if not input_data.task_b_done:
            workflow.continue_as_new(
                ContinueAsNewInput(
                    value=input_data.value,
                    cache=cache(),
                    task_a_done=True,
                    task_b_done=True,
                )
            )

        return result


@dataclass
class PartialExecutionInput:
    value: int
    cache: dict[str, Any] | None = None
    phase: int = 1


@workflow.defn
class PartialExecutionWorkflow:
    """Phase 1: 3 tasks + cache. Phase 2: all 5 (1-3 cached)."""

    @workflow.run
    async def run(self, input_data: PartialExecutionInput) -> dict[str, Any]:
        app = entrypoint("e2e_partial_execution", cache=input_data.cache)

        if input_data.phase == 1:
            await app.ainvoke({"value": input_data.value, "stop_after": 3})
            workflow.continue_as_new(
                PartialExecutionInput(
                    value=input_data.value,
                    cache=cache(),
                    phase=2,
                )
            )

        return await app.ainvoke({"value": input_data.value, "stop_after": 5})


# --- file boundary: tests/contrib/langgraph/test_command.py ---

from datetime import timedelta
from typing import Any, Literal
from uuid import uuid4

from langgraph.graph import (  # pyright: ignore[reportMissingTypeStubs]
    START,
    StateGraph,
)
from langgraph.types import Command
from typing_extensions import TypedDict

from temporalio import workflow
from temporalio.client import Client
from temporalio.contrib.langgraph import LangGraphPlugin, graph
from temporalio.worker import Worker


class State(TypedDict):
    value: str


def node_a(state: State) -> Command[Literal["node_b"]]:
    # Command carries both a state update and the next node to go to.
    return Command(update={"value": state["value"] + "a"}, goto="node_b")


def node_b(state: State) -> Command[Literal["__end__"]]:
    return Command(update={"value": state["value"] + "b"}, goto="__end__")


@workflow.defn
class CommandWorkflow:
    def __init__(self) -> None:
        self.app = graph("my-graph").compile()

    @workflow.run
    async def run(self, input: str) -> Any:
        return await self.app.ainvoke({"value": input})


async def test_command_goto_and_update(client: Client):
    g = StateGraph(State)
    g.add_node("node_a", node_a)
    g.add_node("node_b", node_b)
    g.add_edge(START, "node_a")

    task_queue = f"command-{uuid4()}"

    async with Worker(
        client,
        task_queue=task_queue,
        workflows=[CommandWorkflow],
        plugins=[
            LangGraphPlugin(
                graphs={"my-graph": g},
                default_activity_options={
                    "start_to_close_timeout": timedelta(seconds=10)
                },
            )
        ],
    ):
        result = await client.execute_workflow(
            CommandWorkflow.run,
            "",
            id=f"test-command-{uuid4()}",
            task_queue=task_queue,
        )

    # Both Command.goto hops and Command.update merges must have applied.
    assert result == {"value": "ab"}


# --- file boundary: tests/contrib/langgraph/test_continue_as_new.py ---

from datetime import timedelta
from typing import Any
from uuid import uuid4

from langgraph.checkpoint.memory import InMemorySaver
from langgraph.graph import START, StateGraph  # pyright: ignore[reportMissingTypeStubs]
from langgraph.graph.state import (  # pyright: ignore[reportMissingTypeStubs]
    RunnableConfig,
)
from typing_extensions import TypedDict

from temporalio import workflow
from temporalio.client import Client
from temporalio.contrib.langgraph import LangGraphPlugin, graph
from temporalio.worker import Worker


class State(TypedDict):
    value: str


async def node(state: State) -> dict[str, str]:
    return {"value": state["value"] + "a"}


@workflow.defn
class ContinueAsNewWorkflow:
    def __init__(self) -> None:
        self.app = graph("my-graph").compile(checkpointer=InMemorySaver())

    @workflow.run
    async def run(self, values: State) -> Any:
        config = RunnableConfig({"configurable": {"thread_id": "1"}})

        # Seed checkpointer state from the carried-over values, then run once.
        await self.app.aupdate_state(config, values)
        await self.app.ainvoke(values, config)

        if len(values["value"]) < 3:
            # Carry graph state across continue-as-new via aget_state.
            state = await self.app.aget_state(config)
            workflow.continue_as_new(state.values)

        return values


async def test_continue_as_new(client: Client):
    g = StateGraph(State)
    g.add_node("node", node)
    g.add_edge(START, "node")

    task_queue = f"my-graph-{uuid4()}"

    async with Worker(
        client,
        task_queue=task_queue,
        workflows=[ContinueAsNewWorkflow],
        plugins=[
            LangGraphPlugin(
                graphs={"my-graph": g},
                default_activity_options={
                    "start_to_close_timeout": timedelta(seconds=10)
                },
            )
        ],
    ):
        result = await client.execute_workflow(
            ContinueAsNewWorkflow.run,
            State(value=""),
            id=f"test-workflow-{uuid4()}",
            task_queue=task_queue,
        )

    # Three continue-as-new cycles append one "a" each.
    assert result == {"value": "aaa"}


# --- file boundary: tests/contrib/langgraph/test_continue_as_new_cached.py ---

"""Test Graph API continue-as-new with task result caching.

Verifies that node results are cached across continue-as-new boundaries,
so nodes don't re-execute when the graph is re-invoked with the same state.
"""

from dataclasses import dataclass
from datetime import timedelta
from typing import Any
from uuid import uuid4

from langgraph.graph import START, StateGraph  # pyright: ignore[reportMissingTypeStubs]
from typing_extensions import TypedDict

from temporalio import workflow
from temporalio.client import Client
from temporalio.contrib.langgraph import LangGraphPlugin, cache, graph
from temporalio.worker import Worker

# Track execution counts to verify caching
_execution_counts: dict[str, int] = {}


def _reset():
    """Clear the node execution counters between tests."""
    _execution_counts.clear()


class State(TypedDict):
    value: int


async def multiply_by_3(state: State) -> dict[str, int]:
    _execution_counts["multiply"] = _execution_counts.get("multiply", 0) + 1
    return {"value": state["value"] * 3}


async def add_100(state: State) -> dict[str, int]:
    _execution_counts["add"] = _execution_counts.get("add", 0) + 1
    return {"value": state["value"] + 100}


async def double(state: State) -> dict[str, int]:
    _execution_counts["double"] = _execution_counts.get("double", 0) + 1
    return {"value": state["value"] * 2}
@dataclass
class GraphContinueAsNewInput:
    value: int
    cache: dict[str, Any] | None = None  # node-result cache carried across runs
    phase: int = 1  # 1, 2, 3 — continues-as-new after phases 1 and 2


@workflow.defn
class GraphContinueAsNewWorkflow:
    """Runs a 3-node graph, continuing-as-new after each phase.

    Phase 1: runs graph (all 3 nodes execute), continues-as-new with cache.
    Phase 2: runs graph again with same input (all 3 cached), continues-as-new.
    Phase 3: runs graph again with same input (all 3 cached), returns result.

    Without caching: each node executes 3 times.
    With caching: each node executes once (first run), cached for phases 2 & 3.
    """

    @workflow.run
    async def run(self, input_data: GraphContinueAsNewInput) -> dict[str, int]:
        app = graph("cached-graph", cache=input_data.cache).compile()
        result = await app.ainvoke({"value": input_data.value})

        if input_data.phase < 3:
            # cache() snapshots completed node results for the next run.
            workflow.continue_as_new(
                GraphContinueAsNewInput(
                    value=input_data.value,
                    cache=cache(),
                    phase=input_data.phase + 1,
                )
            )

        return result


async def test_graph_continue_as_new_cached(client: Client):
    """Each node executes once despite 3 continue-as-new cycles.

    Graph: multiply_by_3 -> add_100 -> double
    Input 10: 10 * 3 = 30 -> 30 + 100 = 130 -> 130 * 2 = 260
    """
    _reset()

    timeout = {"start_to_close_timeout": timedelta(seconds=10)}
    g = StateGraph(State)
    g.add_node("multiply_by_3", multiply_by_3, metadata=timeout)
    g.add_node("add_100", add_100, metadata=timeout)
    g.add_node("double", double, metadata=timeout)
    g.add_edge(START, "multiply_by_3")
    g.add_edge("multiply_by_3", "add_100")
    g.add_edge("add_100", "double")

    task_queue = f"graph-cached-{uuid4()}"

    async with Worker(
        client,
        task_queue=task_queue,
        workflows=[GraphContinueAsNewWorkflow],
        plugins=[LangGraphPlugin(graphs={"cached-graph": g})],
    ):
        result = await client.execute_workflow(
            GraphContinueAsNewWorkflow.run,
            GraphContinueAsNewInput(value=10),
            id=f"graph-cached-{uuid4()}",
            task_queue=task_queue,
            execution_timeout=timedelta(seconds=60),
        )

    # 10 * 3 = 30 -> + 100 = 130 -> * 2 = 260
    assert result == {"value": 260}

    # Each node should execute exactly once — phases 2 and 3 use cached results.
    assert (
        _execution_counts.get("multiply", 0) == 1
    ), f"multiply executed {_execution_counts.get('multiply', 0)} times, expected 1"
    assert (
        _execution_counts.get("add", 0) == 1
    ), f"add executed {_execution_counts.get('add', 0)} times, expected 1"
    assert (
        _execution_counts.get("double", 0) == 1
    ), f"double executed {_execution_counts.get('double', 0)} times, expected 1"


# --- file boundary: tests/contrib/langgraph/test_e2e_functional.py ---

"""End-to-end tests for LangGraph Functional API integration (v1 and v2).

Requires a running Temporal test server (started by conftest.py).
LangGraph's Functional API requires Python >= 3.11 for async context
variable propagation (see langgraph.config.get_config).
"""
+""" + +from __future__ import annotations + +import sys +from datetime import timedelta +from typing import Any +from uuid import uuid4 + +import pytest + +pytestmark = pytest.mark.skipif( + sys.version_info < (3, 11), + reason="LangGraph Functional API requires Python >= 3.11 for async context propagation", +) +from langchain_core.runnables import RunnableConfig +from langgraph.checkpoint.memory import InMemorySaver +from langgraph.func import ( # pyright: ignore[reportMissingTypeStubs] + entrypoint as lg_entrypoint, +) +from langgraph.func import task # pyright: ignore[reportMissingTypeStubs] +from langgraph.types import Command +from pytest import raises + +from temporalio import workflow +from temporalio.client import Client, WorkflowFailureError +from temporalio.common import RetryPolicy +from temporalio.contrib.langgraph import LangGraphPlugin, entrypoint +from temporalio.worker import Worker +from tests.contrib.langgraph.e2e_functional_entrypoints import ( + add_ten, + ask_human, + continue_as_new_entrypoint, + double_value, + expensive_task_a, + expensive_task_b, + expensive_task_c, + get_task_execution_counts, + interrupt_entrypoint, + partial_execution_entrypoint, + reset_task_execution_counts, + simple_functional_entrypoint, + slow_entrypoint, + slow_task, + step_1, + step_2, + step_3, + step_4, + step_5, +) +from tests.contrib.langgraph.e2e_functional_workflows import ( + ContinueAsNewFunctionalWorkflow, + ContinueAsNewInput, + PartialExecutionInput, + PartialExecutionWorkflow, + SimpleFunctionalE2EWorkflow, + SlowFunctionalWorkflow, +) + +_DEFAULT_ACTIVITY_OPTIONS = {"start_to_close_timeout": timedelta(seconds=30)} + + +# V2-only tasks defined here to avoid sharing mutated _TaskFunction objects +# (Plugin wraps task.func in-place). 
+ + +@task +def triple_value(x: int) -> int: + return x * 3 + + +@task +def add_five(x: int) -> int: + return x + 5 + + +@lg_entrypoint() +async def simple_v2_entrypoint(value: int) -> dict: + tripled = await triple_value(value) + result = await add_five(tripled) + return {"result": result} + + +@workflow.defn +class SimpleV2Workflow: + def __init__(self) -> None: + self.app = entrypoint("v2_simple") + + @workflow.run + async def run(self, input_value: int) -> dict[str, Any]: + result = await self.app.ainvoke(input_value, version="v2") + return result.value + + +@workflow.defn +class InterruptV2FunctionalWorkflow: + def __init__(self) -> None: + self.app = entrypoint("v2_interrupt") + self.app.checkpointer = InMemorySaver() + + @workflow.run + async def run(self, input_value: str) -> dict[str, Any]: + config = RunnableConfig( + {"configurable": {"thread_id": workflow.info().workflow_id}} + ) + + result = await self.app.ainvoke(input_value, config, version="v2") + + assert result.value == {} + assert len(result.interrupts) == 1 + assert result.interrupts[0].value == "Do you approve?" 
+ + resumed = await self.app.ainvoke( + Command(resume="approved"), config, version="v2" + ) + return resumed.value + + +class TestFunctionalAPIBasicExecution: + @pytest.mark.parametrize( + "workflow_cls,entrypoint_func,entrypoint_name,tasks,expected_result", + [ + ( + SimpleFunctionalE2EWorkflow, + simple_functional_entrypoint, + "e2e_simple_functional", + [double_value, add_ten], + 30, + ), + ( + SimpleV2Workflow, + simple_v2_entrypoint, + "v2_simple", + [triple_value, add_five], + 35, + ), + ], + ids=["v1", "v2"], + ) + async def test_simple_entrypoint( + self, + client: Client, + workflow_cls: Any, + entrypoint_func: Any, + entrypoint_name: str, + tasks: list, + expected_result: int, + ) -> None: + task_queue = f"e2e-functional-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[workflow_cls], + plugins=[ + LangGraphPlugin( + entrypoints={entrypoint_name: entrypoint_func}, + tasks=tasks, + default_activity_options=_DEFAULT_ACTIVITY_OPTIONS, + ) + ], + ): + result = await client.execute_workflow( + workflow_cls.run, + 10, + id=f"e2e-functional-{uuid4()}", + task_queue=task_queue, + execution_timeout=timedelta(seconds=30), + ) + + assert result["result"] == expected_result + + +class TestFunctionalAPIContinueAsNew: + async def test_continue_as_new_with_checkpoint(self, client: Client) -> None: + """10 * 3 = 30 -> + 100 = 130 -> * 2 = 260. 
Each task executes once.""" + reset_task_execution_counts() + + tasks = [expensive_task_a, expensive_task_b, expensive_task_c] + task_queue = f"e2e-continue-as-new-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[ContinueAsNewFunctionalWorkflow], + plugins=[ + LangGraphPlugin( + entrypoints={ + "e2e_continue_as_new_functional": continue_as_new_entrypoint + }, + tasks=tasks, + default_activity_options=_DEFAULT_ACTIVITY_OPTIONS, + ) + ], + ): + result = await client.execute_workflow( + ContinueAsNewFunctionalWorkflow.run, + ContinueAsNewInput(value=10), + id=f"e2e-continue-as-new-{uuid4()}", + task_queue=task_queue, + execution_timeout=timedelta(seconds=60), + ) + + assert result["result"] == 260 + + counts = get_task_execution_counts() + assert ( + counts.get("task_a", 0) == 1 + ), f"task_a executed {counts.get('task_a', 0)} times, expected 1" + assert ( + counts.get("task_b", 0) == 1 + ), f"task_b executed {counts.get('task_b', 0)} times, expected 1" + assert ( + counts.get("task_c", 0) == 1 + ), f"task_c executed {counts.get('task_c', 0)} times, expected 1" + + +class TestFunctionalAPIPartialExecution: + async def test_partial_execution_five_tasks(self, client: Client) -> None: + """10*2=20 -> +5=25 -> *3=75 -> -10=65 -> +100=165. 
Each task executes once.""" + reset_task_execution_counts() + + tasks = [step_1, step_2, step_3, step_4, step_5] + task_queue = f"e2e-partial-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[PartialExecutionWorkflow], + plugins=[ + LangGraphPlugin( + entrypoints={"e2e_partial_execution": partial_execution_entrypoint}, + tasks=tasks, + default_activity_options=_DEFAULT_ACTIVITY_OPTIONS, + ) + ], + ): + result = await client.execute_workflow( + PartialExecutionWorkflow.run, + PartialExecutionInput(value=10), + id=f"e2e-partial-{uuid4()}", + task_queue=task_queue, + execution_timeout=timedelta(seconds=60), + ) + + assert result["result"] == 165 + assert result["completed_tasks"] == 5 + + counts = get_task_execution_counts() + for i in range(1, 6): + assert ( + counts.get(f"step_{i}", 0) == 1 + ), f"step_{i} executed {counts.get(f'step_{i}', 0)} times, expected 1" + + +class TestFunctionalAPIInterruptV2: + async def test_interrupt_v2_functional(self, client: Client) -> None: + """version='v2' separates interrupts from value in functional API.""" + tasks = [ask_human] + task_queue = f"v2-interrupt-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[InterruptV2FunctionalWorkflow], + plugins=[ + LangGraphPlugin( + entrypoints={"v2_interrupt": interrupt_entrypoint}, + tasks=tasks, + default_activity_options=_DEFAULT_ACTIVITY_OPTIONS, + ) + ], + ): + result = await client.execute_workflow( + InterruptV2FunctionalWorkflow.run, + "hello", + id=f"v2-interrupt-{uuid4()}", + task_queue=task_queue, + execution_timeout=timedelta(seconds=30), + ) + + assert result["input"] == "hello" + assert result["answer"] == "approved" + + +class TestFunctionalAPIPerTaskOptions: + async def test_per_task_activity_options_override(self, client: Client) -> None: + """activity_options[task_name] overrides default_activity_options for that task.""" + task_queue = f"e2e-per-task-options-{uuid4()}" + + async with Worker( + client, + 
task_queue=task_queue, + workflows=[SlowFunctionalWorkflow], + plugins=[ + LangGraphPlugin( + entrypoints={"e2e_slow_functional": slow_entrypoint}, + tasks=[slow_task], + default_activity_options=_DEFAULT_ACTIVITY_OPTIONS, + activity_options={ + "slow_task": { + "start_to_close_timeout": timedelta(milliseconds=100), + "retry_policy": RetryPolicy(maximum_attempts=1), + } + }, + ) + ], + ): + with raises(WorkflowFailureError): + await client.execute_workflow( + SlowFunctionalWorkflow.run, + 1, + id=f"e2e-per-task-options-{uuid4()}", + task_queue=task_queue, + execution_timeout=timedelta(seconds=30), + ) diff --git a/tests/contrib/langgraph/test_execute_in_workflow.py b/tests/contrib/langgraph/test_execute_in_workflow.py new file mode 100644 index 000000000..15b44f5a9 --- /dev/null +++ b/tests/contrib/langgraph/test_execute_in_workflow.py @@ -0,0 +1,51 @@ +from typing import Any +from uuid import uuid4 + +from langgraph.graph import START, StateGraph # pyright: ignore[reportMissingTypeStubs] +from typing_extensions import TypedDict + +from temporalio import workflow +from temporalio.client import Client +from temporalio.contrib.langgraph import LangGraphPlugin, graph +from temporalio.worker import Worker + + +class State(TypedDict): + value: str + + +async def node(state: State) -> dict[str, str]: # pyright: ignore[reportUnusedParameter] + return {"value": "done"} + + +@workflow.defn +class ExecuteInWorkflowWorkflow: + def __init__(self) -> None: + self.app = graph("my-graph").compile() + + @workflow.run + async def run(self, input: str) -> Any: + return await self.app.ainvoke({"value": input}) + + +async def test_execute_in_workflow(client: Client): + g = StateGraph(State) + g.add_node("node", node, metadata={"execute_in": "workflow"}) + g.add_edge(START, "node") + + task_queue = f"my-graph-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[ExecuteInWorkflowWorkflow], + plugins=[LangGraphPlugin(graphs={"my-graph": g})], + ): + result = 
# --- file boundary: tests/contrib/langgraph/test_interrupt.py ---

import sys
from datetime import timedelta
from typing import Any
from uuid import uuid4

import langgraph.types
import pytest

# Skip declared before the langgraph imports so < 3.11 skips cleanly.
pytestmark = pytest.mark.skipif(
    sys.version_info < (3, 11),
    reason="langgraph.types.interrupt() requires Python >= 3.11 for async context propagation",
)
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.graph import START, StateGraph  # pyright: ignore[reportMissingTypeStubs]
from langgraph.graph.state import (  # pyright: ignore[reportMissingTypeStubs]
    RunnableConfig,
)
from typing_extensions import TypedDict

from temporalio import workflow
from temporalio.client import Client
from temporalio.contrib.langgraph import LangGraphPlugin, graph
from temporalio.worker import Worker


class State(TypedDict):
    value: str


async def node(state: State) -> dict[str, str]:  # pyright: ignore[reportUnusedParameter]
    # Pauses the graph; the resume value becomes the new "value".
    return {"value": langgraph.types.interrupt("Continue?")}


@workflow.defn
class InterruptWorkflow:
    def __init__(self) -> None:
        # InMemorySaver is required for interrupt/resume; Temporal provides
        # the durability, so no external checkpointer is needed.
        self.app = graph("my-graph").compile(checkpointer=InMemorySaver())

    @workflow.run
    async def run(self, input: str) -> Any:
        config = RunnableConfig({"configurable": {"thread_id": "1"}})

        # v1 surface: interrupts arrive under the "__interrupt__" key.
        result = await self.app.ainvoke({"value": input}, config)
        assert result["__interrupt__"][0].value == "Continue?"

        return await self.app.ainvoke(langgraph.types.Command(resume="yes"), config)


@workflow.defn
class InterruptV2Workflow:
    def __init__(self) -> None:
        self.app = graph("my-graph").compile(checkpointer=InMemorySaver())

    @workflow.run
    async def run(self, input: str) -> Any:
        config = RunnableConfig({"configurable": {"thread_id": "1"}})

        # v2 surface: interrupts are separated from the state value.
        result = await self.app.ainvoke({"value": input}, config, version="v2")

        assert result.value == {"value": ""}
        assert len(result.interrupts) == 1
        assert result.interrupts[0].value == "Continue?"

        return await self.app.ainvoke(langgraph.types.Command(resume="yes"), config)


@pytest.mark.parametrize(
    "workflow_cls", [InterruptWorkflow, InterruptV2Workflow], ids=["v1", "v2"]
)
async def test_interrupt(client: Client, workflow_cls: Any) -> None:
    g = StateGraph(State)
    g.add_node("node", node)
    g.add_edge(START, "node")

    task_queue = f"interrupt-{uuid4()}"

    async with Worker(
        client,
        task_queue=task_queue,
        workflows=[workflow_cls],
        plugins=[
            LangGraphPlugin(
                graphs={"my-graph": g},
                default_activity_options={
                    "start_to_close_timeout": timedelta(seconds=10)
                },
            )
        ],
    ):
        result = await client.execute_workflow(
            workflow_cls.run,
            "",
            id=f"test-workflow-{uuid4()}",
            task_queue=task_queue,
        )

    # The resume value propagates back out as the node's state update.
    assert result == {"value": "yes"}


# --- file boundary: tests/contrib/langgraph/test_node_metadata.py ---

from datetime import timedelta
from typing import Any
from uuid import uuid4

from langchain_core.runnables import (
    RunnableConfig,  # pyright: ignore[reportMissingTypeStubs]
)
from langgraph.graph import START, StateGraph  # pyright: ignore[reportMissingTypeStubs]
from typing_extensions import TypedDict

from temporalio import workflow
from temporalio.client import Client
from temporalio.contrib.langgraph import LangGraphPlugin
from temporalio.worker import Worker
class State(TypedDict):
    value: str


async def node(state: State, config: RunnableConfig) -> dict[str, str]:
    # User metadata keys must be visible via config inside the node.
    metadata = config.get("metadata") or {}
    return {"value": state["value"] + str(metadata.get("my_key", "NOT_FOUND"))}


# Module-level graph: activity options and arbitrary user keys coexist in
# the same node metadata dict.
metadata_graph: StateGraph[State, None, State, State] = StateGraph(State)
metadata_graph.add_node(
    "node",
    node,
    metadata={
        "start_to_close_timeout": timedelta(seconds=10),
        "my_key": "my_value",
    },
)
metadata_graph.add_edge(START, "node")


@workflow.defn
class NodeMetadataWorkflow:
    def __init__(self) -> None:
        self.app = metadata_graph.compile()

    @workflow.run
    async def run(self, input: str) -> Any:
        return await self.app.ainvoke({"value": input})


async def test_node_metadata_readable_in_node(client: Client):
    task_queue = f"my-graph-{uuid4()}"

    async with Worker(
        client,
        task_queue=task_queue,
        workflows=[NodeMetadataWorkflow],
        plugins=[LangGraphPlugin(graphs={"my-graph": metadata_graph})],
    ):
        result = await client.execute_workflow(
            NodeMetadataWorkflow.run,
            "prefix-",
            id=f"test-workflow-{uuid4()}",
            task_queue=task_queue,
        )

    assert result == {"value": "prefix-my_value"}


# --- file boundary: tests/contrib/langgraph/test_plugin_validation.py ---

"""Tests for LangGraphPlugin validation."""

from __future__ import annotations

from typing import Any
from uuid import uuid4

from langchain_core.runnables import RunnableLambda
from langgraph.func import task  # pyright: ignore[reportMissingTypeStubs]
from langgraph.graph import START, StateGraph  # pyright: ignore[reportMissingTypeStubs]
from langgraph.types import RetryPolicy  # pyright: ignore[reportMissingTypeStubs]
from pytest import raises
from typing_extensions import TypedDict

from temporalio.contrib.langgraph import LangGraphPlugin


class State(TypedDict):
    value: str


async def async_node(state: State) -> dict[str, str]:  # pyright: ignore[reportUnusedParameter]
    return {"value": "done"}


def sync_node(state: State) -> dict[str, str]:  # pyright: ignore[reportUnusedParameter]
    return {"value": "done"}


def test_non_runnable_callable_node_raises() -> None:
    """Nodes whose runnable isn't a RunnableCallable can't be wrapped as activities."""
    g = StateGraph(State)
    g.add_node("node", RunnableLambda(sync_node))
    g.add_edge(START, "node")

    with raises(ValueError, match="must be a RunnableCallable"):
        LangGraphPlugin(graphs={f"validation-{uuid4()}": g})


def test_invalid_execute_in_raises() -> None:
    # Only recognized execute_in values are accepted at plugin construction.
    g = StateGraph(State)
    g.add_node("node", async_node, metadata={"execute_in": "bogus"})
    g.add_edge(START, "node")

    with raises(ValueError, match="Invalid execute_in value"):
        LangGraphPlugin(graphs={f"validation-{uuid4()}": g})


def test_node_retry_policy_raises() -> None:
    # LangGraph-level retry policies conflict with Temporal retries and are
    # rejected up front.
    g = StateGraph(State)
    g.add_node("node", async_node, retry_policy=RetryPolicy(max_attempts=3))
    g.add_edge(START, "node")

    with raises(ValueError, match="retry_policy"):
        LangGraphPlugin(graphs={f"validation-{uuid4()}": g})


def test_task_retry_policy_raises() -> None:
    decorator: Any = task(retry_policy=RetryPolicy(max_attempts=3))

    @decorator
    def my_task(x: int) -> int:
        return x + 1

    with raises(ValueError, match="retry_policy"):
        LangGraphPlugin(tasks=[my_task])


# --- file boundary: tests/contrib/langgraph/test_replay.py ---

import sys
from datetime import timedelta
from uuid import uuid4

import pytest
from langgraph.graph import START, StateGraph  # pyright: ignore[reportMissingTypeStubs]

from temporalio.client import Client
import Client +from temporalio.contrib.langgraph import LangGraphPlugin +from temporalio.worker import Replayer, Worker +from tests.contrib.langgraph.test_interrupt import ( + InterruptWorkflow, +) +from tests.contrib.langgraph.test_interrupt import ( + State as InterruptState, +) +from tests.contrib.langgraph.test_interrupt import ( + node as interrupt_node, +) +from tests.contrib.langgraph.test_two_nodes import ( + State, + TwoNodesWorkflow, + node_a, + node_b, +) + + +async def test_replay(client: Client): + g = StateGraph(State) + g.add_node("node_a", node_a) + g.add_node("node_b", node_b) + g.add_edge(START, "node_a") + g.add_edge("node_a", "node_b") + + task_queue = f"my-graph-{uuid4()}" + plugin = LangGraphPlugin( + graphs={"my-graph": g}, + default_activity_options={"start_to_close_timeout": timedelta(seconds=10)}, + ) + + async with Worker( + client, + task_queue=task_queue, + workflows=[TwoNodesWorkflow], + plugins=[plugin], + ): + handle = await client.start_workflow( + TwoNodesWorkflow.run, + "", + id=f"test-workflow-{uuid4()}", + task_queue=task_queue, + ) + await handle.result() + + await Replayer( + workflows=[TwoNodesWorkflow], + plugins=[plugin], + ).replay_workflow(await handle.fetch_history()) + + +@pytest.mark.skipif( + sys.version_info < (3, 11), + reason="langgraph.types.interrupt() requires Python >= 3.11 for async context propagation", +) +async def test_replay_interrupt(client: Client): + g = StateGraph(InterruptState) + g.add_node("node", interrupt_node) + g.add_edge(START, "node") + + task_queue = f"interrupt-replay-{uuid4()}" + plugin = LangGraphPlugin( + graphs={"my-graph": g}, + default_activity_options={"start_to_close_timeout": timedelta(seconds=10)}, + ) + + async with Worker( + client, + task_queue=task_queue, + workflows=[InterruptWorkflow], + plugins=[plugin], + ): + handle = await client.start_workflow( + InterruptWorkflow.run, + "", + id=f"test-interrupt-replay-{uuid4()}", + task_queue=task_queue, + ) + await handle.result() + 
+ await Replayer( + workflows=[InterruptWorkflow], + plugins=[plugin], + ).replay_workflow(await handle.fetch_history()) diff --git a/tests/contrib/langgraph/test_send.py b/tests/contrib/langgraph/test_send.py new file mode 100644 index 000000000..445d20648 --- /dev/null +++ b/tests/contrib/langgraph/test_send.py @@ -0,0 +1,76 @@ +import operator +from datetime import timedelta +from typing import Annotated, Any +from uuid import uuid4 + +from langgraph.graph import ( # pyright: ignore[reportMissingTypeStubs] + END, + START, + StateGraph, +) +from langgraph.types import Send +from typing_extensions import TypedDict + +from temporalio import workflow +from temporalio.client import Client +from temporalio.contrib.langgraph import LangGraphPlugin, graph +from temporalio.worker import Worker + + +class State(TypedDict): + items: list[str] + results: Annotated[list[str], operator.add] + + +class WorkerState(TypedDict): + item: str + + +def worker(state: WorkerState) -> dict[str, list[str]]: + return {"results": [state["item"].upper()]} + + +async def fan_out(state: State) -> list[Send]: + return [Send("worker", {"item": item}) for item in state["items"]] + + +@workflow.defn +class SendWorkflow: + def __init__(self) -> None: + self.app = graph("my-graph").compile() + + @workflow.run + async def run(self, items: list[str]) -> Any: + return await self.app.ainvoke({"items": items, "results": []}) + + +async def test_send(client: Client): + g = StateGraph(State) + g.add_node("worker", worker) + g.add_conditional_edges(START, fan_out, ["worker"]) + g.add_edge("worker", END) + + task_queue = f"send-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[SendWorkflow], + plugins=[ + LangGraphPlugin( + graphs={"my-graph": g}, + default_activity_options={ + "start_to_close_timeout": timedelta(seconds=10) + }, + ) + ], + ): + result = await client.execute_workflow( + SendWorkflow.run, + ["a", "b", "c"], + id=f"test-send-{uuid4()}", + 
task_queue=task_queue, + ) + + assert result["items"] == ["a", "b", "c"] + assert sorted(result["results"]) == ["A", "B", "C"] diff --git a/tests/contrib/langgraph/test_streaming.py b/tests/contrib/langgraph/test_streaming.py new file mode 100644 index 000000000..474a853c8 --- /dev/null +++ b/tests/contrib/langgraph/test_streaming.py @@ -0,0 +1,68 @@ +from datetime import timedelta +from typing import Any +from uuid import uuid4 + +from langgraph.graph import START, StateGraph # pyright: ignore[reportMissingTypeStubs] +from typing_extensions import TypedDict + +from temporalio import workflow +from temporalio.client import Client +from temporalio.contrib.langgraph import LangGraphPlugin, graph +from temporalio.worker import Worker + + +class State(TypedDict): + value: str + + +async def node_a(state: State) -> dict[str, str]: + return {"value": state["value"] + "a"} + + +async def node_b(state: State) -> dict[str, str]: + return {"value": state["value"] + "b"} + + +@workflow.defn +class StreamingWorkflow: + def __init__(self) -> None: + self.app = graph("streaming").compile() + + @workflow.run + async def run(self, input: str) -> Any: + chunks = [] + async for chunk in self.app.astream({"value": input}): + chunks.append(chunk) + return chunks + + +async def test_streaming(client: Client): + g = StateGraph(State) + g.add_node("node_a", node_a) + g.add_node("node_b", node_b) + g.add_edge(START, "node_a") + g.add_edge("node_a", "node_b") + + task_queue = f"streaming-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[StreamingWorkflow], + plugins=[ + LangGraphPlugin( + graphs={"streaming": g}, + default_activity_options={ + "start_to_close_timeout": timedelta(seconds=10) + }, + ) + ], + ): + chunks = await client.execute_workflow( + StreamingWorkflow.run, + "", + id=f"test-streaming-{uuid4()}", + task_queue=task_queue, + ) + + assert chunks == [{"node_a": {"value": "a"}}, {"node_b": {"value": "ab"}}] diff --git 
a/tests/contrib/langgraph/test_subgraph_activity.py b/tests/contrib/langgraph/test_subgraph_activity.py new file mode 100644 index 000000000..1cd6b5e96 --- /dev/null +++ b/tests/contrib/langgraph/test_subgraph_activity.py @@ -0,0 +1,67 @@ +from datetime import timedelta +from typing import Any +from uuid import uuid4 + +from langgraph.graph import START, StateGraph # pyright: ignore[reportMissingTypeStubs] +from typing_extensions import TypedDict + +from temporalio import workflow +from temporalio.client import Client +from temporalio.contrib.langgraph import LangGraphPlugin, graph +from temporalio.worker import Worker + + +class State(TypedDict): + value: str + + +async def child_node(state: State) -> dict[str, str]: # pyright: ignore[reportUnusedParameter] + return {"value": "child"} + + +async def parent_node(state: State) -> dict[str, str]: + child: StateGraph[State, None, State, State] = StateGraph(State) + child.add_node("child_node", child_node) + child.add_edge(START, "child_node") + + return await child.compile().ainvoke(state) + + +@workflow.defn +class ActivitySubgraphWorkflow: + def __init__(self) -> None: + self.app = graph("parent").compile() + + @workflow.run + async def run(self, input: str) -> Any: + return await self.app.ainvoke({"value": input}) + + +async def test_activity_subgraph(client: Client): + parent = StateGraph(State) + parent.add_node("parent_node", parent_node) + parent.add_edge(START, "parent_node") + + task_queue = f"subgraph-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[ActivitySubgraphWorkflow], + plugins=[ + LangGraphPlugin( + graphs={"parent": parent}, + default_activity_options={ + "start_to_close_timeout": timedelta(seconds=10) + }, + ) + ], + ): + result = await client.execute_workflow( + ActivitySubgraphWorkflow.run, + "", + id=f"test-workflow-{uuid4()}", + task_queue=task_queue, + ) + + assert result == {"value": "child"} diff --git a/tests/contrib/langgraph/test_subgraph_workflow.py 
b/tests/contrib/langgraph/test_subgraph_workflow.py new file mode 100644 index 000000000..5e77f6dd8 --- /dev/null +++ b/tests/contrib/langgraph/test_subgraph_workflow.py @@ -0,0 +1,64 @@ +from datetime import timedelta +from typing import Any +from uuid import uuid4 + +from langgraph.graph import START, StateGraph # pyright: ignore[reportMissingTypeStubs] +from typing_extensions import TypedDict + +from temporalio import workflow +from temporalio.client import Client +from temporalio.contrib.langgraph import LangGraphPlugin, graph +from temporalio.worker import Worker + + +class State(TypedDict): + value: str + + +async def child_node(state: State) -> dict[str, str]: # pyright: ignore[reportUnusedParameter] + return {"value": "child"} + + +async def parent_node(state: State) -> dict[str, str]: + return await graph("child").compile().ainvoke(state) + + +@workflow.defn +class WorkflowSubgraphWorkflow: + def __init__(self) -> None: + self.app = graph("parent").compile() + + @workflow.run + async def run(self, input: str) -> Any: + return await self.app.ainvoke({"value": input}) + + +async def test_workflow_subgraph(client: Client): + child = StateGraph(State) + child.add_node( + "child_node", + child_node, + metadata={"start_to_close_timeout": timedelta(seconds=10)}, + ) + child.add_edge(START, "child_node") + + parent = StateGraph(State) + parent.add_node("parent_node", parent_node, metadata={"execute_in": "workflow"}) + parent.add_edge(START, "parent_node") + + task_queue = f"subgraph-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[WorkflowSubgraphWorkflow], + plugins=[LangGraphPlugin(graphs={"parent": parent, "child": child})], + ): + result = await client.execute_workflow( + WorkflowSubgraphWorkflow.run, + "", + id=f"test-workflow-{uuid4()}", + task_queue=task_queue, + ) + + assert result == {"value": "child"} diff --git a/tests/contrib/langgraph/test_sync_node.py b/tests/contrib/langgraph/test_sync_node.py new file mode 100644 
index 000000000..edc79200b --- /dev/null +++ b/tests/contrib/langgraph/test_sync_node.py @@ -0,0 +1,59 @@ +from datetime import timedelta +from typing import Any +from uuid import uuid4 + +from langgraph.graph import START, StateGraph # pyright: ignore[reportMissingTypeStubs] +from typing_extensions import TypedDict + +from temporalio import workflow +from temporalio.client import Client +from temporalio.contrib.langgraph import LangGraphPlugin, graph +from temporalio.worker import Worker + + +class State(TypedDict): + value: str + + +def sync_node(state: State) -> dict[str, str]: + return {"value": state["value"] + "!"} + + +@workflow.defn +class SyncNodeWorkflow: + def __init__(self) -> None: + self.app = graph("my-graph").compile() + + @workflow.run + async def run(self, input: str) -> Any: + return await self.app.ainvoke({"value": input}) + + +async def test_sync_node(client: Client): + g = StateGraph(State) + g.add_node("sync_node", sync_node) + g.add_edge(START, "sync_node") + + task_queue = f"sync-node-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[SyncNodeWorkflow], + plugins=[ + LangGraphPlugin( + graphs={"my-graph": g}, + default_activity_options={ + "start_to_close_timeout": timedelta(seconds=10) + }, + ) + ], + ): + result = await client.execute_workflow( + SyncNodeWorkflow.run, + "hello", + id=f"test-sync-node-{uuid4()}", + task_queue=task_queue, + ) + + assert result == {"value": "hello!"} diff --git a/tests/contrib/langgraph/test_sync_task.py b/tests/contrib/langgraph/test_sync_task.py new file mode 100644 index 000000000..a6e135b21 --- /dev/null +++ b/tests/contrib/langgraph/test_sync_task.py @@ -0,0 +1,68 @@ +import sys +from datetime import timedelta +from typing import Any +from uuid import uuid4 + +import pytest + +pytestmark = pytest.mark.skipif( + sys.version_info < (3, 11), + reason="LangGraph Functional API requires Python >= 3.11 for async context propagation", +) +from langgraph.func import ( # pyright: 
ignore[reportMissingTypeStubs] + entrypoint as lg_entrypoint, +) +from langgraph.func import task # pyright: ignore[reportMissingTypeStubs] + +from temporalio import workflow +from temporalio.client import Client +from temporalio.contrib.langgraph import LangGraphPlugin, entrypoint +from temporalio.worker import Worker + + +@task +def sync_task(x: int) -> int: + return x + 1 + + +@lg_entrypoint() +async def sync_task_entrypoint(value: int) -> dict[str, int]: + result = await sync_task(value) + return {"result": result} + + +@workflow.defn +class SyncTaskWorkflow: + def __init__(self) -> None: + self.app = entrypoint("sync-task") + + @workflow.run + async def run(self, input: int) -> Any: + return await self.app.ainvoke(input) + + +async def test_sync_task(client: Client): + task_queue = f"sync-task-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[SyncTaskWorkflow], + plugins=[ + LangGraphPlugin( + entrypoints={"sync-task": sync_task_entrypoint}, + tasks=[sync_task], + default_activity_options={ + "start_to_close_timeout": timedelta(seconds=10) + }, + ) + ], + ): + result = await client.execute_workflow( + SyncTaskWorkflow.run, + 41, + id=f"test-sync-task-{uuid4()}", + task_queue=task_queue, + ) + + assert result == {"result": 42} diff --git a/tests/contrib/langgraph/test_timeout.py b/tests/contrib/langgraph/test_timeout.py new file mode 100644 index 000000000..23e65caa5 --- /dev/null +++ b/tests/contrib/langgraph/test_timeout.py @@ -0,0 +1,63 @@ +from asyncio import sleep +from datetime import timedelta +from typing import Any +from uuid import uuid4 + +from langgraph.graph import START, StateGraph # pyright: ignore[reportMissingTypeStubs] +from pytest import raises +from typing_extensions import TypedDict + +from temporalio import workflow +from temporalio.client import Client, WorkflowFailureError +from temporalio.common import RetryPolicy +from temporalio.contrib.langgraph import LangGraphPlugin, graph +from temporalio.worker 
import Worker + + +class State(TypedDict): + value: str + + +async def node(state: State) -> dict[str, str]: # pyright: ignore[reportUnusedParameter] + await sleep(1) # 1 second + return {"value": "done"} + + +@workflow.defn +class TimeoutWorkflow: + def __init__(self) -> None: + self.app = graph("my-graph").compile() + + @workflow.run + async def run(self, input: str) -> Any: + return await self.app.ainvoke({"value": input}) + + +async def test_timeout(client: Client): + g = StateGraph(State) + g.add_node("node", node) + g.add_edge(START, "node") + + task_queue = f"my-graph-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[TimeoutWorkflow], + plugins=[ + LangGraphPlugin( + graphs={"my-graph": g}, + default_activity_options={ + "start_to_close_timeout": timedelta(milliseconds=100), + "retry_policy": RetryPolicy(maximum_attempts=1), + }, + ) + ], + ): + with raises(WorkflowFailureError): + await client.execute_workflow( + TimeoutWorkflow.run, + "", + id=f"test-workflow-{uuid4()}", + task_queue=task_queue, + ) diff --git a/tests/contrib/langgraph/test_two_nodes.py b/tests/contrib/langgraph/test_two_nodes.py new file mode 100644 index 000000000..5d9978ffe --- /dev/null +++ b/tests/contrib/langgraph/test_two_nodes.py @@ -0,0 +1,65 @@ +from datetime import timedelta +from typing import Any +from uuid import uuid4 + +from langgraph.graph import START, StateGraph # pyright: ignore[reportMissingTypeStubs] +from typing_extensions import TypedDict + +from temporalio import workflow +from temporalio.client import Client +from temporalio.contrib.langgraph import LangGraphPlugin, graph +from temporalio.worker import Worker + + +class State(TypedDict): + value: str + + +async def node_a(state: State) -> dict[str, str]: + return {"value": state["value"] + "a"} + + +async def node_b(state: State) -> dict[str, str]: + return {"value": state["value"] + "b"} + + +@workflow.defn +class TwoNodesWorkflow: + def __init__(self) -> None: + self.app = 
graph("my-graph").compile() + + @workflow.run + async def run(self, input: str) -> Any: + return await self.app.ainvoke({"value": input}) + + +async def test_two_nodes(client: Client): + g = StateGraph(State) + g.add_node("node_a", node_a) + g.add_node("node_b", node_b) + g.add_edge(START, "node_a") + g.add_edge("node_a", "node_b") + + task_queue = f"my-graph-{uuid4()}" + + async with Worker( + client, + task_queue=task_queue, + workflows=[TwoNodesWorkflow], + plugins=[ + LangGraphPlugin( + graphs={"my-graph": g}, + default_activity_options={ + "start_to_close_timeout": timedelta(seconds=10) + }, + ) + ], + ): + result = await client.execute_workflow( + TwoNodesWorkflow.run, + "", + id=f"test-workflow-{uuid4()}", + task_queue=task_queue, + ) + + assert result == {"value": "ab"} diff --git a/uv.lock b/uv.lock index bdc25a507..4fba27fc0 100644 --- a/uv.lock +++ b/uv.lock @@ -8,6 +8,13 @@ resolution-markers = [ "python_full_version < '3.11'", ] +[options] +exclude-newer = "2026-04-13T21:30:54.856039Z" +exclude-newer-span = "P1W" + +[options.exclude-newer-package] +openai-agents = false + [[package]] name = "aioboto3" version = "15.5.0" @@ -1812,7 +1819,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d", size = 277747, upload-time = "2026-02-20T20:16:21.325Z" }, { url = "https://files.pythonhosted.org/packages/fb/07/cb284a8b5c6498dbd7cba35d31380bb123d7dceaa7907f606c8ff5993cbf/greenlet-3.3.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b568183cf65b94919be4438dc28416b234b678c608cafac8874dfeeb2a9bbe13", size = 579202, upload-time = "2026-02-20T20:47:28.955Z" }, { url = 
"https://files.pythonhosted.org/packages/ed/45/67922992b3a152f726163b19f890a85129a992f39607a2a53155de3448b8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:527fec58dc9f90efd594b9b700662ed3fb2493c2122067ac9c740d98080a620e", size = 590620, upload-time = "2026-02-20T20:55:55.581Z" }, - { url = "https://files.pythonhosted.org/packages/03/5f/6e2a7d80c353587751ef3d44bb947f0565ec008a2e0927821c007e96d3a7/greenlet-3.3.2-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:508c7f01f1791fbc8e011bd508f6794cb95397fdb198a46cb6635eb5b78d85a7", size = 602132, upload-time = "2026-02-20T21:02:43.261Z" }, { url = "https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f", size = 591729, upload-time = "2026-02-20T20:20:58.395Z" }, { url = "https://files.pythonhosted.org/packages/24/b4/21f5455773d37f94b866eb3cf5caed88d6cea6dd2c6e1f9c34f463cba3ec/greenlet-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:97245cc10e5515dbc8c3104b2928f7f02b6813002770cfaffaf9a6e0fc2b94ef", size = 1551946, upload-time = "2026-02-20T20:49:31.102Z" }, { url = "https://files.pythonhosted.org/packages/00/68/91f061a926abead128fe1a87f0b453ccf07368666bd59ffa46016627a930/greenlet-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c1fdd7d1b309ff0da81d60a9688a8bd044ac4e18b250320a96fc68d31c209ca", size = 1618494, upload-time = "2026-02-20T20:21:06.541Z" }, @@ -1820,7 +1826,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f3/47/16400cb42d18d7a6bb46f0626852c1718612e35dcb0dffa16bbaffdf5dd2/greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86", size = 278890, upload-time = "2026-02-20T20:19:39.263Z" }, { url = 
"https://files.pythonhosted.org/packages/a3/90/42762b77a5b6aa96cd8c0e80612663d39211e8ae8a6cd47c7f1249a66262/greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f", size = 581120, upload-time = "2026-02-20T20:47:30.161Z" }, { url = "https://files.pythonhosted.org/packages/bf/6f/f3d64f4fa0a9c7b5c5b3c810ff1df614540d5aa7d519261b53fba55d4df9/greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55", size = 594363, upload-time = "2026-02-20T20:55:56.965Z" }, - { url = "https://files.pythonhosted.org/packages/9c/8b/1430a04657735a3f23116c2e0d5eb10220928846e4537a938a41b350bed6/greenlet-3.3.2-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4375a58e49522698d3e70cc0b801c19433021b5c37686f7ce9c65b0d5c8677d2", size = 605046, upload-time = "2026-02-20T21:02:45.234Z" }, { url = "https://files.pythonhosted.org/packages/72/83/3e06a52aca8128bdd4dcd67e932b809e76a96ab8c232a8b025b2850264c5/greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358", size = 594156, upload-time = "2026-02-20T20:20:59.955Z" }, { url = "https://files.pythonhosted.org/packages/70/79/0de5e62b873e08fe3cef7dbe84e5c4bc0e8ed0c7ff131bccb8405cd107c8/greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99", size = 1554649, upload-time = "2026-02-20T20:49:32.293Z" }, { url = "https://files.pythonhosted.org/packages/5a/00/32d30dee8389dc36d42170a9c66217757289e2afb0de59a3565260f38373/greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be", size = 1619472, upload-time = "2026-02-20T20:21:07.966Z" }, @@ -1829,7 +1834,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" }, { url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, - { url = "https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" }, { url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, { url = 
"https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" }, @@ -1838,7 +1842,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, { url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, - { url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" }, { url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, { url = 
"https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, { url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, @@ -1847,7 +1850,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, { url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, - { url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" }, { url = 
"https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, @@ -1856,7 +1858,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, { url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" }, { url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, @@ -2474,6 +2475,81 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/db/e655086b7f3a705df045bf0933bdd9c2f79bb3c97bfef1384598bb79a217/keyring-25.7.0-py3-none-any.whl", hash = "sha256:be4a0b195f149690c166e850609a477c532ddbfbaed96a404d4e43f8d5e2689f", size = 39160, upload-time = "2025-11-16T16:26:08.402Z" }, ] +[[package]] +name = "langchain-core" +version = "1.2.28" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonpatch" }, + { name = "langsmith" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "uuid-utils" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/f8/a4/317a1a3ac1df33a64adb3670bf88bbe3b3d5baa274db6863a979db472897/langchain_core-1.2.28.tar.gz", hash = "sha256:271a3d8bd618f795fdeba112b0753980457fc90537c46a0c11998516a74dc2cb", size = 846119, upload-time = "2026-04-08T18:19:34.867Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/92/32f785f077c7e898da97064f113c73fbd9ad55d1e2169cf3a391b183dedb/langchain_core-1.2.28-py3-none-any.whl", hash = "sha256:80764232581eaf8057bcefa71dbf8adc1f6a28d257ebd8b95ba9b8b452e8c6ac", size = 508727, upload-time = "2026-04-08T18:19:32.823Z" }, +] + +[[package]] +name = "langgraph" +version = "1.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, + { name = "langgraph-prebuilt" }, + { name = "langgraph-sdk" }, + { name = "pydantic" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/e5/d3f72ead3c7f15769d5a9c07e373628f1fbaf6cbe7735694d7085859acf6/langgraph-1.1.6.tar.gz", hash = "sha256:1783f764b08a607e9f288dbcf6da61caeb0dd40b337e5c9fb8b412341fbc0b60", size = 549634, upload-time = "2026-04-03T19:01:32.561Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/e6/b36ecdb3ff4ba9a290708d514bae89ebbe2f554b6abbe4642acf3fddbe51/langgraph-1.1.6-py3-none-any.whl", hash = "sha256:fdbf5f54fa5a5a4c4b09b7b5e537f1b2fa283d2f0f610d3457ddeecb479458b9", size = 169755, upload-time = "2026-04-03T19:01:30.686Z" }, +] + +[[package]] +name = "langgraph-checkpoint" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "ormsgpack" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/44/a8df45d1e8b4637e29789fa8bae1db022c953cc7ac80093cfc52e923547e/langgraph_checkpoint-4.0.1.tar.gz", hash = "sha256:b433123735df11ade28829e40ce25b9be614930cd50245ff2af60629234befd9", size = 158135, upload-time = "2026-02-27T21:06:16.092Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/65/4c/09a4a0c42f5d2fc38d6c4d67884788eff7fd2cfdf367fdf7033de908b4c0/langgraph_checkpoint-4.0.1-py3-none-any.whl", hash = "sha256:e3adcd7a0e0166f3b48b8cf508ce0ea366e7420b5a73aa81289888727769b034", size = 50453, upload-time = "2026-02-27T21:06:14.293Z" }, +] + +[[package]] +name = "langgraph-prebuilt" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "langgraph-checkpoint" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/4c/06dac899f4945bedb0c3a1583c19484c2cc894114ea30d9a538dd270086e/langgraph_prebuilt-1.0.9.tar.gz", hash = "sha256:93de7512e9caade4b77ead92428f6215c521fdb71b8ffda8cd55f0ad814e64de", size = 165850, upload-time = "2026-04-03T14:06:37.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/a2/8368ac187b75e7f9d938ca075d34f116683f5cfc48d924029ee79aea147b/langgraph_prebuilt-1.0.9-py3-none-any.whl", hash = "sha256:776c8e3154a5aef5ad0e5bf3f263f2dcaab3983786cc20014b7f955d99d2d1b2", size = 35958, upload-time = "2026-04-03T14:06:36.58Z" }, +] + +[[package]] +name = "langgraph-sdk" +version = "0.3.13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/db/77a45127dddcfea5e4256ba916182903e4c31dc4cfca305b8c386f0a9e53/langgraph_sdk-0.3.13.tar.gz", hash = "sha256:419ca5663eec3cec192ad194ac0647c0c826866b446073eb40f384f950986cd5", size = 196360, upload-time = "2026-04-07T20:34:18.766Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/ef/64d64e9f8eea47ce7b939aa6da6863b674c8d418647813c20111645fcc62/langgraph_sdk-0.3.13-py3-none-any.whl", hash = "sha256:aee09e345c90775f6de9d6f4c7b847cfc652e49055c27a2aed0d981af2af3bd0", size = 96668, upload-time = "2026-04-07T20:34:17.866Z" }, +] + [[package]] name = "langsmith" version = "0.7.26" @@ -3682,6 +3758,62 @@ 
wheels = [ { url = "https://files.pythonhosted.org/packages/c0/d1/facb5b5051fabb0ef9d26c6544d87ef19a939a9a001198655d0d891062dd/orjson-3.11.8-cp314-cp314-win_arm64.whl", hash = "sha256:6ccdea2c213cf9f3d9490cbd5d427693c870753df41e6cb375bd79bcbafc8817", size = 127330, upload-time = "2026-03-31T16:16:25.496Z" }, ] +[[package]] +name = "ormsgpack" +version = "1.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/0c/f1761e21486942ab9bb6feaebc610fa074f7c5e496e6962dea5873348077/ormsgpack-1.12.2.tar.gz", hash = "sha256:944a2233640273bee67521795a73cf1e959538e0dfb7ac635505010455e53b33", size = 39031, upload-time = "2026-01-18T20:55:28.023Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/93/fa/a91f70829ebccf6387c4946e0a1a109f6ba0d6a28d65f628bedfad94b890/ormsgpack-1.12.2-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c1429217f8f4d7fcb053523bbbac6bed5e981af0b85ba616e6df7cce53c19657", size = 378262, upload-time = "2026-01-18T20:55:22.284Z" }, + { url = "https://files.pythonhosted.org/packages/5f/62/3698a9a0c487252b5c6a91926e5654e79e665708ea61f67a8bdeceb022bf/ormsgpack-1.12.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f13034dc6c84a6280c6c33db7ac420253852ea233fc3ee27c8875f8dd651163", size = 203034, upload-time = "2026-01-18T20:55:53.324Z" }, + { url = "https://files.pythonhosted.org/packages/66/3a/f716f64edc4aec2744e817660b317e2f9bb8de372338a95a96198efa1ac1/ormsgpack-1.12.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59f5da97000c12bc2d50e988bdc8576b21f6ab4e608489879d35b2c07a8ab51a", size = 210538, upload-time = "2026-01-18T20:55:20.097Z" }, + { url = "https://files.pythonhosted.org/packages/72/30/a436be9ce27d693d4e19fa94900028067133779f09fc45776db3f689c822/ormsgpack-1.12.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9e4459c3f27066beadb2b81ea48a076a417aafffff7df1d3c11c519190ed44f2", size = 212401, upload-time = "2026-01-18T20:55:46.447Z" }, + { url = "https://files.pythonhosted.org/packages/10/c5/cde98300fd33fee84ca71de4751b19aeeca675f0cf3c0ec4b043f40f3b76/ormsgpack-1.12.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a1c460655d7288407ffa09065e322a7231997c0d62ce914bf3a96ad2dc6dedd", size = 387080, upload-time = "2026-01-18T20:56:00.884Z" }, + { url = "https://files.pythonhosted.org/packages/6a/31/30bf445ef827546747c10889dd254b3d84f92b591300efe4979d792f4c41/ormsgpack-1.12.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:458e4568be13d311ef7d8877275e7ccbe06c0e01b39baaac874caaa0f46d826c", size = 482346, upload-time = "2026-01-18T20:55:39.831Z" }, + { url = "https://files.pythonhosted.org/packages/2e/f5/e1745ddf4fa246c921b5ca253636c4c700ff768d78032f79171289159f6e/ormsgpack-1.12.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8cde5eaa6c6cbc8622db71e4a23de56828e3d876aeb6460ffbcb5b8aff91093b", size = 425178, upload-time = "2026-01-18T20:55:27.106Z" }, + { url = "https://files.pythonhosted.org/packages/8d/a2/e6532ed7716aed03dede8df2d0d0d4150710c2122647d94b474147ccd891/ormsgpack-1.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:dc7a33be14c347893edbb1ceda89afbf14c467d593a5ee92c11de4f1666b4d4f", size = 117183, upload-time = "2026-01-18T20:55:55.52Z" }, + { url = "https://files.pythonhosted.org/packages/4b/08/8b68f24b18e69d92238aa8f258218e6dfeacf4381d9d07ab8df303f524a9/ormsgpack-1.12.2-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bd5f4bf04c37888e864f08e740c5a573c4017f6fd6e99fa944c5c935fabf2dd9", size = 378266, upload-time = "2026-01-18T20:55:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/0d/24/29fc13044ecb7c153523ae0a1972269fcd613650d1fa1a9cec1044c6b666/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:34d5b28b3570e9fed9a5a76528fc7230c3c76333bc214798958e58e9b79cc18a", size = 203035, upload-time = "2026-01-18T20:55:30.59Z" }, + { url = "https://files.pythonhosted.org/packages/ad/c2/00169fb25dd8f9213f5e8a549dfb73e4d592009ebc85fbbcd3e1dcac575b/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3708693412c28f3538fb5a65da93787b6bbab3484f6bc6e935bfb77a62400ae5", size = 210539, upload-time = "2026-01-18T20:55:48.569Z" }, + { url = "https://files.pythonhosted.org/packages/1b/33/543627f323ff3c73091f51d6a20db28a1a33531af30873ea90c5ac95a9b5/ormsgpack-1.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43013a3f3e2e902e1d05e72c0f1aeb5bedbb8e09240b51e26792a3c89267e181", size = 212401, upload-time = "2026-01-18T20:56:10.101Z" }, + { url = "https://files.pythonhosted.org/packages/e8/5d/f70e2c3da414f46186659d24745483757bcc9adccb481a6eb93e2b729301/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7c8b1667a72cbba74f0ae7ecf3105a5e01304620ed14528b2cb4320679d2869b", size = 387082, upload-time = "2026-01-18T20:56:12.047Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d6/06e8dc920c7903e051f30934d874d4afccc9bb1c09dcaf0bc03a7de4b343/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:df6961442140193e517303d0b5d7bc2e20e69a879c2d774316125350c4a76b92", size = 482346, upload-time = "2026-01-18T20:56:05.152Z" }, + { url = "https://files.pythonhosted.org/packages/66/c4/f337ac0905eed9c393ef990c54565cd33644918e0a8031fe48c098c71dbf/ormsgpack-1.12.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c6a4c34ddef109647c769d69be65fa1de7a6022b02ad45546a69b3216573eb4a", size = 425181, upload-time = "2026-01-18T20:55:37.83Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/6d5758fabef3babdf4bbbc453738cc7de9cd3334e4c38dd5737e27b85653/ormsgpack-1.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:73670ed0375ecc303858e3613f407628dd1fca18fe6ac57b7b7ce66cc7bb006c", 
size = 117182, upload-time = "2026-01-18T20:55:31.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/17a15549233c37e7fd054c48fe9207492e06b026dbd872b826a0b5f833b6/ormsgpack-1.12.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2be829954434e33601ae5da328cccce3266b098927ca7a30246a0baec2ce7bd", size = 111464, upload-time = "2026-01-18T20:55:38.811Z" }, + { url = "https://files.pythonhosted.org/packages/4c/36/16c4b1921c308a92cef3bf6663226ae283395aa0ff6e154f925c32e91ff5/ormsgpack-1.12.2-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7a29d09b64b9694b588ff2f80e9826bdceb3a2b91523c5beae1fab27d5c940e7", size = 378618, upload-time = "2026-01-18T20:55:50.835Z" }, + { url = "https://files.pythonhosted.org/packages/c0/68/468de634079615abf66ed13bb5c34ff71da237213f29294363beeeca5306/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b39e629fd2e1c5b2f46f99778450b59454d1f901bc507963168985e79f09c5d", size = 203186, upload-time = "2026-01-18T20:56:11.163Z" }, + { url = "https://files.pythonhosted.org/packages/73/a9/d756e01961442688b7939bacd87ce13bfad7d26ce24f910f6028178b2cc8/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:958dcb270d30a7cb633a45ee62b9444433fa571a752d2ca484efdac07480876e", size = 210738, upload-time = "2026-01-18T20:56:09.181Z" }, + { url = "https://files.pythonhosted.org/packages/7b/ba/795b1036888542c9113269a3f5690ab53dd2258c6fb17676ac4bd44fcf94/ormsgpack-1.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d379d72b6c5e964851c77cfedfb386e474adee4fd39791c2c5d9efb53505cc", size = 212569, upload-time = "2026-01-18T20:56:06.135Z" }, + { url = "https://files.pythonhosted.org/packages/6c/aa/bff73c57497b9e0cba8837c7e4bcab584b1a6dbc91a5dd5526784a5030c8/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8463a3fc5f09832e67bdb0e2fda6d518dc4281b133166146a67f54c08496442e", size = 
387166, upload-time = "2026-01-18T20:55:36.738Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cf/f8283cba44bcb7b14f97b6274d449db276b3a86589bdb363169b51bc12de/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:eddffb77eff0bad4e67547d67a130604e7e2dfbb7b0cde0796045be4090f35c6", size = 482498, upload-time = "2026-01-18T20:55:29.626Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/71e37b852d723dfcbe952ad04178c030df60d6b78eba26bfd14c9a40575e/ormsgpack-1.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fcd55e5f6ba0dbce624942adf9f152062135f991a0126064889f68eb850de0dd", size = 425518, upload-time = "2026-01-18T20:55:49.556Z" }, + { url = "https://files.pythonhosted.org/packages/7a/0c/9803aa883d18c7ef197213cd2cbf73ba76472a11fe100fb7dab2884edf48/ormsgpack-1.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:d024b40828f1dde5654faebd0d824f9cc29ad46891f626272dd5bfd7af2333a4", size = 117462, upload-time = "2026-01-18T20:55:47.726Z" }, + { url = "https://files.pythonhosted.org/packages/c8/9e/029e898298b2cc662f10d7a15652a53e3b525b1e7f07e21fef8536a09bb8/ormsgpack-1.12.2-cp312-cp312-win_arm64.whl", hash = "sha256:da538c542bac7d1c8f3f2a937863dba36f013108ce63e55745941dda4b75dbb6", size = 111559, upload-time = "2026-01-18T20:55:54.273Z" }, + { url = "https://files.pythonhosted.org/packages/eb/29/bb0eba3288c0449efbb013e9c6f58aea79cf5cb9ee1921f8865f04c1a9d7/ormsgpack-1.12.2-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5ea60cb5f210b1cfbad8c002948d73447508e629ec375acb82910e3efa8ff355", size = 378661, upload-time = "2026-01-18T20:55:57.765Z" }, + { url = "https://files.pythonhosted.org/packages/6e/31/5efa31346affdac489acade2926989e019e8ca98129658a183e3add7af5e/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3601f19afdbea273ed70b06495e5794606a8b690a568d6c996a90d7255e51c1", size = 203194, upload-time = "2026-01-18T20:56:08.252Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/56/d0087278beef833187e0167f8527235ebe6f6ffc2a143e9de12a98b1ce87/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29a9f17a3dac6054c0dce7925e0f4995c727f7c41859adf9b5572180f640d172", size = 210778, upload-time = "2026-01-18T20:55:17.694Z" }, + { url = "https://files.pythonhosted.org/packages/1c/a2/072343e1413d9443e5a252a8eb591c2d5b1bffbe5e7bfc78c069361b92eb/ormsgpack-1.12.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39c1bd2092880e413902910388be8715f70b9f15f20779d44e673033a6146f2d", size = 212592, upload-time = "2026-01-18T20:55:32.747Z" }, + { url = "https://files.pythonhosted.org/packages/a2/8b/a0da3b98a91d41187a63b02dda14267eefc2a74fcb43cc2701066cf1510e/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:50b7249244382209877deedeee838aef1542f3d0fc28b8fe71ca9d7e1896a0d7", size = 387164, upload-time = "2026-01-18T20:55:40.853Z" }, + { url = "https://files.pythonhosted.org/packages/19/bb/6d226bc4cf9fc20d8eb1d976d027a3f7c3491e8f08289a2e76abe96a65f3/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:5af04800d844451cf102a59c74a841324868d3f1625c296a06cc655c542a6685", size = 482516, upload-time = "2026-01-18T20:55:42.033Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f1/bb2c7223398543dedb3dbf8bb93aaa737b387de61c5feaad6f908841b782/ormsgpack-1.12.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:cec70477d4371cd524534cd16472d8b9cc187e0e3043a8790545a9a9b296c258", size = 425539, upload-time = "2026-01-18T20:55:24.727Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e8/0fb45f57a2ada1fed374f7494c8cd55e2f88ccd0ab0a669aa3468716bf5f/ormsgpack-1.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:21f4276caca5c03a818041d637e4019bc84f9d6ca8baa5ea03e5cc8bf56140e9", size = 117459, upload-time = "2026-01-18T20:55:56.876Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/d4/0cfeea1e960d550a131001a7f38a5132c7ae3ebde4c82af1f364ccc5d904/ormsgpack-1.12.2-cp313-cp313-win_arm64.whl", hash = "sha256:baca4b6773d20a82e36d6fd25f341064244f9f86a13dead95dd7d7f996f51709", size = 111577, upload-time = "2026-01-18T20:55:43.605Z" }, + { url = "https://files.pythonhosted.org/packages/94/16/24d18851334be09c25e87f74307c84950f18c324a4d3c0b41dabdbf19c29/ormsgpack-1.12.2-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:bc68dd5915f4acf66ff2010ee47c8906dc1cf07399b16f4089f8c71733f6e36c", size = 378717, upload-time = "2026-01-18T20:55:26.164Z" }, + { url = "https://files.pythonhosted.org/packages/b5/a2/88b9b56f83adae8032ac6a6fa7f080c65b3baf9b6b64fd3d37bd202991d4/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46d084427b4132553940070ad95107266656cb646ea9da4975f85cb1a6676553", size = 203183, upload-time = "2026-01-18T20:55:18.815Z" }, + { url = "https://files.pythonhosted.org/packages/a9/80/43e4555963bf602e5bdc79cbc8debd8b6d5456c00d2504df9775e74b450b/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c010da16235806cf1d7bc4c96bf286bfa91c686853395a299b3ddb49499a3e13", size = 210814, upload-time = "2026-01-18T20:55:33.973Z" }, + { url = "https://files.pythonhosted.org/packages/78/e1/7cfbf28de8bca6efe7e525b329c31277d1b64ce08dcba723971c241a9d60/ormsgpack-1.12.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18867233df592c997154ff942a6503df274b5ac1765215bceba7a231bea2745d", size = 212634, upload-time = "2026-01-18T20:55:28.634Z" }, + { url = "https://files.pythonhosted.org/packages/95/f8/30ae5716e88d792a4e879debee195653c26ddd3964c968594ddef0a3cc7e/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b009049086ddc6b8f80c76b3955df1aa22a5fbd7673c525cd63bf91f23122ede", size = 387139, upload-time = "2026-01-18T20:56:02.013Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/81/aee5b18a3e3a0e52f718b37ab4b8af6fae0d9d6a65103036a90c2a8ffb5d/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:1dcc17d92b6390d4f18f937cf0b99054824a7815818012ddca925d6e01c2e49e", size = 482578, upload-time = "2026-01-18T20:55:35.117Z" }, + { url = "https://files.pythonhosted.org/packages/bd/17/71c9ba472d5d45f7546317f467a5fc941929cd68fb32796ca3d13dcbaec2/ormsgpack-1.12.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f04b5e896d510b07c0ad733d7fce2d44b260c5e6c402d272128f8941984e4285", size = 425539, upload-time = "2026-01-18T20:56:04.009Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a6/ac99cd7fe77e822fed5250ff4b86fa66dd4238937dd178d2299f10b69816/ormsgpack-1.12.2-cp314-cp314-win_amd64.whl", hash = "sha256:ae3aba7eed4ca7cb79fd3436eddd29140f17ea254b91604aa1eb19bfcedb990f", size = 117493, upload-time = "2026-01-18T20:56:07.343Z" }, + { url = "https://files.pythonhosted.org/packages/3a/67/339872846a1ae4592535385a1c1f93614138566d7af094200c9c3b45d1e5/ormsgpack-1.12.2-cp314-cp314-win_arm64.whl", hash = "sha256:118576ea6006893aea811b17429bfc561b4778fad393f5f538c84af70b01260c", size = 111579, upload-time = "2026-01-18T20:55:21.161Z" }, + { url = "https://files.pythonhosted.org/packages/49/c2/6feb972dc87285ad381749d3882d8aecbde9f6ecf908dd717d33d66df095/ormsgpack-1.12.2-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7121b3d355d3858781dc40dafe25a32ff8a8242b9d80c692fd548a4b1f7fd3c8", size = 378721, upload-time = "2026-01-18T20:55:52.12Z" }, + { url = "https://files.pythonhosted.org/packages/a3/9a/900a6b9b413e0f8a471cf07830f9cf65939af039a362204b36bd5b581d8b/ormsgpack-1.12.2-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ee766d2e78251b7a63daf1cddfac36a73562d3ddef68cacfb41b2af64698033", size = 203170, upload-time = "2026-01-18T20:55:44.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/4c/27a95466354606b256f24fad464d7c97ab62bce6cc529dd4673e1179b8fb/ormsgpack-1.12.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292410a7d23de9b40444636b9b8f1e4e4b814af7f1ef476e44887e52a123f09d", size = 212816, upload-time = "2026-01-18T20:55:23.501Z" }, + { url = "https://files.pythonhosted.org/packages/73/cd/29cee6007bddf7a834e6cd6f536754c0535fcb939d384f0f37a38b1cddb8/ormsgpack-1.12.2-cp314-cp314t-win_amd64.whl", hash = "sha256:837dd316584485b72ef451d08dd3e96c4a11d12e4963aedb40e08f89685d8ec2", size = 117232, upload-time = "2026-01-18T20:55:45.448Z" }, +] + [[package]] name = "packaging" version = "26.0" @@ -5039,6 +5171,9 @@ lambda-worker-otel = [ { name = "opentelemetry-sdk-extension-aws" }, { name = "opentelemetry-semantic-conventions" }, ] +langgraph = [ + { name = "langgraph" }, +] langsmith = [ { name = "langsmith" }, ] @@ -5061,6 +5196,7 @@ dev = [ { name = "googleapis-common-protos" }, { name = "grpcio-tools" }, { name = "httpx" }, + { name = "langgraph" }, { name = "langsmith" }, { name = "litellm" }, { name = "maturin" }, @@ -5096,6 +5232,7 @@ requires-dist = [ { name = "aioboto3", marker = "extra == 'aioboto3'", specifier = ">=10.4.0" }, { name = "google-adk", marker = "extra == 'google-adk'", specifier = ">=1.27.0,<2" }, { name = "grpcio", marker = "extra == 'grpc'", specifier = ">=1.48.2,<2" }, + { name = "langgraph", marker = "extra == 'langgraph'", specifier = ">=1.1.0" }, { name = "langsmith", marker = "extra == 'langsmith'", specifier = ">=0.7.0,<0.8" }, { name = "mcp", marker = "extra == 'openai-agents'", specifier = ">=1.9.4,<2" }, { name = "nexus-rpc", specifier = "==1.4.0" }, @@ -5114,7 +5251,7 @@ requires-dist = [ { name = "types-protobuf", specifier = ">=3.20,<7.0.0" }, { name = "typing-extensions", specifier = ">=4.2.0,<5" }, ] -provides-extras = ["grpc", "opentelemetry", "pydantic", "openai-agents", "google-adk", "langsmith", "lambda-worker-otel", "aioboto3"] 
+provides-extras = ["grpc", "opentelemetry", "pydantic", "openai-agents", "google-adk", "langgraph", "langsmith", "lambda-worker-otel", "aioboto3"] [package.metadata.requires-dev] dev = [ @@ -5123,6 +5260,7 @@ dev = [ { name = "googleapis-common-protos", specifier = "==1.70.0" }, { name = "grpcio-tools", specifier = ">=1.48.2,<2" }, { name = "httpx", specifier = ">=0.28.1" }, + { name = "langgraph", specifier = ">=1.1.0" }, { name = "langsmith", specifier = ">=0.7.0,<0.8" }, { name = "litellm", specifier = ">=1.83.0" }, { name = "maturin", specifier = ">=1.8.2" },