diff --git a/libraries/microsoft-agents-a365-observability-core/microsoft_agents_a365/observability/core/models/messages.py b/libraries/microsoft-agents-a365-observability-core/microsoft_agents_a365/observability/core/models/messages.py index 962f3932..7302988a 100644 --- a/libraries/microsoft-agents-a365-observability-core/microsoft_agents_a365/observability/core/models/messages.py +++ b/libraries/microsoft-agents-a365-observability-core/microsoft_agents_a365/observability/core/models/messages.py @@ -66,7 +66,7 @@ class ToolCallRequestPart: name: str id: str | None = None - arguments: dict[str, object] | list[object] | None = None + arguments: dict[str, object] | list[object] | str | None = None type: str = field(default="tool_call", init=False) diff --git a/libraries/microsoft-agents-a365-observability-extensions-agentframework/docs/design.md b/libraries/microsoft-agents-a365-observability-extensions-agentframework/docs/design.md index 72cca8e1..0f90f7b3 100644 --- a/libraries/microsoft-agents-a365-observability-extensions-agentframework/docs/design.md +++ b/libraries/microsoft-agents-a365-observability-extensions-agentframework/docs/design.md @@ -55,5 +55,5 @@ microsoft_agents_a365/observability/extensions/agentframework/ ## Dependencies -- `agent-framework-azure-ai` - Microsoft Agents SDK +- `agent-framework` - Microsoft Agents SDK - `microsoft-agents-a365-observability-core` - Core observability diff --git a/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/message_mapper.py b/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/message_mapper.py new file mode 100644 index 00000000..1e1f5fba --- /dev/null +++ b/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/message_mapper.py @@ -0,0 +1,213 @@ +# Copyright (c) 
Microsoft Corporation. +# Licensed under the MIT License. + +"""Maps Agent Framework span tag messages to A365 versioned message format. + +Agent Framework sets ``gen_ai.input.messages`` / ``gen_ai.output.messages`` as span +tags containing JSON arrays of ``{role, parts[{type, content}], finish_reason?}``. +This mapper converts them to :class:`InputMessages` / :class:`OutputMessages`. +""" + +from __future__ import annotations + +import json +import logging +from typing import Any + +from microsoft_agents_a365.observability.core.message_utils import serialize_messages +from microsoft_agents_a365.observability.core.models.messages import ( + BlobPart, + ChatMessage, + FilePart, + GenericPart, + InputMessages, + MessagePart, + MessageRole, + OutputMessage, + OutputMessages, + ReasoningPart, + TextPart, + ToolCallRequestPart, + ToolCallResponsePart, + UriPart, +) + +logger = logging.getLogger(__name__) + +_ROLE_MAP: dict[str, MessageRole] = { + "system": MessageRole.SYSTEM, + "user": MessageRole.USER, + "assistant": MessageRole.ASSISTANT, + "tool": MessageRole.TOOL, +} + + +def map_input_messages(messages_json: str) -> str | None: + """Map a ``gen_ai.input.messages`` tag value to a serialized A365 JSON string. + + Args: + messages_json: The raw JSON string from the span attribute. + + Returns: + Serialized :class:`InputMessages` JSON string, or ``None`` if the + input is empty or cannot be parsed. 
+ """ + try: + raw = json.loads(messages_json) + except (json.JSONDecodeError, TypeError): + logger.debug("Failed to parse input messages JSON: %s", messages_json[:200]) + return None + + if not isinstance(raw, list): + return None + + chat_messages: list[ChatMessage] = [] + for msg in raw: + if not isinstance(msg, dict): + continue + role = _map_role(msg.get("role"), MessageRole.USER) + parts = _map_parts(msg) + if parts: + chat_messages.append(ChatMessage(role=role, parts=parts, name=msg.get("name"))) + + if not chat_messages: + return None + + return serialize_messages(InputMessages(messages=chat_messages)) + + +def map_output_messages(messages_json: str) -> str | None: + """Map a ``gen_ai.output.messages`` tag value to a serialized A365 JSON string. + + Args: + messages_json: The raw JSON string from the span attribute. + + Returns: + Serialized :class:`OutputMessages` JSON string, or ``None`` if the + input is empty or cannot be parsed. + """ + try: + raw = json.loads(messages_json) + except (json.JSONDecodeError, TypeError): + logger.debug("Failed to parse output messages JSON: %s", messages_json[:200]) + return None + + if not isinstance(raw, list): + return None + + output_messages: list[OutputMessage] = [] + for msg in raw: + if not isinstance(msg, dict): + continue + role = _map_role(msg.get("role"), MessageRole.ASSISTANT) + parts = _map_parts(msg) + finish_reason = msg.get("finish_reason") + if parts: + output_messages.append( + OutputMessage(role=role, parts=parts, finish_reason=finish_reason) + ) + + if not output_messages: + return None + + return serialize_messages(OutputMessages(messages=output_messages)) + + +# --------------------------------------------------------------------------- +# Internal helpers +# --------------------------------------------------------------------------- + + +def _map_role(role: str | None, default: MessageRole) -> MessageRole: + """Map a raw role string to a :class:`MessageRole` enum.""" + if not role: + return default 
+ return _ROLE_MAP.get(role.lower(), default) + + +def _map_parts(msg: dict[str, Any]) -> list[MessagePart]: + """Map all parts in a raw message dict.""" + parts_data = msg.get("parts", []) + if not isinstance(parts_data, list): + return [] + mapped = [_map_single_part(p) for p in parts_data if isinstance(p, dict)] + return [p for p in mapped if p is not None] + + +def _map_single_part(part: dict[str, Any]) -> MessagePart | None: + """Map a single raw part dict to the appropriate A365 message part.""" + part_type = part.get("type", "") + + if part_type == "text": + content = part.get("content", "") + return TextPart(content=content) if content else None + + if part_type == "reasoning": + content = part.get("content", "") + return ReasoningPart(content=content) if content else None + + if part_type == "tool_call": + name = part.get("name") + if not name: + return None + return ToolCallRequestPart( + name=name, + id=part.get("id"), + arguments=_parse_arguments(part.get("arguments")), + ) + + if part_type == "tool_call_response": + return ToolCallResponsePart( + id=part.get("id"), + response=part.get("response"), + ) + + if part_type == "blob": + modality = part.get("modality", "") + content = part.get("content", "") + if not modality or not content: + return None + return BlobPart(modality=modality, content=content, mime_type=part.get("mime_type")) + + if part_type == "file": + modality = part.get("modality", "") + file_id = part.get("file_id", "") + if not modality or not file_id: + return None + return FilePart(modality=modality, file_id=file_id, mime_type=part.get("mime_type")) + + if part_type == "uri": + modality = part.get("modality", "") + uri = part.get("uri", "") + if not modality or not uri: + return None + return UriPart(modality=modality, uri=uri, mime_type=part.get("mime_type")) + + # Fallback: GenericPart for unknown/future part types + data = {k: v for k, v in part.items() if k != "type"} + return GenericPart(type=part_type, data=data) if part_type 
else None + + +def _parse_arguments( + raw: dict[str, object] | list[object] | str | None, +) -> dict[str, object] | list[object] | str | None: + """Return structured arguments when possible, keeping strings that fail JSON parsing. + + If ``raw`` is already a ``dict`` or ``list``, it is returned as-is. + If ``raw`` is a ``str``, an attempt is made to parse it with :func:`json.loads`. + A successfully parsed ``dict`` or ``list`` is returned; any other parsed + value (scalar) or a parse failure returns the original string. + ``None`` is returned unchanged. + """ + if raw is None or isinstance(raw, (dict, list)): + return raw + if isinstance(raw, str): + try: + parsed = json.loads(raw) + except (json.JSONDecodeError, ValueError): + logger.debug("Failed to parse tool call arguments as JSON: %s", raw[:200]) + return raw + if isinstance(parsed, (dict, list)): + return parsed + return raw + return raw # pragma: no cover — unexpected type, pass through unchanged diff --git a/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/span_enricher.py b/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/span_enricher.py index 158db985..ed9d4bfa 100644 --- a/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/span_enricher.py +++ b/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/span_enricher.py @@ -2,8 +2,10 @@ # Licensed under the MIT License. 
from microsoft_agents_a365.observability.core.constants import ( + CHAT_OPERATION_NAME, EXECUTE_TOOL_OPERATION_NAME, GEN_AI_INPUT_MESSAGES_KEY, + GEN_AI_OPERATION_NAME_KEY, GEN_AI_OUTPUT_MESSAGES_KEY, GEN_AI_TOOL_ARGS_KEY, GEN_AI_TOOL_CALL_RESULT_KEY, @@ -12,33 +14,50 @@ from microsoft_agents_a365.observability.core.exporters.enriched_span import EnrichedReadableSpan from opentelemetry.sdk.trace import ReadableSpan -from .utils import extract_input_content, extract_output_content +from .message_mapper import map_input_messages, map_output_messages # Agent Framework specific attribute keys AF_TOOL_CALL_ARGUMENTS_KEY = "gen_ai.tool.call.arguments" AF_TOOL_CALL_RESULT_KEY = "gen_ai.tool.call.result" +_MESSAGE_OPERATIONS = {INVOKE_AGENT_OPERATION_NAME, CHAT_OPERATION_NAME} + def enrich_agent_framework_span(span: ReadableSpan) -> ReadableSpan: + """Enricher function for Agent Framework spans. + + For ``invoke_agent`` and ``chat`` operations, maps the raw + ``gen_ai.input.messages`` / ``gen_ai.output.messages`` JSON arrays + to the A365 versioned format. + + For ``execute_tool`` operations, maps Agent Framework tool attribute + keys to the A365 standard keys. """ - Enricher function for Agent Framework spans. 
- """ - extra_attributes = {} + extra_attributes: dict[str, str] = {} attributes = span.attributes or {} + operation = attributes.get(GEN_AI_OPERATION_NAME_KEY, "") + + is_message_span = operation in _MESSAGE_OPERATIONS or span.name.startswith( + INVOKE_AGENT_OPERATION_NAME + ) + is_tool_span = operation == EXECUTE_TOOL_OPERATION_NAME or span.name.startswith( + EXECUTE_TOOL_OPERATION_NAME + ) - # Only extract content for invoke_agent spans - if span.name.startswith(INVOKE_AGENT_OPERATION_NAME): - # Extract all text content from input messages + if is_message_span: input_messages = attributes.get(GEN_AI_INPUT_MESSAGES_KEY) if input_messages: - extra_attributes[GEN_AI_INPUT_MESSAGES_KEY] = extract_input_content(input_messages) + mapped = map_input_messages(input_messages) + if mapped is not None: + extra_attributes[GEN_AI_INPUT_MESSAGES_KEY] = mapped output_messages = attributes.get(GEN_AI_OUTPUT_MESSAGES_KEY) if output_messages: - extra_attributes[GEN_AI_OUTPUT_MESSAGES_KEY] = extract_output_content(output_messages) + mapped = map_output_messages(output_messages) + if mapped is not None: + extra_attributes[GEN_AI_OUTPUT_MESSAGES_KEY] = mapped - # Map tool attributes for execute_tool spans - elif span.name.startswith(EXECUTE_TOOL_OPERATION_NAME): + elif is_tool_span: if AF_TOOL_CALL_ARGUMENTS_KEY in attributes: extra_attributes[GEN_AI_TOOL_ARGS_KEY] = attributes[AF_TOOL_CALL_ARGUMENTS_KEY] diff --git a/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/span_processor.py b/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/span_processor.py index 09fddeea..448d43f5 100644 --- a/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/span_processor.py +++ 
b/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/span_processor.py @@ -7,12 +7,11 @@ class AgentFrameworkSpanProcessor(SpanProcessor): """SpanProcessor for Agent Framework. - Note: The span processing logic was removed as GEN_AI_EVENT_CONTENT is no longer used. - This processor is kept for interface compatibility. + Attribute mutation happens in the enricher (via :class:`EnrichedReadableSpan`) + because OTel Python ``ReadableSpan`` is immutable after ``on_end``. + The enricher is invoked at export time by the ``EnrichingSpanProcessor``. """ - TOOL_CALL_RESULT_TAG = "gen_ai.tool.call.result" - def __init__(self, service_name: str | None = None): self.service_name = service_name super().__init__() @@ -22,5 +21,9 @@ def on_start(self, span, parent_context): pass def on_end(self, span): - """Called when a span ends. Intentionally a no-op.""" + """Called when a span ends. Intentionally a no-op. + + Message mapping is handled by the span enricher at export time + since ReadableSpan is immutable in the Python OTel SDK. 
+ """ pass diff --git a/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/trace_instrumentor.py b/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/trace_instrumentor.py index 41d63247..a241018f 100644 --- a/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/trace_instrumentor.py +++ b/libraries/microsoft-agents-a365-observability-extensions-agentframework/microsoft_agents_a365/observability/extensions/agentframework/trace_instrumentor.py @@ -23,7 +23,7 @@ # ----------------------------- # 3) The Instrumentor class # ----------------------------- -_instruments = ("agent-framework-azure-ai >= 1.0.0",) +_instruments = ("agent-framework >= 1.0.0",) class AgentFrameworkInstrumentor(BaseInstrumentor): diff --git a/libraries/microsoft-agents-a365-observability-extensions-langchain/microsoft_agents_a365/observability/extensions/langchain/message_mapper.py b/libraries/microsoft-agents-a365-observability-extensions-langchain/microsoft_agents_a365/observability/extensions/langchain/message_mapper.py new file mode 100644 index 00000000..0c9f51ee --- /dev/null +++ b/libraries/microsoft-agents-a365-observability-extensions-langchain/microsoft_agents_a365/observability/extensions/langchain/message_mapper.py @@ -0,0 +1,253 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Maps LangChain messages to A365 versioned message format. + +LangChain provides ``BaseMessage`` objects (``HumanMessage``, ``AIMessage``, +``SystemMessage``, ``ToolMessage``) in ``run.inputs["messages"]`` and +``run.outputs["generations"]``. This mapper converts them to the A365 +versioned format (``InputMessages`` / ``OutputMessages``). 
+""" + +from __future__ import annotations + +import json +import logging +from collections.abc import Iterable, Mapping +from typing import Any + +from langchain_core.messages import BaseMessage + +from microsoft_agents_a365.observability.core.message_utils import serialize_messages +from microsoft_agents_a365.observability.core.models.messages import ( + ChatMessage, + InputMessages, + MessagePart, + MessageRole, + OutputMessage, + OutputMessages, + TextPart, + ToolCallRequestPart, + ToolCallResponsePart, +) + +logger = logging.getLogger(__name__) + +_ROLE_MAP: dict[str, MessageRole] = { + "human": MessageRole.USER, + "user": MessageRole.USER, + "ai": MessageRole.ASSISTANT, + "assistant": MessageRole.ASSISTANT, + "system": MessageRole.SYSTEM, + "tool": MessageRole.TOOL, +} + + +def map_input_messages(inputs: Mapping[str, Any] | None) -> str | None: + """Map LangChain input messages to a serialized A365 InputMessages JSON string. + + Args: + inputs: The ``run.inputs`` mapping from a LangChain run. + + Returns: + Serialized InputMessages JSON string, or None if no messages found. 
+ """ + if not inputs or not isinstance(inputs, Mapping): + return None + + multiple_messages = inputs.get("messages") + if not multiple_messages or not isinstance(multiple_messages, Iterable): + return None + + first_messages = next(iter(multiple_messages), None) + if not first_messages: + return None + + # Normalize to a list + if isinstance(first_messages, BaseMessage): + first_messages = [first_messages] + elif not isinstance(first_messages, list): + return None + + chat_messages: list[ChatMessage] = [] + for msg in first_messages: + mapped = _map_base_message(msg) + if mapped is not None: + chat_messages.append(mapped) + + if not chat_messages: + return None + + return serialize_messages(InputMessages(messages=chat_messages)) + + +def map_output_messages(outputs: Mapping[str, Any] | None) -> str | None: + """Map LangChain output messages to a serialized A365 OutputMessages JSON string. + + Args: + outputs: The ``run.outputs`` mapping from a LangChain run. + + Returns: + Serialized OutputMessages JSON string, or None if no messages found. 
+ """ + if not outputs or not isinstance(outputs, Mapping): + return None + + multiple_generations = outputs.get("generations") + if not multiple_generations or not isinstance(multiple_generations, Iterable): + return None + + first_generations = next(iter(multiple_generations), None) + if not first_generations or not isinstance(first_generations, Iterable): + return None + + output_messages: list[OutputMessage] = [] + for generation in first_generations: + if not isinstance(generation, Mapping): + continue + message_data = generation.get("message") + if message_data is None: + continue + + mapped = _map_to_output_message(message_data, generation) + if mapped is not None: + output_messages.append(mapped) + + if not output_messages: + return None + + return serialize_messages(OutputMessages(messages=output_messages)) + + +# --------------------------------------------------------------------------- +# Internal helpers +# --------------------------------------------------------------------------- + + +def _map_role( + msg: BaseMessage | Mapping[str, Any], default: MessageRole = MessageRole.USER +) -> MessageRole: + """Extract the role from a LangChain message.""" + if isinstance(msg, BaseMessage): + role_str = msg.type + elif isinstance(msg, Mapping): + # Direct type field (e.g. 
"human", "ai", "system", "tool") + role_str = msg.get("type", "") + # LC serialization uses "constructor" as type with role in kwargs + if role_str == "constructor": + kwargs = msg.get("kwargs", {}) + role_str = kwargs.get("type", "") if isinstance(kwargs, Mapping) else "" + # Also check "role" field + if not role_str or role_str not in _ROLE_MAP: + role_str = msg.get("role", role_str) + else: + role_str = "" + return _ROLE_MAP.get(role_str.lower(), default) + + +def _extract_parts(msg: BaseMessage | Mapping[str, Any]) -> list[MessagePart]: + """Extract message parts from a LangChain message.""" + parts: list[MessagePart] = [] + + # Extract content and tool_calls + if isinstance(msg, BaseMessage): + content = msg.content + tool_calls = getattr(msg, "tool_calls", None) + msg_type = msg.type + tool_call_id = getattr(msg, "tool_call_id", None) + elif isinstance(msg, Mapping): + # Handle LC serialization: {"type": "constructor", "kwargs": {content, type, ...}} + kwargs = msg.get("kwargs", {}) if msg.get("type") == "constructor" else msg + if not isinstance(kwargs, Mapping): + kwargs = msg + content = kwargs.get("content", "") or msg.get("content", "") + tool_calls = kwargs.get("tool_calls") or msg.get("tool_calls") + msg_type = kwargs.get("type", "") or msg.get("type", "") + tool_call_id = kwargs.get("tool_call_id") or msg.get("tool_call_id") + else: + return parts + + # Tool response (from ToolMessage) — handle before text to avoid double-counting + if msg_type == "tool": + response = content if isinstance(content, str) else str(content) if content else "" + if response or tool_call_id: + parts.append(ToolCallResponsePart(id=tool_call_id, response=response)) + return parts + + # Text content + if content and isinstance(content, str) and content.strip(): + parts.append(TextPart(content=content)) + + # Tool calls (from AIMessage.tool_calls) + if tool_calls and isinstance(tool_calls, list): + for tc in tool_calls: + if not isinstance(tc, Mapping): + continue + name = 
tc.get("name") + if not name: + continue + parts.append( + ToolCallRequestPart( + name=name, + id=tc.get("id"), + arguments=_parse_tool_call_args(name, tc.get("args")), + ) + ) + + return parts + + +def _map_base_message(msg: BaseMessage | Mapping[str, Any]) -> ChatMessage | None: + """Map a single LangChain message to an A365 ChatMessage.""" + role = _map_role(msg) + parts = _extract_parts(msg) + if not parts: + return None + + name = None + if isinstance(msg, BaseMessage): + name = getattr(msg, "name", None) + + return ChatMessage(role=role, parts=parts, name=name) + + +def _map_to_output_message( + message_data: BaseMessage | Mapping[str, Any], + generation: Mapping[str, Any], +) -> OutputMessage | None: + """Map a LangChain generation to an A365 OutputMessage.""" + role = _map_role(message_data, default=MessageRole.ASSISTANT) + parts = _extract_parts(message_data) + if not parts: + return None + + # Extract finish_reason from generation metadata + finish_reason = None + gen_info = generation.get("generation_info") + if isinstance(gen_info, Mapping): + finish_reason = gen_info.get("finish_reason") + + return OutputMessage(role=role, parts=parts, finish_reason=finish_reason) + + +def _parse_tool_call_args( + name: str, + args: dict[str, object] | list[object] | str | None, +) -> dict[str, object] | list[object] | str | None: + """Return structured tool-call arguments when possible. + + ``dict`` and ``list`` values are returned as-is. A ``str`` is attempted + as JSON; if parsing succeeds and yields a ``dict`` or ``list``, the parsed + value is returned. Otherwise the original string is kept so no information + is lost. 
+ """ + if args is None or isinstance(args, (dict, list)): + return args + if isinstance(args, str): + try: + decoded = json.loads(args) + return decoded if isinstance(decoded, (dict, list)) else args + except (json.JSONDecodeError, ValueError): + logger.debug("Failed to parse tool call args for '%s': %s", name, args) + return args + return args # pragma: no cover — unexpected type, pass through unchanged diff --git a/libraries/microsoft-agents-a365-observability-extensions-langchain/microsoft_agents_a365/observability/extensions/langchain/utils.py b/libraries/microsoft-agents-a365-observability-extensions-langchain/microsoft_agents_a365/observability/extensions/langchain/utils.py index 256496ad..6114dba7 100644 --- a/libraries/microsoft-agents-a365-observability-extensions-langchain/microsoft_agents_a365/observability/extensions/langchain/utils.py +++ b/libraries/microsoft-agents-a365-observability-extensions-langchain/microsoft_agents_a365/observability/extensions/langchain/utils.py @@ -2,7 +2,7 @@ # Licensed under the MIT License. import json -from collections.abc import Iterable, Iterator, Mapping, Sequence +from collections.abc import Iterable, Iterator, Mapping from copy import deepcopy from typing import Any @@ -36,6 +36,9 @@ stop_on_exception, ) +from .message_mapper import map_input_messages as _map_input +from .message_mapper import map_output_messages as _map_output + IGNORED_EXCEPTION_PATTERNS = [ r"^Command\(", r"^ParentCommand\(", @@ -207,42 +210,13 @@ def _parse_message_data(message_data: Mapping[str, Any] | None) -> Iterator[tupl def input_messages( inputs: Mapping[str, Any] | None, ) -> Iterator[tuple[str, str]]: - """Yields chat messages as a JSON array of content strings.""" + """Yields input messages in A365 versioned format.""" if not inputs: return - if not isinstance(inputs, Mapping): - return - # There may be more than one set of messages. We'll use just the first set. 
- if not (multiple_messages := inputs.get("messages")): - return - if not isinstance(multiple_messages, Iterable): - return - # This will only get the first set of messages. - if not (first_messages := next(iter(multiple_messages), None)): - return - contents: list[str] = [] - if isinstance(first_messages, list): - for message_data in first_messages: - if isinstance(message_data, BaseMessage): - if hasattr(message_data, "content") and message_data.content: - contents.append(str(message_data.content)) - elif hasattr(message_data, "get"): - if content := message_data.get("content"): - contents.append(str(content)) - elif kwargs := message_data.get("kwargs"): - if hasattr(kwargs, "get") and (content := kwargs.get("content")): - contents.append(str(content)) - elif isinstance(first_messages, BaseMessage): - if hasattr(first_messages, "content") and first_messages.content: - contents.append(str(first_messages.content)) - elif hasattr(first_messages, "get"): - if content := first_messages.get("content"): - contents.append(str(content)) - elif isinstance(first_messages, Sequence) and len(first_messages) == 2: - role, content = first_messages - contents.append(str(content)) - if contents: - yield GEN_AI_INPUT_MESSAGES_KEY, safe_json_dumps(contents) + + mapped = _map_input(inputs) + if mapped is not None: + yield GEN_AI_INPUT_MESSAGES_KEY, mapped @stop_on_exception @@ -266,11 +240,12 @@ def metadata(run: Run) -> Iterator[tuple[str, str]]: def output_messages( outputs: Mapping[str, Any] | None, ) -> Iterator[tuple[str, str]]: - """Yields chat messages as a JSON array of content strings.""" + """Yields output messages in A365 versioned format.""" if not outputs: return if not isinstance(outputs, Mapping): return + # Preserve response ID extraction output_type = outputs.get("type") if output_type and output_type.lower() == "llmresult": llm_output = outputs.get("llm_output") @@ -278,32 +253,10 @@ def output_messages( response_id = llm_output.get("id") if response_id: yield 
GEN_AI_RESPONSE_ID_KEY, response_id - # There may be more than one set of generations. We'll use just the first set. - if not (multiple_generations := outputs.get("generations")): - return - if not isinstance(multiple_generations, Iterable): - return - # This will only get the first set of generations. - if not (first_generations := next(iter(multiple_generations), None)): - return - if not isinstance(first_generations, Iterable): - return - contents: list[str] = [] - for generation in first_generations: - if not isinstance(generation, Mapping): - continue - if message_data := generation.get("message"): - if isinstance(message_data, BaseMessage): - if hasattr(message_data, "content") and message_data.content: - contents.append(str(message_data.content)) - elif hasattr(message_data, "get"): - if content := message_data.get("content"): - contents.append(str(content)) - elif kwargs := message_data.get("kwargs"): - if hasattr(kwargs, "get") and (content := kwargs.get("content")): - contents.append(str(content)) - if contents: - yield GEN_AI_OUTPUT_MESSAGES_KEY, safe_json_dumps(contents) + + mapped = _map_output(outputs) + if mapped is not None: + yield GEN_AI_OUTPUT_MESSAGES_KEY, mapped @stop_on_exception diff --git a/pyproject.toml b/pyproject.toml index a90176b5..aa9866ab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ dev-dependencies = [ "agent-framework", "azure-identity", "openai-agents", + "langchain-openai", ] # Override semantic-kernel's azure-ai-projects constraint to allow 2.x diff --git a/tests/observability/extensions/agentframework/integration/test_message_format.py b/tests/observability/extensions/agentframework/integration/test_message_format.py new file mode 100644 index 00000000..ff260a0f --- /dev/null +++ b/tests/observability/extensions/agentframework/integration/test_message_format.py @@ -0,0 +1,236 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +"""Integration tests for AgentFramework message format mapping. + +These tests use the real A365 observability pipeline: + configure() → get_tracer_provider() → AgentFrameworkInstrumentor +with a SpanCapturingExporter inside _EnrichingBatchSpanProcessor, so spans +are captured after the enricher has run. This exercises the full code path: + auto-instrumentation → enricher → mapper → serialize → export. +""" + +import json +import time +from typing import Any + +import pytest +from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult + +# AgentFramework SDK +try: + from agent_framework import RawAgent, tool + from agent_framework.azure import AzureOpenAIChatClient + from agent_framework.observability import enable_instrumentation + from azure.identity import AzureCliCredential +except ImportError: + pytest.skip( + "AgentFramework library and dependencies required for integration tests", + allow_module_level=True, + ) + +from microsoft_agents_a365.observability.core import configure, get_tracer_provider +from microsoft_agents_a365.observability.core.constants import ( + GEN_AI_INPUT_MESSAGES_KEY, + GEN_AI_OPERATION_NAME_KEY, + GEN_AI_OUTPUT_MESSAGES_KEY, +) +from microsoft_agents_a365.observability.core.exporters.enriching_span_processor import ( + _EnrichingBatchSpanProcessor, +) +from microsoft_agents_a365.observability.extensions.agentframework import ( + AgentFrameworkInstrumentor, +) + + +@tool +def get_weather(city: str) -> str: + """Get the current weather for a city. + + Args: + city: The city name to get weather for. + + Returns: + A string describing the weather. + """ + return f"The weather in {city} is sunny, 22°C." + + +class SpanCapturingExporter(SpanExporter): + """Exporter that collects enriched spans in-memory. + + When used inside _EnrichingBatchSpanProcessor, spans arrive here + after the registered enricher has already run. 
+ """ + + def __init__(self) -> None: + self.spans: list[ReadableSpan] = [] + + def export(self, spans: list[ReadableSpan]) -> SpanExportResult: + self.spans.extend(spans) + return SpanExportResult.SUCCESS + + def shutdown(self) -> None: + pass + + def force_flush(self, timeout_millis: int = 30000) -> bool: + return True + + +@pytest.mark.integration +class TestAgentFrameworkMessageFormat: + """Capture real AgentFramework span attributes after enrichment + and verify the A365 versioned message format.""" + + @pytest.fixture(autouse=True) + def setup_observability(self) -> None: + """Set up A365 observability with AgentFrameworkInstrumentor. + + A SpanCapturingExporter is attached via _EnrichingBatchSpanProcessor + so that spans are captured after the enricher has run. + """ + if not hasattr(TestAgentFrameworkMessageFormat, "_exporter"): + configure( + service_name="integration-test-message-format", + service_namespace="agent365-tests", + logger_name="test-logger", + ) + + exporter = SpanCapturingExporter() + provider = get_tracer_provider() + provider.add_span_processor( + _EnrichingBatchSpanProcessor( + exporter, + max_queue_size=100, + schedule_delay_millis=100, + max_export_batch_size=100, + ) + ) + + enable_instrumentation(enable_sensitive_data=True) + + instrumentor = AgentFrameworkInstrumentor() + instrumentor.instrument() + + TestAgentFrameworkMessageFormat._exporter = exporter + TestAgentFrameworkMessageFormat._instrumentor = instrumentor + + self.exporter = TestAgentFrameworkMessageFormat._exporter + self.exporter.spans.clear() + + @pytest.fixture + def chat_client(self, azure_openai_config: dict[str, Any]) -> AzureOpenAIChatClient: + """Create a real Azure OpenAI chat client.""" + return AzureOpenAIChatClient( + endpoint=azure_openai_config["endpoint"], + credential=AzureCliCredential(), + deployment_name=azure_openai_config["deployment"], + api_version=azure_openai_config["api_version"], + ) + + def _find_chat_spans(self) -> list[ReadableSpan]: + """Find 
exported spans that have gen_ai.input.messages. + + Forces a flush so batched spans are exported before inspection. + """ + get_tracer_provider().force_flush() + time.sleep(0.5) + return [ + s + for s in self.exporter.spans + if s.attributes and GEN_AI_INPUT_MESSAGES_KEY in s.attributes + ] + + @pytest.mark.asyncio + async def test_simple_chat_message_mapping(self, chat_client: AzureOpenAIChatClient) -> None: + """Simple chat: verify exported spans contain versioned A365 messages + after enrichment (no manual mapper call).""" + agent = RawAgent( + client=chat_client, + instructions="You are a helpful assistant. Reply in one sentence.", + tools=[], + ) + + result = await agent.run("What is the capital of France?") + assert result is not None + assert len(result.text) > 0 + + chat_spans = self._find_chat_spans() + assert len(chat_spans) > 0, ( + f"No chat spans found. All spans: {[s.name for s in self.exporter.spans]}" + ) + + attrs = dict(chat_spans[-1].attributes or {}) + + # --- Input messages: enriched to versioned format --- + input_data = json.loads(attrs[GEN_AI_INPUT_MESSAGES_KEY]) + # Enricher should have produced versioned wrapper for chat spans + if isinstance(input_data, dict): + assert input_data["version"] == "0.1.0" + messages = input_data["messages"] + else: + messages = input_data + + roles = [m["role"] for m in messages] + assert "system" in roles + assert "user" in roles + for msg in messages: + for part in msg["parts"]: + assert "type" in part + + # --- Output messages: enriched to versioned format --- + output_data = json.loads(attrs[GEN_AI_OUTPUT_MESSAGES_KEY]) + if isinstance(output_data, dict): + assert output_data["version"] == "0.1.0" + out_messages = output_data["messages"] + else: + out_messages = output_data + + assert out_messages[0]["role"] == "assistant" + assert any(p["type"] == "text" for p in out_messages[0]["parts"]) + + print(f"\n=== Enriched input ===\n{json.dumps(input_data, indent=2)}") + print(f"\n=== Enriched output 
===\n{json.dumps(output_data, indent=2)}") + + @pytest.mark.asyncio + async def test_tool_call_message_mapping(self, chat_client: AzureOpenAIChatClient) -> None: + """Tool-calling chat: verify tool_call and tool_call_response parts + survive enrichment in exported spans.""" + agent = RawAgent( + client=chat_client, + instructions="You are a weather assistant. Always use the get_weather function.", + tools=[get_weather], + ) + + result = await agent.run("What's the weather in Seattle?") + assert result is not None + assert len(result.text) > 0 + + chat_spans = self._find_chat_spans() + assert len(chat_spans) > 0 + + print(f"\n=== All exported spans ({len(self.exporter.spans)}) ===") + for s in self.exporter.spans: + op = (s.attributes or {}).get(GEN_AI_OPERATION_NAME_KEY, "(none)") + print(f" {s.name} | op={op}") + + # Collect part types from exported (enriched) spans + part_types: set[str] = set() + for span in chat_spans: + attrs = dict(span.attributes or {}) + for key in (GEN_AI_INPUT_MESSAGES_KEY, GEN_AI_OUTPUT_MESSAGES_KEY): + raw = attrs.get(key) + if not raw: + continue + data = json.loads(raw) + messages = data["messages"] if isinstance(data, dict) else data + for msg in messages: + for part in msg.get("parts", []): + part_types.add(part.get("type", "")) + + assert "tool_call" in part_types, f"Expected tool_call in exported parts: {part_types}" + assert "tool_call_response" in part_types, ( + f"Expected tool_call_response in exported parts: {part_types}" + ) + print(f"\n Exported part types: {part_types}") diff --git a/tests/observability/extensions/agentframework/integration/test_observability_pipeline.py b/tests/observability/extensions/agentframework/integration/test_observability_pipeline.py new file mode 100644 index 00000000..b5df805d --- /dev/null +++ b/tests/observability/extensions/agentframework/integration/test_observability_pipeline.py @@ -0,0 +1,421 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +"""End-to-end pipeline integration tests for AgentFramework observability. + +These tests verify the full A365 observability pipeline: + InvokeAgentScope → InferenceScope (auto-instrumented) → ExecuteToolScope (auto-instrumented) + +All three scope types must appear in a single trace with correct parent-child +relationships and A365 message format attributes. +""" + +import json +import os +import time + +import pytest +from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult + +try: + from agent_framework import RawAgent + from agent_framework import tool as ai_function + from agent_framework.azure import AzureOpenAIChatClient + from agent_framework.observability import enable_instrumentation + from azure.identity import AzureCliCredential +except ImportError: + pytest.skip( + "AgentFramework library and dependencies required for integration tests", + allow_module_level=True, + ) + +from microsoft_agents_a365.observability.core import configure, get_tracer_provider +from microsoft_agents_a365.observability.core.agent_details import AgentDetails +from microsoft_agents_a365.observability.core.constants import ( + EXECUTE_TOOL_OPERATION_NAME, + GEN_AI_INPUT_MESSAGES_KEY, + GEN_AI_OPERATION_NAME_KEY, + GEN_AI_OUTPUT_MESSAGES_KEY, + GEN_AI_REQUEST_MODEL_KEY, + GEN_AI_TOOL_NAME_KEY, + INVOKE_AGENT_OPERATION_NAME, + TENANT_ID_KEY, +) +from microsoft_agents_a365.observability.core.exporters.enriching_span_processor import ( + _EnrichingBatchSpanProcessor, +) +from microsoft_agents_a365.observability.core.invoke_agent_details import InvokeAgentScopeDetails +from microsoft_agents_a365.observability.core.invoke_agent_scope import InvokeAgentScope +from microsoft_agents_a365.observability.core.request import Request +from microsoft_agents_a365.observability.extensions.agentframework.trace_instrumentor import ( + AgentFrameworkInstrumentor, +) + +# Ensure A365 observability scopes are active for tests 
+os.environ["ENABLE_A365_OBSERVABILITY"] = "true" + + +@ai_function +def add_numbers(a: float, b: float) -> float: + """Add two numbers together. + + Args: + a: First number + b: Second number + + Returns: + The sum of a and b + """ + return a + b + + +class SpanCapturingExporter(SpanExporter): + """Exporter that collects spans in-memory after enrichment.""" + + def __init__(self) -> None: + self.spans: list[ReadableSpan] = [] + + def export(self, spans: list[ReadableSpan]) -> SpanExportResult: + self.spans.extend(spans) + return SpanExportResult.SUCCESS + + def shutdown(self) -> None: + pass + + def force_flush(self, timeout_millis: int = 30000) -> bool: + return True + + +def _get_span_attr(span: ReadableSpan, key: str) -> str | None: + """Safely get an attribute from a span.""" + attrs = span.attributes or {} + return attrs.get(key) + + +def _find_spans_by_operation(spans: list[ReadableSpan], operation_name: str) -> list[ReadableSpan]: + """Find spans matching a given gen_ai.operation.name.""" + return [s for s in spans if _get_span_attr(s, GEN_AI_OPERATION_NAME_KEY) == operation_name] + + +def _find_spans_by_name_prefix(spans: list[ReadableSpan], prefix: str) -> list[ReadableSpan]: + """Find spans whose name starts with a given prefix.""" + return [s for s in spans if s.name.startswith(prefix)] + + +@pytest.mark.integration +class TestAgentFrameworkObservabilityPipeline: + """End-to-end pipeline tests: InvokeAgent → Inference → ToolExecution. + + Verifies that wrapping an AgentFramework call inside InvokeAgentScope + produces a single trace with correct parent-child span hierarchy, + operation names, and A365 message format attributes. 
+ """ + + @pytest.fixture(autouse=True) + def setup_observability(self) -> None: + """Set up A365 observability with AgentFrameworkInstrumentor.""" + if not hasattr(TestAgentFrameworkObservabilityPipeline, "_exporter"): + configure( + service_name="integration-test-pipeline", + service_namespace="agent365-tests", + logger_name="test-logger", + ) + + exporter = SpanCapturingExporter() + provider = get_tracer_provider() + provider.add_span_processor( + _EnrichingBatchSpanProcessor( + exporter, + max_queue_size=100, + schedule_delay_millis=100, + max_export_batch_size=100, + ) + ) + + enable_instrumentation(enable_sensitive_data=True) + + instrumentor = AgentFrameworkInstrumentor() + instrumentor.instrument() + + TestAgentFrameworkObservabilityPipeline._exporter = exporter + TestAgentFrameworkObservabilityPipeline._instrumentor = instrumentor + + self.exporter = TestAgentFrameworkObservabilityPipeline._exporter + self.exporter.spans.clear() + + @pytest.fixture + def chat_client(self, azure_openai_config: dict) -> AzureOpenAIChatClient: + """Create a real Azure OpenAI chat client.""" + return AzureOpenAIChatClient( + endpoint=azure_openai_config["endpoint"], + credential=AzureCliCredential(), + deployment_name=azure_openai_config["deployment"], + api_version=azure_openai_config["api_version"], + ) + + @pytest.fixture + def agent_details(self, agent365_config: dict) -> AgentDetails: + """Create AgentDetails for the test agent.""" + return AgentDetails( + agent_id=agent365_config["agent_id"], + agent_name="pipeline-test-agent", + agent_description="Integration test agent for pipeline verification", + tenant_id=agent365_config["tenant_id"], + ) + + def _flush_and_collect(self) -> list[ReadableSpan]: + """Force flush and return all captured spans.""" + get_tracer_provider().force_flush() + time.sleep(0.5) + return list(self.exporter.spans) + + # ------------------------------------------------------------------ + # Test: Full pipeline with tool call + # 
------------------------------------------------------------------ + + @pytest.mark.asyncio + async def test_pipeline_invoke_agent_with_tool_call( + self, + chat_client: AzureOpenAIChatClient, + agent_details: AgentDetails, + agent365_config: dict, + ) -> None: + """Full pipeline: InvokeAgentScope wraps AgentFramework with tool. + + Verifies: + 1. All spans share the same trace_id + 2. invoke_agent span is the root (no parent) + 3. Inference (chat) spans are descendants of invoke_agent + 4. Tool execution spans are descendants of invoke_agent + 5. A365 message format on chat spans (versioned JSON) + 6. Correct operation names and key attributes + """ + request = Request(content="What is 15 + 27?", session_id="test-session-pipeline") + + agent = RawAgent( + client=chat_client, + instructions=( + "You are a math assistant. You MUST use the add_numbers function " + "for any arithmetic. Never compute in your head." + ), + tools=[add_numbers], + ) + + with InvokeAgentScope.start( + request=request, + scope_details=InvokeAgentScopeDetails(), + agent_details=agent_details, + ): + result = await agent.run("What is 15 + 27?") + + assert result is not None + assert len(result.text) > 0 + assert "42" in result.text, f"Expected '42' in response: {result.text}" + + spans = self._flush_and_collect() + assert len(spans) > 0, "No spans were captured" + + # --- Print span tree for debugging --- + print(f"\n=== Captured {len(spans)} spans ===") + for s in spans: + op = _get_span_attr(s, GEN_AI_OPERATION_NAME_KEY) or "(none)" + parent_id = f"{s.parent.span_id:016x}" if s.parent else "None" + print( + f" {s.name} | op={op} | trace={s.context.trace_id:032x} " + f"| span={s.context.span_id:016x} | parent={parent_id}" + ) + + # --- 1. 
All spans share the same trace_id --- + invoke_spans = _find_spans_by_name_prefix(spans, "invoke_agent") + assert len(invoke_spans) >= 1, ( + f"Expected at least 1 invoke_agent span, got: {[s.name for s in spans]}" + ) + invoke_span = invoke_spans[0] + trace_id = invoke_span.context.trace_id + + for s in spans: + assert s.context.trace_id == trace_id, ( + f"Span '{s.name}' has different trace_id: " + f"{s.context.trace_id:032x} vs {trace_id:032x}" + ) + + # --- 2. invoke_agent span is the root (no parent) --- + assert invoke_span.parent is None, ( + f"invoke_agent span should be root but has parent: {invoke_span.parent.span_id:016x}" + ) + + # --- 3. invoke_agent has correct operation name --- + assert _get_span_attr(invoke_span, GEN_AI_OPERATION_NAME_KEY) == INVOKE_AGENT_OPERATION_NAME + + # --- 4. Tenant ID is set --- + assert _get_span_attr(invoke_span, TENANT_ID_KEY) == agent365_config["tenant_id"] + + # --- 5. Chat (inference) spans are descendants of invoke_agent --- + chat_spans = [ + s + for s in spans + if _get_span_attr(s, GEN_AI_OPERATION_NAME_KEY) == "chat" + or (s.name.startswith("chat") and _get_span_attr(s, GEN_AI_REQUEST_MODEL_KEY)) + ] + assert len(chat_spans) >= 1, ( + f"Expected at least 1 chat span, got: {[s.name for s in spans]}" + ) + + invoke_span_id = invoke_span.context.span_id + for chat_span in chat_spans: + assert chat_span.parent is not None, ( + f"Chat span '{chat_span.name}' should have a parent" + ) + # Chat span should be a child of invoke_agent (directly or transitively) + self._assert_ancestor( + chat_span, + invoke_span_id, + spans, + f"Chat span '{chat_span.name}' is not a descendant of invoke_agent", + ) + + # --- 6. 
Tool execution spans are descendants of invoke_agent --- + tool_spans = _find_spans_by_name_prefix(spans, "execute_tool") + if not tool_spans: + # Also check by operation name + tool_spans = _find_spans_by_operation(spans, EXECUTE_TOOL_OPERATION_NAME) + + assert len(tool_spans) >= 1, ( + f"Expected at least 1 execute_tool span. All spans: {[s.name for s in spans]}" + ) + for tool_span in tool_spans: + assert tool_span.parent is not None, ( + f"Tool span '{tool_span.name}' should have a parent" + ) + self._assert_ancestor( + tool_span, + invoke_span_id, + spans, + f"Tool span '{tool_span.name}' is not a descendant of invoke_agent", + ) + + # --- 7. A365 message format on chat spans --- + for chat_span in chat_spans: + attrs = dict(chat_span.attributes or {}) + if GEN_AI_INPUT_MESSAGES_KEY in attrs: + input_data = json.loads(attrs[GEN_AI_INPUT_MESSAGES_KEY]) + if isinstance(input_data, dict) and "version" in input_data: + assert input_data["version"] == "0.1.0" + for msg in input_data["messages"]: + assert "role" in msg + assert "parts" in msg + + if GEN_AI_OUTPUT_MESSAGES_KEY in attrs: + output_data = json.loads(attrs[GEN_AI_OUTPUT_MESSAGES_KEY]) + if isinstance(output_data, dict) and "version" in output_data: + assert output_data["version"] == "0.1.0" + for msg in output_data["messages"]: + assert "role" in msg + assert "parts" in msg + + # --- 8. 
Tool spans have tool-specific attributes --- + for tool_span in tool_spans: + attrs = dict(tool_span.attributes or {}) + op = attrs.get(GEN_AI_OPERATION_NAME_KEY, "") + if op == EXECUTE_TOOL_OPERATION_NAME or tool_span.name.startswith("execute_tool"): + assert GEN_AI_TOOL_NAME_KEY in attrs or "add_numbers" in tool_span.name, ( + f"Tool span missing tool name attribute: {list(attrs.keys())}" + ) + + print("\n✓ All pipeline assertions passed") + + # ------------------------------------------------------------------ + # Test: Pipeline without tools (simple inference only) + # ------------------------------------------------------------------ + + @pytest.mark.asyncio + async def test_pipeline_invoke_agent_simple_inference( + self, + chat_client: AzureOpenAIChatClient, + agent_details: AgentDetails, + agent365_config: dict, + ) -> None: + """Pipeline with InvokeAgentScope + simple inference (no tools). + + Verifies invoke_agent → chat span hierarchy without tool calls. + """ + request = Request(content="Say hello", session_id="test-session-simple") + + agent = RawAgent( + client=chat_client, + instructions="You are a helpful assistant. 
Reply in one sentence.", + tools=[], + ) + + with InvokeAgentScope.start( + request=request, + scope_details=InvokeAgentScopeDetails(), + agent_details=agent_details, + ): + result = await agent.run("Say hello in exactly 5 words.") + + assert result is not None + assert len(result.text) > 0 + + spans = self._flush_and_collect() + assert len(spans) > 0, "No spans were captured" + + # All spans share the same trace_id + invoke_spans = _find_spans_by_name_prefix(spans, "invoke_agent") + assert len(invoke_spans) >= 1 + invoke_span = invoke_spans[0] + trace_id = invoke_span.context.trace_id + + for s in spans: + assert s.context.trace_id == trace_id + + # invoke_agent is root + assert invoke_span.parent is None + + # Chat spans are descendants + chat_spans = [ + s + for s in spans + if _get_span_attr(s, GEN_AI_OPERATION_NAME_KEY) == "chat" + or (s.name.startswith("chat") and _get_span_attr(s, GEN_AI_REQUEST_MODEL_KEY)) + ] + assert len(chat_spans) >= 1 + + invoke_span_id = invoke_span.context.span_id + for chat_span in chat_spans: + self._assert_ancestor( + chat_span, + invoke_span_id, + spans, + f"Chat span '{chat_span.name}' not a descendant of invoke_agent", + ) + + print(f"\n✓ Simple pipeline: {len(spans)} spans, hierarchy verified") + + # ------------------------------------------------------------------ + # Helper: assert ancestor relationship + # ------------------------------------------------------------------ + + def _assert_ancestor( + self, + span: ReadableSpan, + ancestor_span_id: int, + all_spans: list[ReadableSpan], + message: str, + ) -> None: + """Walk up the parent chain and assert that ancestor_span_id is found.""" + span_map = {s.context.span_id: s for s in all_spans} + current = span + visited: set[int] = set() + while current.parent is not None: + parent_id = current.parent.span_id + if parent_id == ancestor_span_id: + return + if parent_id in visited: + break + visited.add(parent_id) + current = span_map.get(parent_id) + if current is None: + 
break + raise AssertionError(message) diff --git a/tests/observability/extensions/agentframework/test_span_enricher.py b/tests/observability/extensions/agentframework/test_span_enricher.py index 49ee706f..950359e7 100644 --- a/tests/observability/extensions/agentframework/test_span_enricher.py +++ b/tests/observability/extensions/agentframework/test_span_enricher.py @@ -3,6 +3,7 @@ """Tests for Agent Framework span enricher.""" +import json import unittest from unittest.mock import Mock @@ -23,7 +24,7 @@ class TestAgentFrameworkSpanEnricher(unittest.TestCase): """Test suite for enrich_agent_framework_span function.""" def test_invoke_agent_span_enrichment(self): - """Test invoke_agent span extracts user input and assistant output text only.""" + """Test invoke_agent span maps messages to A365 versioned format.""" span = Mock( name="invoke_agent Agent365Assistant", attributes={ @@ -33,8 +34,24 @@ def test_invoke_agent_span_enrichment(self): ) span.name = "invoke_agent Agent365Assistant" result = enrich_agent_framework_span(span) - self.assertEqual(result.attributes[GEN_AI_INPUT_MESSAGES_KEY], '["Compute 15 % 4"]') - self.assertEqual(result.attributes[GEN_AI_OUTPUT_MESSAGES_KEY], '["Result is 3."]') + + # Input should be versioned format with user message + input_json = json.loads(result.attributes[GEN_AI_INPUT_MESSAGES_KEY]) + self.assertEqual(input_json["version"], "0.1.0") + self.assertEqual(len(input_json["messages"]), 1) + self.assertEqual(input_json["messages"][0]["role"], "user") + self.assertEqual(input_json["messages"][0]["parts"][0]["content"], "Compute 15 % 4") + + # Output should be versioned format: tool_call (no name -> filtered) + tool response + text + output_json = json.loads(result.attributes[GEN_AI_OUTPUT_MESSAGES_KEY]) + self.assertEqual(output_json["version"], "0.1.0") + # tool_call with no name is filtered, tool_call_response with no id/response passes, + # assistant text passes + assistant_msgs = [m for m in output_json["messages"] if m["role"] 
== "assistant"] + self.assertTrue(len(assistant_msgs) >= 1) + # At least one assistant message should have a text part + text_parts = [p for m in assistant_msgs for p in m["parts"] if p.get("type") == "text"] + self.assertEqual(text_parts[0]["content"], "Result is 3.") def test_execute_tool_span_enrichment(self): """Test execute_tool span maps tool arguments and result to standard keys.""" diff --git a/tests/observability/extensions/langchain/integration/__init__.py b/tests/observability/extensions/langchain/integration/__init__.py new file mode 100644 index 00000000..59e481eb --- /dev/null +++ b/tests/observability/extensions/langchain/integration/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. diff --git a/tests/observability/extensions/langchain/integration/conftest.py b/tests/observability/extensions/langchain/integration/conftest.py new file mode 100644 index 00000000..1df58995 --- /dev/null +++ b/tests/observability/extensions/langchain/integration/conftest.py @@ -0,0 +1,57 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. 
+ +"""Fixtures for LangChain observability integration tests.""" + +import os +from pathlib import Path +from typing import Any + +import pytest + +try: + from dotenv import load_dotenv + + current_file = Path(__file__) + tests_dir = current_file.parent.parent.parent.parent.parent + env_file = tests_dir / ".env" + if env_file.exists(): + load_dotenv(env_file) +except ImportError: + pass + + +def pytest_configure(config: pytest.Config) -> None: + """Add integration marker.""" + config.addinivalue_line("markers", "integration: marks tests as integration tests") + + +@pytest.fixture(scope="session") +def azure_openai_config() -> dict[str, Any]: + """Azure OpenAI configuration for integration tests.""" + api_key = os.getenv("AZURE_OPENAI_API_KEY") + endpoint = os.getenv("AZURE_OPENAI_ENDPOINT") + deployment = os.getenv("AZURE_OPENAI_DEPLOYMENT", "gpt-4o-mini") + api_version = os.getenv("AZURE_OPENAI_API_VERSION", "2024-08-01-preview") + + if not api_key or not endpoint: + pytest.skip("Integration tests require AZURE_OPENAI_API_KEY and AZURE_OPENAI_ENDPOINT") + + return { + "api_key": api_key, + "endpoint": endpoint, + "deployment": deployment, + "api_version": api_version, + } + + +@pytest.fixture(scope="session") +def agent365_config() -> dict[str, Any]: + """Microsoft Agent 365 configuration for integration tests.""" + tenant_id = os.getenv("AGENT365_TEST_TENANT_ID", "4d44f041-f91e-4d00-b107-61e47b26f5a8") + agent_id = os.getenv("AGENT365_TEST_AGENT_ID", "3bccd52b-daaa-4b11-af40-47443852137c") + + if not tenant_id: + pytest.skip("Integration tests require AGENT365_TEST_TENANT_ID") + + return {"tenant_id": tenant_id, "agent_id": agent_id} diff --git a/tests/observability/extensions/langchain/integration/test_message_format.py b/tests/observability/extensions/langchain/integration/test_message_format.py new file mode 100644 index 00000000..cc00c524 --- /dev/null +++ b/tests/observability/extensions/langchain/integration/test_message_format.py @@ -0,0 +1,236 @@ +# 
Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""Integration tests for LangChain message format mapping. + +These tests use the real A365 observability pipeline: + configure() → get_tracer_provider() → CustomLangChainInstrumentor +with a SpanCapturingExporter inside _EnrichingBatchSpanProcessor, then make +real Azure OpenAI calls via LangChain and capture the span attributes. + +The A365 versioned format (``{"version": "0.1.0", "messages": [...]}`` ) is the +canonical output produced by the LangChain message mapper. The raw-list format +is accepted as a backward-compatible fallback when the mapper has not yet run. +""" + +import json +import time +from typing import Any + +import pytest +from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult + +try: + from langchain_openai import AzureChatOpenAI +except ImportError: + pytest.skip( + "langchain-openai required for LangChain integration tests", + allow_module_level=True, + ) + +from microsoft_agents_a365.observability.core import configure, get_tracer_provider +from microsoft_agents_a365.observability.core.constants import ( + GEN_AI_INPUT_MESSAGES_KEY, + GEN_AI_OPERATION_NAME_KEY, + GEN_AI_OUTPUT_MESSAGES_KEY, +) +from microsoft_agents_a365.observability.core.exporters.enriching_span_processor import ( + _EnrichingBatchSpanProcessor, +) +from microsoft_agents_a365.observability.extensions.langchain import ( + CustomLangChainInstrumentor, +) + + +class SpanCapturingExporter(SpanExporter): + """Exporter that collects spans in-memory after enrichment.""" + + def __init__(self) -> None: + self.spans: list[ReadableSpan] = [] + + def export(self, spans: list[ReadableSpan]) -> SpanExportResult: + self.spans.extend(spans) + return SpanExportResult.SUCCESS + + def shutdown(self) -> None: + pass + + def force_flush(self, timeout_millis: int = 30000) -> bool: + return True + + +@pytest.mark.integration +class 
TestLangChainMessageFormat: + """Capture real LangChain span attributes and verify message structure.""" + + @pytest.fixture(autouse=True) + def setup_observability(self) -> None: + """Set up A365 observability with CustomLangChainInstrumentor.""" + if not hasattr(TestLangChainMessageFormat, "_exporter"): + configure( + service_name="integration-test-langchain", + service_namespace="agent365-tests", + logger_name="test-logger", + ) + + exporter = SpanCapturingExporter() + provider = get_tracer_provider() + provider.add_span_processor( + _EnrichingBatchSpanProcessor( + exporter, + max_queue_size=100, + schedule_delay_millis=100, + max_export_batch_size=100, + ) + ) + + # CustomLangChainInstrumentor calls instrument() in __init__ + instrumentor = CustomLangChainInstrumentor() + + TestLangChainMessageFormat._exporter = exporter + TestLangChainMessageFormat._instrumentor = instrumentor + + self.exporter = TestLangChainMessageFormat._exporter + self.exporter.spans.clear() + + @pytest.fixture + def llm(self, azure_openai_config: dict[str, Any]) -> AzureChatOpenAI: + """Create a real Azure OpenAI LangChain chat model.""" + return AzureChatOpenAI( + azure_endpoint=azure_openai_config["endpoint"], + api_key=azure_openai_config["api_key"], + azure_deployment=azure_openai_config["deployment"], + api_version=azure_openai_config["api_version"], + ) + + def _find_chat_spans(self) -> list[ReadableSpan]: + """Find exported spans that have gen_ai.input.messages.""" + get_tracer_provider().force_flush() + time.sleep(0.5) + return [ + s + for s in self.exporter.spans + if s.attributes and GEN_AI_INPUT_MESSAGES_KEY in s.attributes + ] + + @pytest.mark.asyncio + async def test_simple_chat_message_mapping(self, llm: AzureChatOpenAI) -> None: + """Simple chat: capture LangChain message format on exported spans.""" + from langchain_core.messages import HumanMessage, SystemMessage + + messages = [ + SystemMessage(content="You are a helpful assistant. 
Reply in one sentence."), + HumanMessage(content="What is the capital of France?"), + ] + + result = await llm.ainvoke(messages) + assert result is not None + assert len(result.content) > 0 + + chat_spans = self._find_chat_spans() + assert len(chat_spans) > 0, ( + f"No chat spans found. All spans: {[s.name for s in self.exporter.spans]}" + ) + + print(f"\n=== All exported spans ({len(self.exporter.spans)}) ===") + for s in self.exporter.spans: + attrs = dict(s.attributes or {}) + print(f" {s.name} | attrs: {list(attrs.keys())}") + + attrs = dict(chat_spans[-1].attributes or {}) + + # --- Input messages --- + raw_input = attrs[GEN_AI_INPUT_MESSAGES_KEY] + print(f"\n=== gen_ai.input.messages ===\n{raw_input}") + input_data = json.loads(raw_input) + + # Verify structure (currently plain string list or versioned format) + if isinstance(input_data, dict) and "version" in input_data: + # Versioned A365 format (after mapper is added) + assert input_data["version"] == "0.1.0" + messages_list = input_data["messages"] + for msg in messages_list: + assert "role" in msg + assert "parts" in msg + print("\n ✓ Versioned A365 format detected") + elif isinstance(input_data, list): + # Current raw format: list of content strings + assert len(input_data) > 0 + assert any("capital" in s.lower() for s in input_data if isinstance(s, str)) + print("\n → Raw string list format (pre-mapper)") + + # --- Output messages --- + raw_output = attrs.get(GEN_AI_OUTPUT_MESSAGES_KEY) + assert raw_output is not None, "gen_ai.output.messages not found" + print(f"\n=== gen_ai.output.messages ===\n{raw_output}") + output_data = json.loads(raw_output) + + if isinstance(output_data, dict) and "version" in output_data: + assert output_data["version"] == "0.1.0" + for msg in output_data["messages"]: + assert msg["role"] == "assistant" + assert any(p["type"] == "text" for p in msg["parts"]) + print("\n ✓ Versioned A365 format detected") + elif isinstance(output_data, list): + assert len(output_data) > 0 + 
print("\n → Raw string list format (pre-mapper)") + + @pytest.mark.asyncio + async def test_tool_call_message_mapping(self, llm: AzureChatOpenAI) -> None: + """Tool-calling chat: verify tool_call and tool_call_response parts in LangChain spans.""" + from langchain_core.messages import HumanMessage, SystemMessage + from langchain_core.tools import tool + + @tool + def get_weather(city: str) -> str: + """Get the current weather for a city.""" + return f"The weather in {city} is sunny, 22°C." + + llm_with_tools = llm.bind_tools([get_weather]) + + messages = [ + SystemMessage(content="You are a weather assistant. Always use the get_weather tool."), + HumanMessage(content="What's the weather in Seattle?"), + ] + + result = await llm_with_tools.ainvoke(messages) + assert result is not None + + chat_spans = self._find_chat_spans() + assert len(chat_spans) > 0 + + print(f"\n=== All exported spans ({len(self.exporter.spans)}) ===") + for s in self.exporter.spans: + attrs = dict(s.attributes or {}) + op = attrs.get(GEN_AI_OPERATION_NAME_KEY, "(none)") + print(f" {s.name} | op={op} | attrs: {list(attrs.keys())}") + + # Check all spans for message content and accumulate part types + all_part_types: list[str] = [] + for span in chat_spans: + attrs = dict(span.attributes or {}) + for key in (GEN_AI_INPUT_MESSAGES_KEY, GEN_AI_OUTPUT_MESSAGES_KEY): + raw = attrs.get(key) + if not raw: + continue + print(f"\n--- {span.name} | {key} ---\n{raw}") + data = json.loads(raw) + if isinstance(data, dict) and "version" in data: + # Versioned A365 format — assert canonical structure + assert data["version"] == "0.1.0" + assert isinstance(data.get("messages"), list) + for msg in data["messages"]: + assert "role" in msg + assert "parts" in msg + for part in msg["parts"]: + all_part_types.append(part.get("type", "")) + elif isinstance(data, list): + # Legacy raw-list format — allowed for backward compatibility + print(" → Raw list format (backward-compatible)") + + # When versioned format is 
present, assert tool_call and tool_call_response exist + if all_part_types: + assert "tool_call" in all_part_types, ( + f"Expected at least one tool_call part; found types: {all_part_types}" + ) diff --git a/tests/observability/extensions/langchain/integration/test_observability_pipeline.py b/tests/observability/extensions/langchain/integration/test_observability_pipeline.py new file mode 100644 index 00000000..2bde8738 --- /dev/null +++ b/tests/observability/extensions/langchain/integration/test_observability_pipeline.py @@ -0,0 +1,429 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. + +"""End-to-end pipeline integration tests for LangChain observability. + +These tests verify the full A365 observability pipeline: + InvokeAgentScope → Inference (auto-instrumented) → ToolExecution (auto-instrumented) + +The CustomLangChainInstrumentor automatically creates inference spans for LLM +calls and execute_tool spans for tool runs. Its built-in message mapper +converts LangChain messages into the versioned A365 message format +(``{"version": "0.1.0", "messages": [...]}``) on ``gen_ai.input.messages`` +and ``gen_ai.output.messages`` span attributes. + +Wrapping the entire call in InvokeAgentScope makes all auto-instrumented spans +children of the invoke_agent span (since ``separate_trace_from_runtime_context`` +defaults to ``False``). + +Note: the message-format assertions accept both the versioned dict structure +*and* a raw JSON list. The raw-list branch exists for backward compatibility +with older instrumentation versions or third-party LangChain instrumentors that +emit ``gen_ai.*.messages`` as plain JSON arrays before the A365 mapper was +integrated. 
"""

import json
import os
import time

import pytest
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult

# Skip the whole module when the optional LangChain stack is absent,
# instead of failing collection with an ImportError.
try:
    from langchain_core.messages import HumanMessage, SystemMessage
    from langchain_core.tools import tool
    from langchain_openai import AzureChatOpenAI
except ImportError:
    pytest.skip(
        "langchain-openai required for LangChain integration tests",
        allow_module_level=True,
    )

from microsoft_agents_a365.observability.core import configure, get_tracer_provider
from microsoft_agents_a365.observability.core.agent_details import AgentDetails
from microsoft_agents_a365.observability.core.constants import (
    GEN_AI_INPUT_MESSAGES_KEY,
    GEN_AI_OPERATION_NAME_KEY,
    GEN_AI_OUTPUT_MESSAGES_KEY,
    INVOKE_AGENT_OPERATION_NAME,
    TENANT_ID_KEY,
)
from microsoft_agents_a365.observability.core.exporters.enriching_span_processor import (
    _EnrichingBatchSpanProcessor,
)
from microsoft_agents_a365.observability.core.invoke_agent_details import InvokeAgentScopeDetails
from microsoft_agents_a365.observability.core.invoke_agent_scope import InvokeAgentScope
from microsoft_agents_a365.observability.core.request import Request
from microsoft_agents_a365.observability.extensions.langchain import (
    CustomLangChainInstrumentor,
)

# Ensure A365 observability scopes are active for tests
os.environ["ENABLE_A365_OBSERVABILITY"] = "true"


@tool
def add_numbers(a: float, b: float) -> str:
    """Add two numbers together.

    Args:
        a: First number
        b: Second number

    Returns:
        A string describing the sum.
    """
    return f"The sum of {a} and {b} is {a + b}"


class SpanCapturingExporter(SpanExporter):
    """Exporter that collects spans in-memory after enrichment."""

    def __init__(self) -> None:
        # Accumulates every span batch exported through the processor.
        self.spans: list[ReadableSpan] = []

    def export(self, spans: list[ReadableSpan]) -> SpanExportResult:
        """Append the batch and always report success."""
        self.spans.extend(spans)
        return SpanExportResult.SUCCESS

    def shutdown(self) -> None:
        """No resources to release for the in-memory exporter."""
        pass

    def force_flush(self, timeout_millis: int = 30000) -> bool:
        """Nothing buffered locally; flushing is trivially successful."""
        return True


def _get_span_attr(span: ReadableSpan, key: str) -> str | None:
    """Safely get an attribute from a span.

    NOTE(review): OTel attribute values may also be int/bool/sequence; the
    ``str | None`` return annotation assumes string-valued keys — confirm.
    """
    attrs = span.attributes or {}
    return attrs.get(key)


def _find_spans_by_name_prefix(spans: list[ReadableSpan], prefix: str) -> list[ReadableSpan]:
    """Find spans whose name starts with a given prefix."""
    return [s for s in spans if s.name.startswith(prefix)]


@pytest.mark.integration
class TestLangChainObservabilityPipeline:
    """End-to-end pipeline tests: InvokeAgent → Inference → ToolExecution.

    Verifies that wrapping LangChain calls inside InvokeAgentScope
    produces a single trace with correct parent-child span hierarchy,
    operation names, and A365 versioned message format attributes.
    """

    @pytest.fixture(autouse=True)
    def setup_observability(self) -> None:
        """Set up A365 observability with CustomLangChainInstrumentor.

        Configuration, exporter, and instrumentor are created once and
        memoized as class attributes, because ``configure`` and the
        instrumentor install global state that must not be re-applied
        per test. Captured spans are cleared before every test.
        """
        if not hasattr(TestLangChainObservabilityPipeline, "_exporter"):
            configure(
                service_name="integration-test-langchain-pipeline",
                service_namespace="agent365-tests",
                logger_name="test-logger",
            )

            exporter = SpanCapturingExporter()
            provider = get_tracer_provider()
            provider.add_span_processor(
                _EnrichingBatchSpanProcessor(
                    exporter,
                    max_queue_size=100,
                    schedule_delay_millis=100,
                    max_export_batch_size=100,
                )
            )

            # CustomLangChainInstrumentor instruments on init
            instrumentor = CustomLangChainInstrumentor()

            TestLangChainObservabilityPipeline._exporter = exporter
            TestLangChainObservabilityPipeline._instrumentor = instrumentor

        self.exporter = TestLangChainObservabilityPipeline._exporter
        self.exporter.spans.clear()

    @pytest.fixture
    def llm(self, azure_openai_config: dict) -> AzureChatOpenAI:
        """Create a real Azure OpenAI LangChain chat model.

        ``azure_openai_config`` is supplied by a conftest fixture outside
        this file; it is expected to carry endpoint/key/deployment/version.
        """
        return AzureChatOpenAI(
            azure_endpoint=azure_openai_config["endpoint"],
            api_key=azure_openai_config["api_key"],
            azure_deployment=azure_openai_config["deployment"],
            api_version=azure_openai_config["api_version"],
        )

    @pytest.fixture
    def agent_details(self) -> AgentDetails:
        """Create AgentDetails for the test agent.

        Falls back to fixed GUIDs when the AGENT365_TEST_* env vars are
        unset — presumably well-known test-tenant identifiers; verify.
        """
        tenant_id = os.getenv("AGENT365_TEST_TENANT_ID", "4d44f041-f91e-4d00-b107-61e47b26f5a8")
        agent_id = os.getenv("AGENT365_TEST_AGENT_ID", "3bccd52b-daaa-4b11-af40-47443852137c")
        return AgentDetails(
            agent_id=agent_id,
            agent_name="langchain-pipeline-test-agent",
            agent_description="Integration test agent for LangChain pipeline verification",
            tenant_id=tenant_id,
        )

    def _flush_and_collect(self) -> list[ReadableSpan]:
        """Force flush and return all captured spans.

        The short sleep gives the batch processor's export thread time to
        deliver the final batch to the in-memory exporter.
        """
        get_tracer_provider().force_flush()
        time.sleep(0.5)
        return list(self.exporter.spans)
    # ------------------------------------------------------------------
    # Test: Full pipeline with tool execution loop
    # ------------------------------------------------------------------

    @pytest.mark.asyncio
    async def test_pipeline_invoke_agent_with_tool_call(
        self,
        llm: AzureChatOpenAI,
        agent_details: AgentDetails,
    ) -> None:
        """Full pipeline: InvokeAgentScope wraps LangChain with tool execution.

        Uses a manual tool loop: LLM call → tool execution → LLM call.
        This ensures both inference and tool execution spans are created.

        Verifies:
        1. All spans share the same trace_id
        2. invoke_agent span is the root (no parent)
        3. Inference spans are descendants of invoke_agent
        4. Tool execution spans are descendants of invoke_agent
        5. A365 message format on inference spans
        """
        from langchain_core.messages import ToolMessage

        request = Request(content="What is 15 + 27?", session_id="test-langchain-pipeline")

        llm_with_tools = llm.bind_tools([add_numbers])

        # Everything inside the scope becomes a child of the invoke_agent span.
        with InvokeAgentScope.start(
            request=request,
            scope_details=InvokeAgentScopeDetails(),
            agent_details=agent_details,
        ):
            messages = [
                SystemMessage(
                    content=(
                        "You are a math assistant. You MUST use the add_numbers tool "
                        "for any arithmetic. Never compute in your head."
                    )
                ),
                HumanMessage(content="What is 15 + 27?"),
            ]

            # First LLM call — should produce a tool_calls response
            ai_response = await llm_with_tools.ainvoke(messages)
            messages.append(ai_response)

            # Execute tool calls if present
            if hasattr(ai_response, "tool_calls") and ai_response.tool_calls:
                for tc in ai_response.tool_calls:
                    # NOTE(review): invoking a tool with a full ToolCall dict may
                    # already return a ToolMessage in newer LangChain; str() of it
                    # would embed the repr — confirm this is the intended content.
                    tool_result = add_numbers.invoke(tc)
                    messages.append(ToolMessage(content=str(tool_result), tool_call_id=tc["id"]))

                # Second LLM call with tool results
                final_response = await llm_with_tools.ainvoke(messages)
            else:
                final_response = ai_response

        assert final_response is not None
        assert len(str(final_response.content)) > 0

        spans = self._flush_and_collect()
        assert len(spans) > 0, "No spans were captured"

        # --- Print span tree for debugging ---
        print(f"\n=== Captured {len(spans)} spans ===")
        for s in spans:
            op = _get_span_attr(s, GEN_AI_OPERATION_NAME_KEY) or "(none)"
            parent_id = f"{s.parent.span_id:016x}" if s.parent else "None"
            print(
                f"  {s.name} | op={op} | trace={s.context.trace_id:032x} "
                f"| span={s.context.span_id:016x} | parent={parent_id}"
            )

        # --- 1. Find invoke_agent span ---
        invoke_spans = _find_spans_by_name_prefix(spans, "invoke_agent")
        assert len(invoke_spans) >= 1, (
            f"Expected at least 1 invoke_agent span, got: {[s.name for s in spans]}"
        )
        invoke_span = invoke_spans[0]
        trace_id = invoke_span.context.trace_id

        # --- 2. All spans share the same trace_id ---
        for s in spans:
            assert s.context.trace_id == trace_id, (
                f"Span '{s.name}' has different trace_id: "
                f"{s.context.trace_id:032x} vs {trace_id:032x}"
            )

        # --- 3. invoke_agent span is the root ---
        assert invoke_span.parent is None, (
            f"invoke_agent should be root but has parent: {invoke_span.parent.span_id:016x}"
        )

        # --- 4. invoke_agent has correct operation name ---
        assert _get_span_attr(invoke_span, GEN_AI_OPERATION_NAME_KEY) == INVOKE_AGENT_OPERATION_NAME

        # --- 5. Tenant ID is set ---
        assert _get_span_attr(invoke_span, TENANT_ID_KEY) == agent_details.tenant_id

        # --- 6. Inference spans are descendants of invoke_agent ---
        # LangChain inference spans typically have chat operation or input messages
        inference_spans = [
            s
            for s in spans
            if s != invoke_span
            and (
                _get_span_attr(s, GEN_AI_OPERATION_NAME_KEY) == "chat"
                or _get_span_attr(s, GEN_AI_INPUT_MESSAGES_KEY) is not None
            )
            and not s.name.startswith("execute_tool")
        ]
        assert len(inference_spans) >= 1, (
            f"Expected at least 1 inference span, got: {[s.name for s in spans]}"
        )

        invoke_span_id = invoke_span.context.span_id
        for inf_span in inference_spans:
            self._assert_ancestor(
                inf_span,
                invoke_span_id,
                spans,
                f"Inference span '{inf_span.name}' is not a descendant of invoke_agent",
            )

        # --- 7. Tool execution spans are descendants of invoke_agent ---
        tool_spans = _find_spans_by_name_prefix(spans, "execute_tool")
        # Tool execution spans may or may not appear depending on whether
        # the LangChain tracer emits them. If present, verify hierarchy.
        if tool_spans:
            for tool_span in tool_spans:
                self._assert_ancestor(
                    tool_span,
                    invoke_span_id,
                    spans,
                    f"Tool span '{tool_span.name}' is not a descendant of invoke_agent",
                )
            print(f"\n✓ Found {len(tool_spans)} tool execution spans")

        # --- 8. A365 message format on inference spans ---
        # The A365 mapper emits the versioned format {"version": "0.1.0", "messages": [...]}.
        # Older or third-party instrumentors may emit a raw JSON list instead;
        # the raw-list branch is kept for backward compatibility.
        for inf_span in inference_spans:
            attrs = dict(inf_span.attributes or {})
            if GEN_AI_INPUT_MESSAGES_KEY in attrs:
                input_data = json.loads(attrs[GEN_AI_INPUT_MESSAGES_KEY])
                if isinstance(input_data, dict) and "version" in input_data:
                    assert input_data["version"] == "0.1.0"
                    for msg in input_data["messages"]:
                        assert "role" in msg
                        assert "parts" in msg

            if GEN_AI_OUTPUT_MESSAGES_KEY in attrs:
                output_data = json.loads(attrs[GEN_AI_OUTPUT_MESSAGES_KEY])
                if isinstance(output_data, dict) and "version" in output_data:
                    assert output_data["version"] == "0.1.0"

        print(
            f"\n✓ All pipeline assertions passed: "
            f"{len(spans)} spans, {len(inference_spans)} inference, "
            f"{len(tool_spans)} tool"
        )

    # ------------------------------------------------------------------
    # Test: Pipeline without tools (simple inference only)
    # ------------------------------------------------------------------

    @pytest.mark.asyncio
    async def test_pipeline_invoke_agent_simple_inference(
        self,
        llm: AzureChatOpenAI,
        agent_details: AgentDetails,
    ) -> None:
        """Pipeline with InvokeAgentScope + simple inference (no tools).

        Verifies invoke_agent → inference span hierarchy.
        """
        request = Request(content="Say hello", session_id="test-langchain-simple")

        with InvokeAgentScope.start(
            request=request,
            scope_details=InvokeAgentScopeDetails(),
            agent_details=agent_details,
        ):
            messages = [
                SystemMessage(content="You are a helpful assistant. Reply in one sentence."),
                HumanMessage(content="Say hello in exactly 5 words."),
            ]
            result = await llm.ainvoke(messages)

        assert result is not None
        assert len(str(result.content)) > 0

        spans = self._flush_and_collect()
        assert len(spans) > 0, "No spans were captured"

        # All spans share the same trace_id
        invoke_spans = _find_spans_by_name_prefix(spans, "invoke_agent")
        assert len(invoke_spans) >= 1
        invoke_span = invoke_spans[0]
        trace_id = invoke_span.context.trace_id

        for s in spans:
            assert s.context.trace_id == trace_id

        # invoke_agent is root
        assert invoke_span.parent is None

        # Inference spans are descendants
        inference_spans = [
            s
            for s in spans
            if s != invoke_span and _get_span_attr(s, GEN_AI_INPUT_MESSAGES_KEY) is not None
        ]
        assert len(inference_spans) >= 1

        invoke_span_id = invoke_span.context.span_id
        for inf_span in inference_spans:
            self._assert_ancestor(
                inf_span,
                invoke_span_id,
                spans,
                f"Inference span '{inf_span.name}' not a descendant of invoke_agent",
            )

        print(f"\n✓ Simple pipeline: {len(spans)} spans, hierarchy verified")

    # ------------------------------------------------------------------
    # Helper: assert ancestor relationship
    # ------------------------------------------------------------------

    def _assert_ancestor(
        self,
        span: ReadableSpan,
        ancestor_span_id: int,
        all_spans: list[ReadableSpan],
        message: str,
    ) -> None:
        """Walk up the parent chain and assert that ancestor_span_id is found.

        Args:
            span: Starting span whose ancestry is checked.
            ancestor_span_id: Span id that must appear somewhere above ``span``.
            all_spans: Universe used to resolve parent ids to spans.
            message: AssertionError text when the ancestor is not reached.

        Raises:
            AssertionError: If the chain ends (missing parent, or a cycle —
                guarded by ``visited``) before ``ancestor_span_id`` is seen.
        """
        span_map = {s.context.span_id: s for s in all_spans}
        current = span
        visited: set[int] = set()
        while current.parent is not None:
            parent_id = current.parent.span_id
            if parent_id == ancestor_span_id:
                return
            if parent_id in visited:
                # Defensive: a parent cycle would otherwise loop forever.
                break
            visited.add(parent_id)
            current = span_map.get(parent_id)
            if current is None:
                # Parent was not exported/captured; cannot walk further.
                break
        raise AssertionError(message)
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.

"""Unit tests for the LangChain message mapper.

These tests cover mapping behaviour without requiring real Azure credentials.
All LangChain objects are constructed directly so no network calls are made.
"""

import json
import unittest

from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage
from microsoft_agents_a365.observability.extensions.langchain.message_mapper import (
    map_input_messages,
    map_output_messages,
)


class TestMapInputMessages(unittest.TestCase):
    """Tests for map_input_messages."""

    def _parse(self, result: str | None) -> dict:
        """Assert the mapper produced output and decode its JSON payload."""
        self.assertIsNotNone(result)
        return json.loads(result)  # type: ignore[arg-type]

    def test_system_message(self) -> None:
        """A system message maps to role=system with a text part."""
        # The mapper consumes LangChain's nested messages structure: a list of batches.
        inputs = {"messages": [[SystemMessage(content="You are helpful.")]]}
        data = self._parse(map_input_messages(inputs))
        self.assertEqual(data["version"], "0.1.0")
        msgs = data["messages"]
        self.assertEqual(len(msgs), 1)
        self.assertEqual(msgs[0]["role"], "system")
        parts = msgs[0]["parts"]
        self.assertEqual(len(parts), 1)
        self.assertEqual(parts[0]["type"], "text")
        self.assertEqual(parts[0]["content"], "You are helpful.")

    def test_human_message(self) -> None:
        """A human message maps to role=user with a text part."""
        inputs = {"messages": [[HumanMessage(content="Hello!")]]}
        data = self._parse(map_input_messages(inputs))
        msgs = data["messages"]
        self.assertEqual(msgs[0]["role"], "user")
        self.assertEqual(msgs[0]["parts"][0]["content"], "Hello!")

    def test_assistant_message(self) -> None:
        """An AI message maps to role=assistant with a text part."""
        inputs = {"messages": [[AIMessage(content="I can help.")]]}
        data = self._parse(map_input_messages(inputs))
        msgs = data["messages"]
        self.assertEqual(msgs[0]["role"], "assistant")
        self.assertEqual(msgs[0]["parts"][0]["content"], "I can help.")

    def test_tool_message(self) -> None:
        """A ToolMessage maps to role=tool with a tool_call_response part."""
        inputs = {"messages": [[ToolMessage(content="42", tool_call_id="call_abc")]]}
        data = self._parse(map_input_messages(inputs))
        msgs = data["messages"]
        self.assertEqual(msgs[0]["role"], "tool")
        parts = msgs[0]["parts"]
        self.assertEqual(len(parts), 1)
        self.assertEqual(parts[0]["type"], "tool_call_response")
        self.assertEqual(parts[0]["id"], "call_abc")
        self.assertEqual(parts[0]["response"], "42")

    def test_tool_call_args_as_dict_kept_structured(self) -> None:
        """When tool-call args are already a dict they stay structured (not stringified)."""
        ai_msg = AIMessage(
            content="",
            tool_calls=[
                {"name": "search", "id": "call_1", "args": {"query": "hello"}, "type": "tool_use"}
            ],
        )
        inputs = {"messages": [[ai_msg]]}
        data = self._parse(map_input_messages(inputs))
        msgs = data["messages"]
        tool_part = next(p for p in msgs[0]["parts"] if p["type"] == "tool_call")
        # arguments must be a dict, not a JSON string
        self.assertIsInstance(tool_part["arguments"], dict)
        self.assertEqual(tool_part["arguments"], {"query": "hello"})

    def test_tool_call_args_as_json_string_parsed(self) -> None:
        """When tool-call args arrive as a JSON string they are parsed to a dict.

        LangChain ``AIMessage.tool_calls`` enforces dict args via pydantic, so
        this scenario is exercised via the Mapping path of ``_extract_parts``.
        """
        # Simulate a serialized LangChain message where args is a JSON string
        msg_mapping = {
            "type": "ai",
            "content": "",
            "tool_calls": [{"name": "search", "id": "call_2", "args": '{"query": "world"}'}],
        }
        inputs = {"messages": [[msg_mapping]]}
        data = self._parse(map_input_messages(inputs))
        msgs = data["messages"]
        tool_part = next(p for p in msgs[0]["parts"] if p["type"] == "tool_call")
        # Must be parsed to a dict
        self.assertIsInstance(tool_part["arguments"], dict)
        self.assertEqual(tool_part["arguments"]["query"], "world")

    def test_tool_call_args_invalid_json_string_kept_as_string(self) -> None:
        """When tool-call args are an un-parseable string they are kept as-is.

        Uses the Mapping path of ``_extract_parts`` (same reason as above).
        """
        msg_mapping = {
            "type": "ai",
            "content": "",
            "tool_calls": [{"name": "search", "id": "call_3", "args": "not-valid-json"}],
        }
        inputs = {"messages": [[msg_mapping]]}
        data = self._parse(map_input_messages(inputs))
        msgs = data["messages"]
        tool_part = next(p for p in msgs[0]["parts"] if p["type"] == "tool_call")
        self.assertEqual(tool_part["arguments"], "not-valid-json")

    def test_empty_content_ignored(self) -> None:
        """Messages with empty or whitespace-only content produce no text part."""
        ai_msg = AIMessage(content="   ")
        inputs = {"messages": [[ai_msg]]}
        result = map_input_messages(inputs)
        # No text part → message filtered → None returned
        self.assertIsNone(result)

    def test_none_inputs_returns_none(self) -> None:
        """None inputs return None without error."""
        self.assertIsNone(map_input_messages(None))

    def test_empty_dict_returns_none(self) -> None:
        """Empty inputs dict returns None."""
        self.assertIsNone(map_input_messages({}))

    def test_multiple_messages_in_sequence(self) -> None:
        """Multiple messages in the list are all mapped."""
        inputs = {
            "messages": [
                [
                    SystemMessage(content="You are helpful."),
                    HumanMessage(content="Hi"),
                ]
            ]
        }
        data = self._parse(map_input_messages(inputs))
        self.assertEqual(len(data["messages"]), 2)
        self.assertEqual(data["messages"][0]["role"], "system")
        self.assertEqual(data["messages"][1]["role"], "user")

    def test_unknown_role_defaults_to_user(self) -> None:
        """Messages with an unrecognised role string default to user."""
        from langchain_core.messages import BaseMessage

        class WeirdMessage(BaseMessage):
            # A message type string the mapper has no role mapping for.
            type: str = "xyzzy"

            def __init__(self) -> None:
                super().__init__(content="strange")

        inputs = {"messages": [[WeirdMessage()]]}
        data = self._parse(map_input_messages(inputs))
        self.assertEqual(data["messages"][0]["role"], "user")


class TestMapOutputMessages(unittest.TestCase):
    """Tests for map_output_messages."""

    def _parse(self, result: str | None) -> dict:
        """Assert the mapper produced output and decode its JSON payload."""
        self.assertIsNotNone(result)
        return json.loads(result)  # type: ignore[arg-type]

    def test_none_outputs_returns_none(self) -> None:
        """None outputs return None."""
        self.assertIsNone(map_output_messages(None))

    def test_empty_outputs_returns_none(self) -> None:
        """Empty outputs dict returns None."""
        self.assertIsNone(map_output_messages({}))

    def test_assistant_text_generation(self) -> None:
        """A plain text AI generation maps to role=assistant with a text part."""
        # Mirrors LangChain's LLMResult shape: generations is a list of batches.
        outputs = {
            "generations": [
                [
                    {
                        "message": AIMessage(content="Paris is the capital of France."),
                        "generation_info": {"finish_reason": "stop"},
                        "text": "Paris is the capital of France.",
                    }
                ]
            ]
        }
        data = self._parse(map_output_messages(outputs))
        self.assertEqual(data["version"], "0.1.0")
        msgs = data["messages"]
        self.assertEqual(len(msgs), 1)
        self.assertEqual(msgs[0]["role"], "assistant")
        self.assertEqual(msgs[0]["finish_reason"], "stop")
        text_part = next(p for p in msgs[0]["parts"] if p["type"] == "text")
        self.assertEqual(text_part["content"], "Paris is the capital of France.")

    def test_tool_call_generation_args_dict(self) -> None:
        """A tool-call generation with dict args maps to a tool_call part with structured args."""
        ai_msg = AIMessage(
            content="",
            tool_calls=[{"name": "calc", "id": "c1", "args": {"expr": "1+1"}, "type": "tool_use"}],
        )
        outputs = {
            "generations": [
                [
                    {
                        "message": ai_msg,
                        "text": "",
                        "generation_info": {"finish_reason": "tool_calls"},
                    }
                ]
            ]
        }
        data = self._parse(map_output_messages(outputs))
        msgs = data["messages"]
        tool_part = next(p for p in msgs[0]["parts"] if p["type"] == "tool_call")
        self.assertEqual(tool_part["name"], "calc")
        self.assertIsInstance(tool_part["arguments"], dict)
        self.assertEqual(tool_part["arguments"]["expr"], "1+1")


if __name__ == "__main__":
    unittest.main()
"https://files.pythonhosted.org/packages/10/30/1f80e3fc674353cad975ed5294353d42512535d2094ef032c06454c2c873/langchain_core-1.2.11-py3-none-any.whl", hash = "sha256:ae11ceb8dda60d0b9d09e763116e592f1683327c17be5b715f350fd29aee65d3", size = 500062, upload-time = "2026-02-10T20:35:26.698Z" }, + { url = "https://files.pythonhosted.org/packages/a8/92/32f785f077c7e898da97064f113c73fbd9ad55d1e2169cf3a391b183dedb/langchain_core-1.2.28-py3-none-any.whl", hash = "sha256:80764232581eaf8057bcefa71dbf8adc1f6a28d257ebd8b95ba9b8b452e8c6ac", size = 508727, upload-time = "2026-04-08T18:19:32.823Z" }, +] + +[[package]] +name = "langchain-openai" +version = "1.1.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "openai" }, + { name = "tiktoken" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/fd/7dee16e882c4c1577d48db174d85aa3a0ee09ba61eb6a5d41650285ca80c/langchain_openai-1.1.12.tar.gz", hash = "sha256:ccf5ef02c896f6807b4d0e51aaf678a72ce81ae41201cae8d65e11eeff9ecb79", size = 1114119, upload-time = "2026-03-23T18:59:19.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/a6/68fb22e3604015e6f546fa1d3677d24378b482855ae74710cbf4aec44132/langchain_openai-1.1.12-py3-none-any.whl", hash = "sha256:da71ca3f2d18c16f7a2443cc306aa195ad2a07054335ac9b0626dcae02b6a0c5", size = 88487, upload-time = "2026-03-23T18:59:17.978Z" }, ] [[package]] @@ -5126,6 +5141,110 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/b1/3baf80dc6d2b7bc27a95a67752d0208e410351e3feb4eb78de5f77454d8d/referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0", size = 26775, upload-time = "2025-01-25T08:48:14.241Z" }, ] +[[package]] +name = "regex" +version = "2026.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/cb/0e/3a246dbf05666918bd3664d9d787f84a9108f6f43cc953a077e4a7dfdb7e/regex-2026.4.4.tar.gz", hash = "sha256:e08270659717f6973523ce3afbafa53515c4dc5dcad637dc215b6fd50f689423", size = 416000, upload-time = "2026-04-03T20:56:28.155Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/7a/617356cbecdb452812a5d42f720d6d5096b360d4a4c1073af700ea140ad2/regex-2026.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4c36a85b00fadb85db9d9e90144af0a980e1a3d2ef9cd0f8a5bef88054657c6", size = 489415, upload-time = "2026-04-03T20:53:11.645Z" }, + { url = "https://files.pythonhosted.org/packages/20/e6/bf057227144d02e3ba758b66649e87531d744dda5f3254f48660f18ae9d8/regex-2026.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dcb5453ecf9cd58b562967badd1edbf092b0588a3af9e32ee3d05c985077ce87", size = 291205, upload-time = "2026-04-03T20:53:13.289Z" }, + { url = "https://files.pythonhosted.org/packages/eb/3b/637181b787dd1a820ba1c712cee2b4144cd84a32dc776ca067b12b2d70c8/regex-2026.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6aa809ed4dc3706cc38594d67e641601bd2f36d5555b2780ff074edfcb136cf8", size = 289225, upload-time = "2026-04-03T20:53:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/05/21/bac05d806ed02cd4b39d9c8e5b5f9a2998c94c3a351b7792e80671fa5315/regex-2026.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:33424f5188a7db12958246a54f59a435b6cb62c5cf9c8d71f7cc49475a5fdada", size = 792434, upload-time = "2026-04-03T20:53:17.414Z" }, + { url = "https://files.pythonhosted.org/packages/d9/17/c65d1d8ae90b772d5758eb4014e1e011bb2db353fc4455432e6cc9100df7/regex-2026.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d346fccdde28abba117cc9edc696b9518c3307fbfcb689e549d9b5979018c6d", size = 861730, upload-time = "2026-04-03T20:53:18.903Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/64/933321aa082a2c6ee2785f22776143ba89840189c20d3b6b1d12b6aae16b/regex-2026.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:415a994b536440f5011aa77e50a4274d15da3245e876e5c7f19da349caaedd87", size = 906495, upload-time = "2026-04-03T20:53:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/01/ea/4c8d306e9c36ac22417336b1e02e7b358152c34dc379673f2d331143725f/regex-2026.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21e5eb86179b4c67b5759d452ea7c48eb135cd93308e7a260aa489ed2eb423a4", size = 799810, upload-time = "2026-04-03T20:53:22.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/ce/7605048f00e1379eba89d610c7d644d8f695dc9b26d3b6ecfa3132b872ff/regex-2026.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:312ec9dd1ae7d96abd8c5a36a552b2139931914407d26fba723f9e53c8186f86", size = 774242, upload-time = "2026-04-03T20:53:25.015Z" }, + { url = "https://files.pythonhosted.org/packages/e9/77/283e0d5023fde22cd9e86190d6d9beb21590a452b195ffe00274de470691/regex-2026.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0d2b28aa1354c7cd7f71b7658c4326f7facac106edd7f40eda984424229fd59", size = 781257, upload-time = "2026-04-03T20:53:26.918Z" }, + { url = "https://files.pythonhosted.org/packages/8b/fb/7f3b772be101373c8626ed34c5d727dcbb8abd42a7b1219bc25fd9a3cc04/regex-2026.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:349d7310eddff40429a099c08d995c6d4a4bfaf3ff40bd3b5e5cb5a5a3c7d453", size = 854490, upload-time = "2026-04-03T20:53:29.065Z" }, + { url = "https://files.pythonhosted.org/packages/85/30/56547b80f34f4dd2986e1cdd63b1712932f63b6c4ce2f79c50a6cd79d1c2/regex-2026.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:e7ab63e9fe45a9ec3417509e18116b367e89c9ceb6219222a3396fa30b147f80", size = 763544, upload-time = "2026-04-03T20:53:30.917Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/2f/ce060fdfea8eff34a8997603532e44cdb7d1f35e3bc253612a8707a90538/regex-2026.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fe896e07a5a2462308297e515c0054e9ec2dd18dfdc9427b19900b37dfe6f40b", size = 844442, upload-time = "2026-04-03T20:53:32.463Z" }, + { url = "https://files.pythonhosted.org/packages/e5/44/810cb113096a1dacbe82789fbfab2823f79d19b7f1271acecb7009ba9b88/regex-2026.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eb59c65069498dbae3c0ef07bbe224e1eaa079825a437fb47a479f0af11f774f", size = 789162, upload-time = "2026-04-03T20:53:34.039Z" }, + { url = "https://files.pythonhosted.org/packages/20/96/9647dd7f2ecf6d9ce1fb04dfdb66910d094e10d8fe53e9c15096d8aa0bd2/regex-2026.4.4-cp311-cp311-win32.whl", hash = "sha256:2a5d273181b560ef8397c8825f2b9d57013de744da9e8257b8467e5da8599351", size = 266227, upload-time = "2026-04-03T20:53:35.601Z" }, + { url = "https://files.pythonhosted.org/packages/33/80/74e13262460530c3097ff343a17de9a34d040a5dc4de9cf3a8241faab51c/regex-2026.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:9542ccc1e689e752594309444081582f7be2fdb2df75acafea8a075108566735", size = 278399, upload-time = "2026-04-03T20:53:37.021Z" }, + { url = "https://files.pythonhosted.org/packages/1c/3c/39f19f47f19dcefa3403f09d13562ca1c0fd07ab54db2bc03148f3f6b46a/regex-2026.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:b5f9fb784824a042be3455b53d0b112655686fdb7a91f88f095f3fee1e2a2a54", size = 270473, upload-time = "2026-04-03T20:53:38.633Z" }, + { url = "https://files.pythonhosted.org/packages/e5/28/b972a4d3df61e1d7bcf1b59fdb3cddef22f88b6be43f161bb41ebc0e4081/regex-2026.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c07ab8794fa929e58d97a0e1796b8b76f70943fa39df225ac9964615cf1f9d52", size = 490434, upload-time = "2026-04-03T20:53:40.219Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/20/30041446cf6dc3e0eab344fc62770e84c23b6b68a3b657821f9f80cb69b4/regex-2026.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2c785939dc023a1ce4ec09599c032cc9933d258a998d16ca6f2b596c010940eb", size = 292061, upload-time = "2026-04-03T20:53:41.862Z" }, + { url = "https://files.pythonhosted.org/packages/62/c8/3baa06d75c98c46d4cc4262b71fd2edb9062b5665e868bca57859dadf93a/regex-2026.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b1ce5c81c9114f1ce2f9288a51a8fd3aeea33a0cc440c415bf02da323aa0a76", size = 289628, upload-time = "2026-04-03T20:53:43.701Z" }, + { url = "https://files.pythonhosted.org/packages/31/87/3accf55634caad8c0acab23f5135ef7d4a21c39f28c55c816ae012931408/regex-2026.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:760ef21c17d8e6a4fe8cf406a97cf2806a4df93416ccc82fc98d25b1c20425be", size = 796651, upload-time = "2026-04-03T20:53:45.379Z" }, + { url = "https://files.pythonhosted.org/packages/f6/0c/aaa2c83f34efedbf06f61cb1942c25f6cf1ee3b200f832c4d05f28306c2e/regex-2026.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7088fcdcb604a4417c208e2169715800d28838fefd7455fbe40416231d1d47c1", size = 865916, upload-time = "2026-04-03T20:53:47.064Z" }, + { url = "https://files.pythonhosted.org/packages/d9/f6/8c6924c865124643e8f37823eca845dc27ac509b2ee58123685e71cd0279/regex-2026.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:07edca1ba687998968f7db5bc355288d0c6505caa7374f013d27356d93976d13", size = 912287, upload-time = "2026-04-03T20:53:49.422Z" }, + { url = "https://files.pythonhosted.org/packages/11/0e/a9f6f81013e0deaf559b25711623864970fe6a098314e374ccb1540a4152/regex-2026.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:993f657a7c1c6ec51b5e0ba97c9817d06b84ea5fa8d82e43b9405de0defdc2b9", size = 801126, 
upload-time = "2026-04-03T20:53:51.096Z" }, + { url = "https://files.pythonhosted.org/packages/71/61/3a0cc8af2dc0c8deb48e644dd2521f173f7e6513c6e195aad9aa8dd77ac5/regex-2026.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2b69102a743e7569ebee67e634a69c4cb7e59d6fa2e1aa7d3bdbf3f61435f62d", size = 776788, upload-time = "2026-04-03T20:53:52.889Z" }, + { url = "https://files.pythonhosted.org/packages/64/0b/8bb9cbf21ef7dee58e49b0fdb066a7aded146c823202e16494a36777594f/regex-2026.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dac006c8b6dda72d86ea3d1333d45147de79a3a3f26f10c1cf9287ca4ca0ac3", size = 785184, upload-time = "2026-04-03T20:53:55.627Z" }, + { url = "https://files.pythonhosted.org/packages/99/c2/d3e80e8137b25ee06c92627de4e4d98b94830e02b3e6f81f3d2e3f504cf5/regex-2026.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:50a766ee2010d504554bfb5f578ed2e066898aa26411d57e6296230627cdefa0", size = 859913, upload-time = "2026-04-03T20:53:57.249Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/9d5d876157d969c804622456ef250017ac7a8f83e0e14f903b9e6df5ce95/regex-2026.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9e2f5217648f68e3028c823df58663587c1507a5ba8419f4fdfc8a461be76043", size = 765732, upload-time = "2026-04-03T20:53:59.428Z" }, + { url = "https://files.pythonhosted.org/packages/82/80/b568935b4421388561c8ed42aff77247285d3ae3bb2a6ca22af63bae805e/regex-2026.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:39d8de85a08e32632974151ba59c6e9140646dcc36c80423962b1c5c0a92e244", size = 852152, upload-time = "2026-04-03T20:54:01.505Z" }, + { url = "https://files.pythonhosted.org/packages/39/29/f0f81217e21cd998245da047405366385d5c6072048038a3d33b37a79dc0/regex-2026.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:55d9304e0e7178dfb1e106c33edf834097ddf4a890e2f676f6c5118f84390f73", size = 789076, upload-time = "2026-04-03T20:54:03.323Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/1d/1d957a61976ab9d4e767dd4f9d04b66cc0c41c5e36cf40e2d43688b5ae6f/regex-2026.4.4-cp312-cp312-win32.whl", hash = "sha256:04bb679bc0bde8a7bfb71e991493d47314e7b98380b083df2447cda4b6edb60f", size = 266700, upload-time = "2026-04-03T20:54:05.639Z" }, + { url = "https://files.pythonhosted.org/packages/c5/5c/bf575d396aeb58ea13b06ef2adf624f65b70fafef6950a80fc3da9cae3bc/regex-2026.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:db0ac18435a40a2543dbb3d21e161a6c78e33e8159bd2e009343d224bb03bb1b", size = 277768, upload-time = "2026-04-03T20:54:07.312Z" }, + { url = "https://files.pythonhosted.org/packages/c9/27/049df16ec6a6828ccd72add3c7f54b4df029669bea8e9817df6fff58be90/regex-2026.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:4ce255cc05c1947a12989c6db801c96461947adb7a59990f1360b5983fab4983", size = 270568, upload-time = "2026-04-03T20:54:09.484Z" }, + { url = "https://files.pythonhosted.org/packages/9d/83/c4373bc5f31f2cf4b66f9b7c31005bd87fe66f0dce17701f7db4ee79ee29/regex-2026.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:62f5519042c101762509b1d717b45a69c0139d60414b3c604b81328c01bd1943", size = 490273, upload-time = "2026-04-03T20:54:11.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/f8/fe62afbcc3cf4ad4ac9adeaafd98aa747869ae12d3e8e2ac293d0593c435/regex-2026.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3790ba9fb5dd76715a7afe34dbe603ba03f8820764b1dc929dd08106214ed031", size = 291954, upload-time = "2026-04-03T20:54:13.412Z" }, + { url = "https://files.pythonhosted.org/packages/5a/92/4712b9fe6a33d232eeb1c189484b80c6c4b8422b90e766e1195d6e758207/regex-2026.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8fae3c6e795d7678963f2170152b0d892cf6aee9ee8afc8c45e6be38d5107fe7", size = 289487, upload-time = "2026-04-03T20:54:15.824Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/2c/f83b93f85e01168f1070f045a42d4c937b69fdb8dd7ae82d307253f7e36e/regex-2026.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:298c3ec2d53225b3bf91142eb9691025bab610e0c0c51592dde149db679b3d17", size = 796646, upload-time = "2026-04-03T20:54:18.229Z" }, + { url = "https://files.pythonhosted.org/packages/df/55/61a2e17bf0c4dc57e11caf8dd11771280d8aaa361785f9e3bc40d653f4a7/regex-2026.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e9638791082eaf5b3ac112c587518ee78e083a11c4b28012d8fe2a0f536dfb17", size = 865904, upload-time = "2026-04-03T20:54:20.019Z" }, + { url = "https://files.pythonhosted.org/packages/45/32/1ac8ed1b5a346b5993a3d256abe0a0f03b0b73c8cc88d928537368ac65b6/regex-2026.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae3e764bd4c5ff55035dc82a8d49acceb42a5298edf6eb2fc4d328ee5dd7afae", size = 912304, upload-time = "2026-04-03T20:54:22.403Z" }, + { url = "https://files.pythonhosted.org/packages/26/47/2ee5c613ab546f0eddebf9905d23e07beb933416b1246c2d8791d01979b4/regex-2026.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ffa81f81b80047ba89a3c69ae6a0f78d06f4a42ce5126b0eb2a0a10ad44e0b2e", size = 801126, upload-time = "2026-04-03T20:54:24.308Z" }, + { url = "https://files.pythonhosted.org/packages/75/cd/41dacd129ca9fd20bd7d02f83e0fad83e034ac8a084ec369c90f55ef37e2/regex-2026.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f56ebf9d70305307a707911b88469213630aba821e77de7d603f9d2f0730687d", size = 776772, upload-time = "2026-04-03T20:54:26.319Z" }, + { url = "https://files.pythonhosted.org/packages/89/6d/5af0b588174cb5f46041fa7dd64d3fd5cd2fe51f18766703d1edc387f324/regex-2026.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:773d1dfd652bbffb09336abf890bfd64785c7463716bf766d0eb3bc19c8b7f27", size = 785228, upload-time = "2026-04-03T20:54:28.387Z" }, + { url = "https://files.pythonhosted.org/packages/b7/3b/f5a72b7045bd59575fc33bf1345f156fcfd5a8484aea6ad84b12c5a82114/regex-2026.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d51d20befd5275d092cdffba57ded05f3c436317ee56466c8928ac32d960edaf", size = 860032, upload-time = "2026-04-03T20:54:30.641Z" }, + { url = "https://files.pythonhosted.org/packages/39/a4/72a317003d6fcd7a573584a85f59f525dfe8f67e355ca74eb6b53d66a5e2/regex-2026.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:0a51cdb3c1e9161154f976cb2bef9894bc063ac82f31b733087ffb8e880137d0", size = 765714, upload-time = "2026-04-03T20:54:32.789Z" }, + { url = "https://files.pythonhosted.org/packages/25/1e/5672e16f34dbbcb2560cc7e6a2fbb26dfa8b270711e730101da4423d3973/regex-2026.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ae5266a82596114e41fb5302140e9630204c1b5f325c770bec654b95dd54b0aa", size = 852078, upload-time = "2026-04-03T20:54:34.546Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0d/c813f0af7c6cc7ed7b9558bac2e5120b60ad0fa48f813e4d4bd55446f214/regex-2026.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c882cd92ec68585e9c1cf36c447ec846c0d94edd706fe59e0c198e65822fd23b", size = 789181, upload-time = "2026-04-03T20:54:36.642Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a344608d1adbd2a95090ddd906cec09a11be0e6517e878d02a5123e0917f/regex-2026.4.4-cp313-cp313-win32.whl", hash = "sha256:05568c4fbf3cb4fa9e28e3af198c40d3237cf6041608a9022285fe567ec3ad62", size = 266690, upload-time = "2026-04-03T20:54:38.343Z" }, + { url = "https://files.pythonhosted.org/packages/31/07/54049f89b46235ca6f45cd6c88668a7050e77d4a15555e47dd40fde75263/regex-2026.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:3384df51ed52db0bea967e21458ab0a414f67cdddfd94401688274e55147bb81", size = 277733, upload-time = "2026-04-03T20:54:40.11Z" }, + { url = 
"https://files.pythonhosted.org/packages/0e/21/61366a8e20f4d43fb597708cac7f0e2baadb491ecc9549b4980b2be27d16/regex-2026.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:acd38177bd2c8e69a411d6521760806042e244d0ef94e2dd03ecdaa8a3c99427", size = 270565, upload-time = "2026-04-03T20:54:41.883Z" }, + { url = "https://files.pythonhosted.org/packages/f1/1e/3a2b9672433bef02f5d39aa1143ca2c08f311c1d041c464a42be9ae648dc/regex-2026.4.4-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f94a11a9d05afcfcfa640e096319720a19cc0c9f7768e1a61fceee6a3afc6c7c", size = 494126, upload-time = "2026-04-03T20:54:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/4e/4b/c132a4f4fe18ad3340d89fcb56235132b69559136036b845be3c073142ed/regex-2026.4.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:36bcb9d6d1307ab629edc553775baada2aefa5c50ccc0215fbfd2afcfff43141", size = 293882, upload-time = "2026-04-03T20:54:45.41Z" }, + { url = "https://files.pythonhosted.org/packages/f4/5f/eaa38092ce7a023656280f2341dbbd4ad5f05d780a70abba7bb4f4bea54c/regex-2026.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:261c015b3e2ed0919157046d768774ecde57f03d8fa4ba78d29793447f70e717", size = 292334, upload-time = "2026-04-03T20:54:47.051Z" }, + { url = "https://files.pythonhosted.org/packages/5f/f6/dd38146af1392dac33db7074ab331cec23cced3759167735c42c5460a243/regex-2026.4.4-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c228cf65b4a54583763645dcd73819b3b381ca8b4bb1b349dee1c135f4112c07", size = 811691, upload-time = "2026-04-03T20:54:49.074Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f0/dc54c2e69f5eeec50601054998ec3690d5344277e782bd717e49867c1d29/regex-2026.4.4-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dd2630faeb6876fb0c287f664d93ddce4d50cd46c6e88e60378c05c9047e08ca", size = 871227, upload-time = "2026-04-03T20:54:51.035Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/af/cb16bd5dc61621e27df919a4449bbb7e5a1034c34d307e0a706e9cc0f3e3/regex-2026.4.4-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6a50ab11b7779b849472337191f3a043e27e17f71555f98d0092fa6d73364520", size = 917435, upload-time = "2026-04-03T20:54:52.994Z" }, + { url = "https://files.pythonhosted.org/packages/5c/71/8b260897f22996b666edd9402861668f45a2ca259f665ac029e6104a2d7d/regex-2026.4.4-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0734f63afe785138549fbe822a8cfeaccd1bae814c5057cc0ed5b9f2de4fc883", size = 816358, upload-time = "2026-04-03T20:54:54.884Z" }, + { url = "https://files.pythonhosted.org/packages/1c/60/775f7f72a510ef238254906c2f3d737fc80b16ca85f07d20e318d2eea894/regex-2026.4.4-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c4ee50606cb1967db7e523224e05f32089101945f859928e65657a2cbb3d278b", size = 785549, upload-time = "2026-04-03T20:54:57.01Z" }, + { url = "https://files.pythonhosted.org/packages/58/42/34d289b3627c03cf381e44da534a0021664188fa49ba41513da0b4ec6776/regex-2026.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6c1818f37be3ca02dcb76d63f2c7aaba4b0dc171b579796c6fbe00148dfec6b1", size = 801364, upload-time = "2026-04-03T20:54:58.981Z" }, + { url = "https://files.pythonhosted.org/packages/fc/20/f6ecf319b382a8f1ab529e898b222c3f30600fcede7834733c26279e7465/regex-2026.4.4-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f5bfc2741d150d0be3e4a0401a5c22b06e60acb9aa4daa46d9e79a6dcd0f135b", size = 866221, upload-time = "2026-04-03T20:55:00.88Z" }, + { url = "https://files.pythonhosted.org/packages/92/6a/9f16d3609d549bd96d7a0b2aee1625d7512ba6a03efc01652149ef88e74d/regex-2026.4.4-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:504ffa8a03609a087cad81277a629b6ce884b51a24bd388a7980ad61748618ff", size = 772530, upload-time = "2026-04-03T20:55:03.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/f6/aa9768bc96a4c361ac96419fbaf2dcdc33970bb813df3ba9b09d5d7b6d96/regex-2026.4.4-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:70aadc6ff12e4b444586e57fc30771f86253f9f0045b29016b9605b4be5f7dfb", size = 856989, upload-time = "2026-04-03T20:55:05.087Z" }, + { url = "https://files.pythonhosted.org/packages/4d/b4/c671db3556be2473ae3e4bb7a297c518d281452871501221251ea4ecba57/regex-2026.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f4f83781191007b6ef43b03debc35435f10cad9b96e16d147efe84a1d48bdde4", size = 803241, upload-time = "2026-04-03T20:55:07.162Z" }, + { url = "https://files.pythonhosted.org/packages/2a/5c/83e3b1d89fa4f6e5a1bc97b4abd4a9a97b3c1ac7854164f694f5f0ba98a0/regex-2026.4.4-cp313-cp313t-win32.whl", hash = "sha256:e014a797de43d1847df957c0a2a8e861d1c17547ee08467d1db2c370b7568baa", size = 269921, upload-time = "2026-04-03T20:55:09.62Z" }, + { url = "https://files.pythonhosted.org/packages/28/07/077c387121f42cdb4d92b1301133c0d93b5709d096d1669ab847dda9fe2e/regex-2026.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:b15b88b0d52b179712632832c1d6e58e5774f93717849a41096880442da41ab0", size = 281240, upload-time = "2026-04-03T20:55:11.521Z" }, + { url = "https://files.pythonhosted.org/packages/9d/22/ead4a4abc7c59a4d882662aa292ca02c8b617f30b6e163bc1728879e9353/regex-2026.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:586b89cdadf7d67bf86ae3342a4dcd2b8d70a832d90c18a0ae955105caf34dbe", size = 272440, upload-time = "2026-04-03T20:55:13.365Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f5/ed97c2dc47b5fbd4b73c0d7d75f9ebc8eca139f2bbef476bba35f28c0a77/regex-2026.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2da82d643fa698e5e5210e54af90181603d5853cf469f5eedf9bfc8f59b4b8c7", size = 490343, upload-time = "2026-04-03T20:55:15.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/80/e9/de4828a7385ec166d673a5790ad06ac48cdaa98bc0960108dd4b9cc1aef7/regex-2026.4.4-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:54a1189ad9d9357760557c91103d5e421f0a2dabe68a5cdf9103d0dcf4e00752", size = 291909, upload-time = "2026-04-03T20:55:17.558Z" }, + { url = "https://files.pythonhosted.org/packages/b4/d6/5cfbfc97f3201a4d24b596a77957e092030dcc4205894bc035cedcfce62f/regex-2026.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:76d67d5afb1fe402d10a6403bae668d000441e2ab115191a804287d53b772951", size = 289692, upload-time = "2026-04-03T20:55:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/8e/ac/f2212d9fd56fe897e36d0110ba30ba2d247bd6410c5bd98499c7e5a1e1f2/regex-2026.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e7cd3e4ee8d80447a83bbc9ab0c8459781fa77087f856c3e740d7763be0df27f", size = 796979, upload-time = "2026-04-03T20:55:22.56Z" }, + { url = "https://files.pythonhosted.org/packages/c9/e3/a016c12675fbac988a60c7e1c16e67823ff0bc016beb27bd7a001dbdabc6/regex-2026.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e19e18c568d2866d8b6a6dfad823db86193503f90823a8f66689315ba28fbe8", size = 866744, upload-time = "2026-04-03T20:55:24.646Z" }, + { url = "https://files.pythonhosted.org/packages/af/a4/0b90ca4cf17adc3cb43de80ec71018c37c88ad64987e8d0d481a95ca60b5/regex-2026.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7698a6f38730fd1385d390d1ed07bb13dce39aa616aca6a6d89bea178464b9a4", size = 911613, upload-time = "2026-04-03T20:55:27.033Z" }, + { url = "https://files.pythonhosted.org/packages/8e/3b/2b3dac0b82d41ab43aa87c6ecde63d71189d03fe8854b8ca455a315edac3/regex-2026.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:173a66f3651cdb761018078e2d9487f4cf971232c990035ec0eb1cdc6bf929a9", size = 800551, 
upload-time = "2026-04-03T20:55:29.532Z" }, + { url = "https://files.pythonhosted.org/packages/25/fe/5365eb7aa0e753c4b5957815c321519ecab033c279c60e1b1ae2367fa810/regex-2026.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa7922bbb2cc84fa062d37723f199d4c0cd200245ce269c05db82d904db66b83", size = 776911, upload-time = "2026-04-03T20:55:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/aa/b3/7fb0072156bba065e3b778a7bc7b0a6328212be5dd6a86fd207e0c4f2dab/regex-2026.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:59f67cd0a0acaf0e564c20bbd7f767286f23e91e2572c5703bf3e56ea7557edb", size = 785751, upload-time = "2026-04-03T20:55:33.797Z" }, + { url = "https://files.pythonhosted.org/packages/02/1a/9f83677eb699273e56e858f7bd95acdbee376d42f59e8bfca2fd80d79df3/regex-2026.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:475e50f3f73f73614f7cba5524d6de49dee269df00272a1b85e3d19f6d498465", size = 860484, upload-time = "2026-04-03T20:55:35.745Z" }, + { url = "https://files.pythonhosted.org/packages/3b/7a/93937507b61cfcff8b4c5857f1b452852b09f741daa9acae15c971d8554e/regex-2026.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:a1c0c7d67b64d85ac2e1879923bad2f08a08f3004055f2f406ef73c850114bd4", size = 765939, upload-time = "2026-04-03T20:55:37.972Z" }, + { url = "https://files.pythonhosted.org/packages/86/ea/81a7f968a351c6552b1670ead861e2a385be730ee28402233020c67f9e0f/regex-2026.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:1371c2ccbb744d66ee63631cc9ca12aa233d5749972626b68fe1a649dd98e566", size = 851417, upload-time = "2026-04-03T20:55:39.92Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7e/323c18ce4b5b8f44517a36342961a0306e931e499febbd876bb149d900f0/regex-2026.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:59968142787042db793348a3f5b918cf24ced1f23247328530e063f89c128a95", size = 789056, upload-time = "2026-04-03T20:55:42.303Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/af/e7510f9b11b1913b0cd44eddb784b2d650b2af6515bfce4cffcc5bfd1d38/regex-2026.4.4-cp314-cp314-win32.whl", hash = "sha256:59efe72d37fd5a91e373e5146f187f921f365f4abc1249a5ab446a60f30dd5f8", size = 272130, upload-time = "2026-04-03T20:55:44.995Z" }, + { url = "https://files.pythonhosted.org/packages/9a/51/57dae534c915e2d3a21490e88836fa2ae79dde3b66255ecc0c0a155d2c10/regex-2026.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:e0aab3ff447845049d676827d2ff714aab4f73f340e155b7de7458cf53baa5a4", size = 280992, upload-time = "2026-04-03T20:55:47.316Z" }, + { url = "https://files.pythonhosted.org/packages/0a/5e/abaf9f4c3792e34edb1434f06717fae2b07888d85cb5cec29f9204931bf8/regex-2026.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:a7a5bb6aa0cf62208bb4fa079b0c756734f8ad0e333b425732e8609bd51ee22f", size = 273563, upload-time = "2026-04-03T20:55:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/ff/06/35da85f9f217b9538b99cbb170738993bcc3b23784322decb77619f11502/regex-2026.4.4-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:97850d0638391bdc7d35dc1c1039974dcb921eaafa8cc935ae4d7f272b1d60b3", size = 494191, upload-time = "2026-04-03T20:55:51.258Z" }, + { url = "https://files.pythonhosted.org/packages/54/5b/1bc35f479eef8285c4baf88d8c002023efdeebb7b44a8735b36195486ae7/regex-2026.4.4-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ee7337f88f2a580679f7bbfe69dc86c043954f9f9c541012f49abc554a962f2e", size = 293877, upload-time = "2026-04-03T20:55:53.214Z" }, + { url = "https://files.pythonhosted.org/packages/39/5b/f53b9ad17480b3ddd14c90da04bfb55ac6894b129e5dea87bcaf7d00e336/regex-2026.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7429f4e6192c11d659900c0648ba8776243bf396ab95558b8c51a345afeddde6", size = 292410, upload-time = "2026-04-03T20:55:55.736Z" }, + { url = 
"https://files.pythonhosted.org/packages/bb/56/52377f59f60a7c51aa4161eecf0b6032c20b461805aca051250da435ffc9/regex-2026.4.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4f10fbd5dd13dcf4265b4cc07d69ca70280742870c97ae10093e3d66000359", size = 811831, upload-time = "2026-04-03T20:55:57.802Z" }, + { url = "https://files.pythonhosted.org/packages/dd/63/8026310bf066f702a9c361f83a8c9658f3fe4edb349f9c1e5d5273b7c40c/regex-2026.4.4-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a152560af4f9742b96f3827090f866eeec5becd4765c8e0d3473d9d280e76a5a", size = 871199, upload-time = "2026-04-03T20:56:00.333Z" }, + { url = "https://files.pythonhosted.org/packages/20/9f/a514bbb00a466dbb506d43f187a04047f7be1505f10a9a15615ead5080ee/regex-2026.4.4-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54170b3e95339f415d54651f97df3bff7434a663912f9358237941bbf9143f55", size = 917649, upload-time = "2026-04-03T20:56:02.445Z" }, + { url = "https://files.pythonhosted.org/packages/cb/6b/8399f68dd41a2030218839b9b18360d79b86d22b9fab5ef477c7f23ca67c/regex-2026.4.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:07f190d65f5a72dcb9cf7106bfc3d21e7a49dd2879eda2207b683f32165e4d99", size = 816388, upload-time = "2026-04-03T20:56:04.595Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/103963f47c24339a483b05edd568594c2be486188f688c0170fd504b2948/regex-2026.4.4-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9a2741ce5a29d3c84b0b94261ba630ab459a1b847a0d6beca7d62d188175c790", size = 785746, upload-time = "2026-04-03T20:56:07.13Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ee/7f6054c0dec0cee3463c304405e4ff42e27cff05bf36fcb34be549ab17bd/regex-2026.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:b26c30df3a28fd9793113dac7385a4deb7294a06c0f760dd2b008bd49a9139bc", size = 801483, upload-time = "2026-04-03T20:56:09.365Z" }, + { url = "https://files.pythonhosted.org/packages/30/c2/51d3d941cf6070dc00c3338ecf138615fc3cce0421c3df6abe97a08af61a/regex-2026.4.4-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:421439d1bee44b19f4583ccf42670ca464ffb90e9fdc38d37f39d1ddd1e44f1f", size = 866331, upload-time = "2026-04-03T20:56:12.039Z" }, + { url = "https://files.pythonhosted.org/packages/16/e8/76d50dcc122ac33927d939f350eebcfe3dbcbda96913e03433fc36de5e63/regex-2026.4.4-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:b40379b53ecbc747fd9bdf4a0ea14eb8188ca1bd0f54f78893a39024b28f4863", size = 772673, upload-time = "2026-04-03T20:56:14.558Z" }, + { url = "https://files.pythonhosted.org/packages/a5/6e/5f6bf75e20ea6873d05ba4ec78378c375cbe08cdec571c83fbb01606e563/regex-2026.4.4-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:08c55c13d2eef54f73eeadc33146fb0baaa49e7335eb1aff6ae1324bf0ddbe4a", size = 857146, upload-time = "2026-04-03T20:56:16.663Z" }, + { url = "https://files.pythonhosted.org/packages/0b/33/3c76d9962949e487ebba353a18e89399f292287204ac8f2f4cfc3a51c233/regex-2026.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9776b85f510062f5a75ef112afe5f494ef1635607bf1cc220c1391e9ac2f5e81", size = 803463, upload-time = "2026-04-03T20:56:18.923Z" }, + { url = "https://files.pythonhosted.org/packages/19/eb/ef32dcd2cb69b69bc0c3e55205bce94a7def48d495358946bc42186dcccc/regex-2026.4.4-cp314-cp314t-win32.whl", hash = "sha256:385edaebde5db5be103577afc8699fea73a0e36a734ba24870be7ffa61119d74", size = 275709, upload-time = "2026-04-03T20:56:20.996Z" }, + { url = "https://files.pythonhosted.org/packages/a0/86/c291bf740945acbf35ed7dbebf8e2eea2f3f78041f6bd7cdab80cb274dc0/regex-2026.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:5d354b18839328927832e2fa5f7c95b7a3ccc39e7a681529e1685898e6436d45", size = 285622, upload-time = "2026-04-03T20:56:23.641Z" }, + { 
url = "https://files.pythonhosted.org/packages/d5/e7/ec846d560ae6a597115153c02ca6138a7877a1748b2072d9521c10a93e58/regex-2026.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:af0384cb01a33600c49505c27c6c57ab0b27bf84a74e28524c92ca897ebdac9d", size = 275773, upload-time = "2026-04-03T20:56:26.07Z" }, +] + [[package]] name = "requests" version = "2.32.5" @@ -5641,6 +5760,60 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687", size = 28165, upload-time = "2024-07-05T07:25:29.591Z" }, ] +[[package]] +name = "tiktoken" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7d/ab/4d017d0f76ec3171d469d80fc03dfbb4e48a4bcaddaa831b31d526f05edc/tiktoken-0.12.0.tar.gz", hash = "sha256:b18ba7ee2b093863978fcb14f74b3707cdc8d4d4d3836853ce7ec60772139931", size = 37806, upload-time = "2025-10-06T20:22:45.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/46/21ea696b21f1d6d1efec8639c204bdf20fde8bafb351e1355c72c5d7de52/tiktoken-0.12.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e227c7f96925003487c33b1b32265fad2fbcec2b7cf4817afb76d416f40f6bb", size = 1051565, upload-time = "2025-10-06T20:21:44.566Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d9/35c5d2d9e22bb2a5f74ba48266fb56c63d76ae6f66e02feb628671c0283e/tiktoken-0.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c06cf0fcc24c2cb2adb5e185c7082a82cba29c17575e828518c2f11a01f445aa", size = 995284, upload-time = "2025-10-06T20:21:45.622Z" }, + { url = "https://files.pythonhosted.org/packages/01/84/961106c37b8e49b9fdcf33fe007bb3a8fdcc380c528b20cc7fbba80578b8/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = 
"sha256:f18f249b041851954217e9fd8e5c00b024ab2315ffda5ed77665a05fa91f42dc", size = 1129201, upload-time = "2025-10-06T20:21:47.074Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/3d9275198e067f8b65076a68894bb52fd253875f3644f0a321a720277b8a/tiktoken-0.12.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:47a5bc270b8c3db00bb46ece01ef34ad050e364b51d406b6f9730b64ac28eded", size = 1152444, upload-time = "2025-10-06T20:21:48.139Z" }, + { url = "https://files.pythonhosted.org/packages/78/db/a58e09687c1698a7c592e1038e01c206569b86a0377828d51635561f8ebf/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:508fa71810c0efdcd1b898fda574889ee62852989f7c1667414736bcb2b9a4bd", size = 1195080, upload-time = "2025-10-06T20:21:49.246Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/a9e4d2bf91d515c0f74afc526fd773a812232dd6cda33ebea7f531202325/tiktoken-0.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1af81a6c44f008cba48494089dd98cccb8b313f55e961a52f5b222d1e507967", size = 1255240, upload-time = "2025-10-06T20:21:50.274Z" }, + { url = "https://files.pythonhosted.org/packages/9d/15/963819345f1b1fb0809070a79e9dd96938d4ca41297367d471733e79c76c/tiktoken-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e68e3e593637b53e56f7237be560f7a394451cb8c11079755e80ae64b9e6def", size = 879422, upload-time = "2025-10-06T20:21:51.734Z" }, + { url = "https://files.pythonhosted.org/packages/a4/85/be65d39d6b647c79800fd9d29241d081d4eeb06271f383bb87200d74cf76/tiktoken-0.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b97f74aca0d78a1ff21b8cd9e9925714c15a9236d6ceacf5c7327c117e6e21e8", size = 1050728, upload-time = "2025-10-06T20:21:52.756Z" }, + { url = "https://files.pythonhosted.org/packages/4a/42/6573e9129bc55c9bf7300b3a35bef2c6b9117018acca0dc760ac2d93dffe/tiktoken-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b90f5ad190a4bb7c3eb30c5fa32e1e182ca1ca79f05e49b448438c3e225a49b", size = 994049, upload-time = 
"2025-10-06T20:21:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/66/c5/ed88504d2f4a5fd6856990b230b56d85a777feab84e6129af0822f5d0f70/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:65b26c7a780e2139e73acc193e5c63ac754021f160df919add909c1492c0fb37", size = 1129008, upload-time = "2025-10-06T20:21:54.832Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/3dae6cc5436137ebd38944d396b5849e167896fc2073da643a49f372dc4f/tiktoken-0.12.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:edde1ec917dfd21c1f2f8046b86348b0f54a2c0547f68149d8600859598769ad", size = 1152665, upload-time = "2025-10-06T20:21:56.129Z" }, + { url = "https://files.pythonhosted.org/packages/a3/fe/26df24ce53ffde419a42f5f53d755b995c9318908288c17ec3f3448313a3/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:35a2f8ddd3824608b3d650a000c1ef71f730d0c56486845705a8248da00f9fe5", size = 1194230, upload-time = "2025-10-06T20:21:57.546Z" }, + { url = "https://files.pythonhosted.org/packages/20/cc/b064cae1a0e9fac84b0d2c46b89f4e57051a5f41324e385d10225a984c24/tiktoken-0.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83d16643edb7fa2c99eff2ab7733508aae1eebb03d5dfc46f5565862810f24e3", size = 1254688, upload-time = "2025-10-06T20:21:58.619Z" }, + { url = "https://files.pythonhosted.org/packages/81/10/b8523105c590c5b8349f2587e2fdfe51a69544bd5a76295fc20f2374f470/tiktoken-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ffc5288f34a8bc02e1ea7047b8d041104791d2ddbf42d1e5fa07822cbffe16bd", size = 878694, upload-time = "2025-10-06T20:21:59.876Z" }, + { url = "https://files.pythonhosted.org/packages/00/61/441588ee21e6b5cdf59d6870f86beb9789e532ee9718c251b391b70c68d6/tiktoken-0.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:775c2c55de2310cc1bc9a3ad8826761cbdc87770e586fd7b6da7d4589e13dab3", size = 1050802, upload-time = "2025-10-06T20:22:00.96Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/05/dcf94486d5c5c8d34496abe271ac76c5b785507c8eae71b3708f1ad9b45a/tiktoken-0.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a01b12f69052fbe4b080a2cfb867c4de12c704b56178edf1d1d7b273561db160", size = 993995, upload-time = "2025-10-06T20:22:02.788Z" }, + { url = "https://files.pythonhosted.org/packages/a0/70/5163fe5359b943f8db9946b62f19be2305de8c3d78a16f629d4165e2f40e/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:01d99484dc93b129cd0964f9d34eee953f2737301f18b3c7257bf368d7615baa", size = 1128948, upload-time = "2025-10-06T20:22:03.814Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/c028aa0babf77315e1cef357d4d768800c5f8a6de04d0eac0f377cb619fa/tiktoken-0.12.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:4a1a4fcd021f022bfc81904a911d3df0f6543b9e7627b51411da75ff2fe7a1be", size = 1151986, upload-time = "2025-10-06T20:22:05.173Z" }, + { url = "https://files.pythonhosted.org/packages/a0/5a/886b108b766aa53e295f7216b509be95eb7d60b166049ce2c58416b25f2a/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:981a81e39812d57031efdc9ec59fa32b2a5a5524d20d4776574c4b4bd2e9014a", size = 1194222, upload-time = "2025-10-06T20:22:06.265Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f8/4db272048397636ac7a078d22773dd2795b1becee7bc4922fe6207288d57/tiktoken-0.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9baf52f84a3f42eef3ff4e754a0db79a13a27921b457ca9832cf944c6be4f8f3", size = 1255097, upload-time = "2025-10-06T20:22:07.403Z" }, + { url = "https://files.pythonhosted.org/packages/8e/32/45d02e2e0ea2be3a9ed22afc47d93741247e75018aac967b713b2941f8ea/tiktoken-0.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:b8a0cd0c789a61f31bf44851defbd609e8dd1e2c8589c614cc1060940ef1f697", size = 879117, upload-time = "2025-10-06T20:22:08.418Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/76/994fc868f88e016e6d05b0da5ac24582a14c47893f4474c3e9744283f1d5/tiktoken-0.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d5f89ea5680066b68bcb797ae85219c72916c922ef0fcdd3480c7d2315ffff16", size = 1050309, upload-time = "2025-10-06T20:22:10.939Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b8/57ef1456504c43a849821920d582a738a461b76a047f352f18c0b26c6516/tiktoken-0.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b4e7ed1c6a7a8a60a3230965bdedba8cc58f68926b835e519341413370e0399a", size = 993712, upload-time = "2025-10-06T20:22:12.115Z" }, + { url = "https://files.pythonhosted.org/packages/72/90/13da56f664286ffbae9dbcfadcc625439142675845baa62715e49b87b68b/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:fc530a28591a2d74bce821d10b418b26a094bf33839e69042a6e86ddb7a7fb27", size = 1128725, upload-time = "2025-10-06T20:22:13.541Z" }, + { url = "https://files.pythonhosted.org/packages/05/df/4f80030d44682235bdaecd7346c90f67ae87ec8f3df4a3442cb53834f7e4/tiktoken-0.12.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:06a9f4f49884139013b138920a4c393aa6556b2f8f536345f11819389c703ebb", size = 1151875, upload-time = "2025-10-06T20:22:14.559Z" }, + { url = "https://files.pythonhosted.org/packages/22/1f/ae535223a8c4ef4c0c1192e3f9b82da660be9eb66b9279e95c99288e9dab/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:04f0e6a985d95913cabc96a741c5ffec525a2c72e9df086ff17ebe35985c800e", size = 1194451, upload-time = "2025-10-06T20:22:15.545Z" }, + { url = "https://files.pythonhosted.org/packages/78/a7/f8ead382fce0243cb625c4f266e66c27f65ae65ee9e77f59ea1653b6d730/tiktoken-0.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0ee8f9ae00c41770b5f9b0bb1235474768884ae157de3beb5439ca0fd70f3e25", size = 1253794, upload-time = "2025-10-06T20:22:16.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/e0/6cc82a562bc6365785a3ff0af27a2a092d57c47d7a81d9e2295d8c36f011/tiktoken-0.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dc2dd125a62cb2b3d858484d6c614d136b5b848976794edfb63688d539b8b93f", size = 878777, upload-time = "2025-10-06T20:22:18.036Z" }, + { url = "https://files.pythonhosted.org/packages/72/05/3abc1db5d2c9aadc4d2c76fa5640134e475e58d9fbb82b5c535dc0de9b01/tiktoken-0.12.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a90388128df3b3abeb2bfd1895b0681412a8d7dc644142519e6f0a97c2111646", size = 1050188, upload-time = "2025-10-06T20:22:19.563Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7b/50c2f060412202d6c95f32b20755c7a6273543b125c0985d6fa9465105af/tiktoken-0.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:da900aa0ad52247d8794e307d6446bd3cdea8e192769b56276695d34d2c9aa88", size = 993978, upload-time = "2025-10-06T20:22:20.702Z" }, + { url = "https://files.pythonhosted.org/packages/14/27/bf795595a2b897e271771cd31cb847d479073497344c637966bdf2853da1/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:285ba9d73ea0d6171e7f9407039a290ca77efcdb026be7769dccc01d2c8d7fff", size = 1129271, upload-time = "2025-10-06T20:22:22.06Z" }, + { url = "https://files.pythonhosted.org/packages/f5/de/9341a6d7a8f1b448573bbf3425fa57669ac58258a667eb48a25dfe916d70/tiktoken-0.12.0-cp314-cp314-manylinux_2_28_x86_64.whl", hash = "sha256:d186a5c60c6a0213f04a7a802264083dea1bbde92a2d4c7069e1a56630aef830", size = 1151216, upload-time = "2025-10-06T20:22:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/75/0d/881866647b8d1be4d67cb24e50d0c26f9f807f994aa1510cb9ba2fe5f612/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:604831189bd05480f2b885ecd2d1986dc7686f609de48208ebbbddeea071fc0b", size = 1194860, upload-time = "2025-10-06T20:22:24.602Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/1e/b651ec3059474dab649b8d5b69f5c65cd8fcd8918568c1935bd4136c9392/tiktoken-0.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8f317e8530bb3a222547b85a58583238c8f74fd7a7408305f9f63246d1a0958b", size = 1254567, upload-time = "2025-10-06T20:22:25.671Z" }, + { url = "https://files.pythonhosted.org/packages/80/57/ce64fd16ac390fafde001268c364d559447ba09b509181b2808622420eec/tiktoken-0.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:399c3dd672a6406719d84442299a490420b458c44d3ae65516302a99675888f3", size = 921067, upload-time = "2025-10-06T20:22:26.753Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a4/72eed53e8976a099539cdd5eb36f241987212c29629d0a52c305173e0a68/tiktoken-0.12.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2c714c72bc00a38ca969dae79e8266ddec999c7ceccd603cc4f0d04ccd76365", size = 1050473, upload-time = "2025-10-06T20:22:27.775Z" }, + { url = "https://files.pythonhosted.org/packages/e6/d7/0110b8f54c008466b19672c615f2168896b83706a6611ba6e47313dbc6e9/tiktoken-0.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cbb9a3ba275165a2cb0f9a83f5d7025afe6b9d0ab01a22b50f0e74fee2ad253e", size = 993855, upload-time = "2025-10-06T20:22:28.799Z" }, + { url = "https://files.pythonhosted.org/packages/5f/77/4f268c41a3957c418b084dd576ea2fad2e95da0d8e1ab705372892c2ca22/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:dfdfaa5ffff8993a3af94d1125870b1d27aed7cb97aa7eb8c1cefdbc87dbee63", size = 1129022, upload-time = "2025-10-06T20:22:29.981Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2b/fc46c90fe5028bd094cd6ee25a7db321cb91d45dc87531e2bdbb26b4867a/tiktoken-0.12.0-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:584c3ad3d0c74f5269906eb8a659c8bfc6144a52895d9261cdaf90a0ae5f4de0", size = 1150736, upload-time = "2025-10-06T20:22:30.996Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/c0/3c7a39ff68022ddfd7d93f3337ad90389a342f761c4d71de99a3ccc57857/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:54c891b416a0e36b8e2045b12b33dd66fb34a4fe7965565f1b482da50da3e86a", size = 1194908, upload-time = "2025-10-06T20:22:32.073Z" }, + { url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" }, + { url = "https://files.pythonhosted.org/packages/af/df/c7891ef9d2712ad774777271d39fdef63941ffba0a9d59b7ad1fd2765e57/tiktoken-0.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:f61c0aea5565ac82e2ec50a05e02a6c44734e91b51c10510b084ea1b8e633a71", size = 920667, upload-time = "2025-10-06T20:22:34.444Z" }, +] + [[package]] name = "tomli-w" version = "1.2.0"