diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py index 2b987cdcf3f5..20a41df7ef73 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/__init__.py @@ -3,22 +3,30 @@ # --------------------------------------------------------- __path__ = __import__("pkgutil").extend_path(__path__, __name__) -from typing import TYPE_CHECKING, Optional, Any +from typing import TYPE_CHECKING, Any, Optional -from .agent_framework import AgentFrameworkCBAgent -from .tool_client import ToolClient -from ._version import VERSION +from azure.ai.agentserver.agentframework._version import VERSION +from azure.ai.agentserver.agentframework._agent_framework import AgentFrameworkCBAgent +from azure.ai.agentserver.agentframework._foundry_tools import FoundryToolsChatMiddleware +from azure.ai.agentserver.core.application import PackageMetadata, set_current_app if TYPE_CHECKING: # pragma: no cover from azure.core.credentials_async import AsyncTokenCredential -def from_agent_framework(agent, - credentials: Optional["AsyncTokenCredential"] = None, - **kwargs: Any) -> "AgentFrameworkCBAgent": +def from_agent_framework( + agent, + credentials: Optional["AsyncTokenCredential"] = None, + **kwargs: Any, +) -> "AgentFrameworkCBAgent": return AgentFrameworkCBAgent(agent, credentials=credentials, **kwargs) -__all__ = ["from_agent_framework", "ToolClient"] +__all__ = [ + "from_agent_framework", + "FoundryToolsChatMiddleware", +] __version__ = VERSION + +set_current_app(PackageMetadata.from_dist("azure-ai-agentserver-agentframework")) \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py 
b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py similarity index 63% rename from sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py rename to sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py index 233436ac84ea..9eb649a38f19 100644 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/agent_framework.py +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_agent_framework.py @@ -6,13 +6,12 @@ import os from typing import TYPE_CHECKING, Any, AsyncGenerator, Awaitable, Optional, Protocol, Union, List -import inspect from agent_framework import AgentProtocol, AIFunction from agent_framework.azure import AzureAIClient # pylint: disable=no-name-in-module from opentelemetry import trace -from azure.ai.agentserver.core.client.tools import OAuthConsentRequiredError +from ..core.tools._exceptions import OAuthConsentRequiredError from azure.ai.agentserver.core import AgentRunContext, FoundryCBAgent from azure.ai.agentserver.core.constants import Constants as AdapterConstants from azure.ai.agentserver.core.logger import APPINSIGHT_CONNSTR_ENV_NAME, get_logger @@ -21,6 +20,7 @@ Response as OpenAIResponse, ResponseStreamEvent, ) +from azure.ai.agentserver.core.models.projects import ResponseErrorEvent, ResponseFailedEvent from .models.agent_framework_input_converters import AgentFrameworkInputConverter from .models.agent_framework_output_non_streaming_converter import ( @@ -28,7 +28,6 @@ ) from .models.agent_framework_output_streaming_converter import AgentFrameworkOutputStreamingConverter from .models.constants import Constants -from .tool_client import ToolClient if TYPE_CHECKING: from azure.core.credentials_async import AsyncTokenCredential @@ -39,12 +38,12 @@ class AgentFactory(Protocol): """Protocol for agent factory 
functions. - An agent factory is a callable that takes a ToolClient and returns + An agent factory is a callable that takes a list of tools and returns an AgentProtocol, either synchronously or asynchronously. """ def __call__(self, tools: List[AIFunction]) -> Union[AgentProtocol, Awaitable[AgentProtocol]]: - """Create an AgentProtocol using the provided ToolClient. + """Create an AgentProtocol using the provided tools. :param tools: The list of AIFunction tools available to the agent. :type tools: List[AIFunction] @@ -71,7 +70,7 @@ class AgentFrameworkCBAgent(FoundryCBAgent): - Supports both streaming and non-streaming responses based on the `stream` flag. """ - def __init__(self, agent: Union[AgentProtocol, AgentFactory], + def __init__(self, agent: AgentProtocol, credentials: "Optional[AsyncTokenCredential]" = None, **kwargs: Any): """Initialize the AgentFrameworkCBAgent with an AgentProtocol or a factory function. @@ -83,14 +82,7 @@ def __init__(self, agent: Union[AgentProtocol, AgentFactory], :type credentials: Optional[AsyncTokenCredential] """ super().__init__(credentials=credentials, **kwargs) # pylint: disable=unexpected-keyword-arg - self._agent_or_factory: Union[AgentProtocol, AgentFactory] = agent - self._resolved_agent: "Optional[AgentProtocol]" = None - # If agent is already instantiated, use it directly - if isinstance(agent, AgentProtocol): - self._resolved_agent = agent - logger.info(f"Initialized AgentFrameworkCBAgent with agent: {type(agent).__name__}") - else: - logger.info("Initialized AgentFrameworkCBAgent with agent factory") + self._agent: AgentProtocol = agent @property def agent(self) -> "Optional[AgentProtocol]": @@ -99,7 +91,7 @@ def agent(self) -> "Optional[AgentProtocol]": :return: The resolved AgentProtocol if available, None otherwise. 
:rtype: Optional[AgentProtocol] """ - return self._resolved_agent + return self._agent def _resolve_stream_timeout(self, request_body: CreateResponse) -> float: """Resolve idle timeout for streaming updates. @@ -121,51 +113,6 @@ def _resolve_stream_timeout(self, request_body: CreateResponse) -> float: env_val = os.getenv(Constants.AGENTS_ADAPTER_STREAM_TIMEOUT_S) return float(env_val) if env_val is not None else float(Constants.DEFAULT_STREAM_TIMEOUT_S) - async def _resolve_agent(self, context: AgentRunContext): - """Resolve the agent if it's a factory function (for single-use/first-time resolution). - Creates a ToolClient and calls the factory function with it. - This is used for the initial resolution. - - :param context: The agent run context containing tools and user information. - :type context: AgentRunContext - """ - if callable(self._agent_or_factory): - logger.debug("Resolving agent from factory function") - - # Create ToolClient with credentials - tool_client = self.get_tool_client(tools=context.get_tools(), user_info=context.get_user_info()) # pylint: disable=no-member - tool_client_wrapper = ToolClient(tool_client) - tools = await tool_client_wrapper.list_tools() - - result = self._agent_or_factory(tools) - if inspect.iscoroutine(result): - self._resolved_agent = await result - else: - self._resolved_agent = result - - logger.debug("Agent resolved successfully") - else: - # Should not reach here, but just in case - self._resolved_agent = self._agent_or_factory - - async def _resolve_agent_for_request(self, context: AgentRunContext): - - logger.debug("Resolving fresh agent from factory function for request") - - # Create ToolClient with credentials - tool_client = self.get_tool_client(tools=context.get_tools(), user_info=context.get_user_info()) # pylint: disable=no-member - tool_client_wrapper = ToolClient(tool_client) - tools = await tool_client_wrapper.list_tools() - - result = self._agent_or_factory(tools) - if inspect.iscoroutine(result): - agent = 
await result - else: - agent = result - - logger.debug("Fresh agent resolved successfully for request") - return agent, tool_client_wrapper - def init_tracing(self): try: exporter = os.environ.get(AdapterConstants.OTEL_EXPORTER_ENDPOINT) @@ -209,18 +156,7 @@ async def agent_run( # pylint: disable=too-many-statements OpenAIResponse, AsyncGenerator[ResponseStreamEvent, Any], ]: - # Resolve agent - always resolve if it's a factory function to get fresh agent each time - # For factories, get a new agent instance per request to avoid concurrency issues - tool_client = None try: - if callable(self._agent_or_factory): - agent, tool_client = await self._resolve_agent_for_request(context) - elif self._resolved_agent is None: - await self._resolve_agent(context) - agent = self._resolved_agent - else: - agent = self._resolved_agent - logger.info(f"Starting agent_run with stream={context.stream}") request_input = context.request.get("input") @@ -236,27 +172,56 @@ async def agent_run( # pylint: disable=too-many-statements async def stream_updates(): try: update_count = 0 - updates = agent.run_stream(message) - async for event in streaming_converter.convert(updates): - update_count += 1 - yield event - - logger.info("Streaming completed with %d updates", update_count) + try: + updates = self.agent.run_stream(message) + async for event in streaming_converter.convert(updates): + update_count += 1 + yield event + + logger.info("Streaming completed with %d updates", update_count) + except OAuthConsentRequiredError as e: + logger.info("OAuth consent required during streaming updates") + if update_count == 0: + async for event in self.respond_with_oauth_consent_astream(context, e): + yield event + else: + # If we've already emitted events, we cannot safely restart a new + # OAuth-consent stream (it would reset sequence numbers). 
+ yield ResponseErrorEvent( + sequence_number=streaming_converter.next_sequence(), + code="server_error", + message=f"OAuth consent required: {e.consent_url}", + param="agent_run", + ) + yield ResponseFailedEvent( + sequence_number=streaming_converter.next_sequence(), + response=streaming_converter._build_response(status="failed"), # pylint: disable=protected-access + ) + except Exception as e: # pylint: disable=broad-exception-caught + logger.error("Unhandled exception during streaming updates: %s", e, exc_info=True) + + # Emit well-formed error events instead of terminating the stream. + yield ResponseErrorEvent( + sequence_number=streaming_converter.next_sequence(), + code="server_error", + message=str(e), + param="agent_run", + ) + yield ResponseFailedEvent( + sequence_number=streaming_converter.next_sequence(), + response=streaming_converter._build_response(status="failed"), # pylint: disable=protected-access + ) finally: - # Close tool_client if it was created for this request - if tool_client is not None: - try: - await tool_client.close() - logger.debug("Closed tool_client after streaming completed") - except Exception as ex: # pylint: disable=broad-exception-caught - logger.warning(f"Error closing tool_client in stream: {ex}") + # No request-scoped resources to clean up here today. + # Keep this block as a hook for future request-scoped cleanup. 
+ pass return stream_updates() # Non-streaming path logger.info("Running agent in non-streaming mode") non_streaming_converter = AgentFrameworkOutputNonStreamingConverter(context) - result = await agent.run(message) + result = await self.agent.run(message) logger.debug(f"Agent run completed, result type: {type(result)}") transformed_result = non_streaming_converter.transform_output_for_response(result) logger.info("Agent run and transformation completed successfully") @@ -272,10 +237,4 @@ async def oauth_consent_stream(error=e): return oauth_consent_stream() return await self.respond_with_oauth_consent(context, e) finally: - # Close tool_client if it was created for this request (non-streaming only, streaming handles in generator) - if not context.stream and tool_client is not None: - try: - await tool_client.close() - logger.debug("Closed tool_client after request processing") - except Exception as ex: # pylint: disable=broad-exception-caught - logger.warning(f"Error closing tool_client: {ex}") + pass diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_foundry_tools.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_foundry_tools.py new file mode 100644 index 000000000000..875c1de24e8c --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/_foundry_tools.py @@ -0,0 +1,150 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from __future__ import annotations + +import inspect +from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence + +from agent_framework import AIFunction, ChatContext, ChatOptions, ChatMiddleware +from pydantic import Field, create_model + +from azure.ai.agentserver.core import AgentServerContext +from azure.ai.agentserver.core.logger import get_logger +from azure.ai.agentserver.core.tools import FoundryToolLike, ResolvedFoundryTool + +logger = get_logger() + + +def _attach_signature_from_pydantic_model(func, input_model) -> None: + params = [] + annotations: Dict[str, Any] = {} + + for name, field in input_model.model_fields.items(): + ann = field.annotation or Any + annotations[name] = ann + + default = inspect._empty if field.is_required() else field.default + params.append( + inspect.Parameter( + name=name, + kind=inspect.Parameter.KEYWORD_ONLY, + default=default, + annotation=ann, + ) + ) + + func.__signature__ = inspect.Signature(parameters=params, return_annotation=Any) + func.__annotations__ = {**annotations, "return": Any} + +class FoundryToolClient: + + def __init__( + self, + tools: Sequence[FoundryToolLike], + ) -> None: + self._allowed_tools: List[FoundryToolLike] = list(tools) + + async def list_tools(self) -> List[AIFunction]: + server_context = AgentServerContext.get() + foundry_tool_catalog = server_context.tools.catalog + resolved_tools = await foundry_tool_catalog.list(self._allowed_tools) + return [self._to_aifunction(tool) for tool in resolved_tools] + + def _to_aifunction(self, foundry_tool: "ResolvedFoundryTool") -> AIFunction: + """Convert an FoundryTool to an Agent Framework AI Function + + :param foundry_tool: The FoundryTool to convert. + :type foundry_tool: ~azure.ai.agentserver.core.client.tools.aio.FoundryTool + :return: An AI Function Tool. 
+ :rtype: AIFunction + """ + # Get the input schema from the tool descriptor + input_schema = foundry_tool.input_schema or {} + + # Create a Pydantic model from the input schema + properties = input_schema.properties or {} + required_fields = set(input_schema.required or []) + + # Build field definitions for the Pydantic model + field_definitions: Dict[str, Any] = {} + for field_name, field_info in properties.items(): + field_type = self._json_schema_type_to_python(field_info.type or "string") + field_description = field_info.description or "" + is_required = field_name in required_fields + + if is_required: + field_definitions[field_name] = (field_type, Field(description=field_description)) + else: + field_definitions[field_name] = (Optional[field_type], + Field(default=None, description=field_description)) + + # Create the Pydantic model dynamically + input_model = create_model( + f"{foundry_tool.name}_input", + **field_definitions + ) + + # Create a wrapper function that calls the Azure tool + async def tool_func(**kwargs: Any) -> Any: + """Dynamically generated function to invoke the Azure AI tool. + + :return: The result from the tool invocation. + :rtype: Any + """ + server_context = AgentServerContext.get() + logger.debug("Invoking tool: %s with input: %s", foundry_tool.name, kwargs) + return await server_context.tools.invoke(foundry_tool, kwargs) + _attach_signature_from_pydantic_model(tool_func, input_model) + + # Create and return the AIFunction + return AIFunction( + name=foundry_tool.name, + description=foundry_tool.description or "No description available", + func=tool_func, + input_model=input_model + ) + + def _json_schema_type_to_python(self, json_type: str) -> type: + """Convert JSON schema type to Python type. + + :param json_type: The JSON schema type string. + :type json_type: str + :return: The corresponding Python type. 
+ :rtype: type + """ + type_map = { + "string": str, + "number": float, + "integer": int, + "boolean": bool, + "array": list, + "object": dict, + } + return type_map.get(json_type, str) + + +class FoundryToolsChatMiddleware(ChatMiddleware): + """Chat middleware to inject Foundry tools into ChatOptions on each call.""" + + def __init__( + self, + tools: Sequence[FoundryToolLike]) -> None: + self._foundry_tool_client = FoundryToolClient(tools=tools) + + async def process( + self, + context: ChatContext, + next: Callable[[ChatContext], Awaitable[None]], + ) -> None: + tools = await self._foundry_tool_client.list_tools() + base_chat_options = context.chat_options + if not base_chat_options: + logger.debug("No existing ChatOptions found, creating new one with Foundry tools.") + base_chat_options = ChatOptions(tools=tools) + context.chat_options = base_chat_options + else: + logger.debug("Adding Foundry tools to existing ChatOptions.") + base_tools = base_chat_options.tools or [] + context.chat_options.tools = base_tools + tools + await next(context) diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py b/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py deleted file mode 100644 index 8b7142f0862a..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/azure/ai/agentserver/agentframework/tool_client.py +++ /dev/null @@ -1,183 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -# mypy: disable-error-code="assignment" -"""Tool client for integrating AzureAIToolClient with Agent Framework.""" - -from typing import TYPE_CHECKING, Any, Dict, List, Optional -from agent_framework import AIFunction -from pydantic import Field, create_model -from azure.ai.agentserver.core.logger import get_logger -if TYPE_CHECKING: - from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient, FoundryTool - -logger = get_logger() - -# pylint: disable=client-accepts-api-version-keyword,missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class ToolClient: - """Client that integrates AzureAIToolClient with Agent Framework. - - This class provides methods to list tools from AzureAIToolClient and invoke them - in a format compatible with Agent Framework agents. - - :param tool_client: The AzureAIToolClient instance to use for tool operations. - :type tool_client: ~azure.ai.agentserver.core.client.tools.aio.AzureAIToolClient - - .. admonition:: Example: - - .. code-block:: python - - from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient - from azure.ai.agentserver.agentframework import ToolClient - from azure.identity.aio import DefaultAzureCredential - - async with DefaultAzureCredential() as credential: - tool_client = AzureAIToolClient( - endpoint="https://", - credential=credential - ) - - client = ToolClient(tool_client) - - # List tools as Agent Framework tool definitions - tools = await client.list_tools() - - # Invoke a tool directly - result = await client.invoke_tool( - tool_name="my_tool", - tool_input={"param": "value"} - ) - - :meta private: - """ - - def __init__(self, tool_client: "AzureAIToolClient") -> None: - """Initialize the ToolClient. - - :param tool_client: The AzureAIToolClient instance to use for tool operations. 
- :type tool_client: ~azure.ai.agentserver.core.client.tools.aio.AzureAIToolClient - """ - self._tool_client = tool_client - self._aifunction_cache: List[AIFunction] = None - - async def list_tools(self) -> List[AIFunction]: - """List all available tools as Agent Framework tool definitions. - - Retrieves tools from AzureAIToolClient and returns them in a format - compatible with Agent Framework. - - :return: List of tool definitions. - :rtype: List[AIFunction] - :raises ~azure.core.exceptions.HttpResponseError: - Raised for HTTP communication failures. - - .. admonition:: Example: - - .. code-block:: python - - client = ToolClient(tool_client) - tools = await client.list_tools() - """ - # Get tools from AzureAIToolClient - if self._aifunction_cache is not None: - return self._aifunction_cache - - azure_tools = await self._tool_client.list_tools() - self._aifunction_cache = [] - - # Convert to Agent Framework tool definitions - for azure_tool in azure_tools: - ai_function_tool = self._convert_to_agent_framework_tool(azure_tool) - self._aifunction_cache.append(ai_function_tool) - - return self._aifunction_cache - - def _convert_to_agent_framework_tool(self, azure_tool: "FoundryTool") -> AIFunction: - """Convert an AzureAITool to an Agent Framework AI Function - - :param azure_tool: The AzureAITool to convert. - :type azure_tool: ~azure.ai.agentserver.core.client.tools.aio.FoundryTool - :return: An AI Function Tool. 
- :rtype: AIFunction - """ - # Get the input schema from the tool descriptor - input_schema = azure_tool.input_schema or {} - - # Create a Pydantic model from the input schema - properties = input_schema.get("properties") or {} - required_fields = set(input_schema.get("required") or []) - - # Build field definitions for the Pydantic model - field_definitions: Dict[str, Any] = {} - for field_name, field_info in properties.items(): - field_type = self._json_schema_type_to_python(field_info.get("type", "string")) - field_description = field_info.get("description", "") - is_required = field_name in required_fields - - if is_required: - field_definitions[field_name] = (field_type, Field(description=field_description)) - else: - field_definitions[field_name] = (Optional[field_type], - Field(default=None, description=field_description)) - - # Create the Pydantic model dynamically - input_model = create_model( - f"{azure_tool.name}_input", - **field_definitions - ) - - # Create a wrapper function that calls the Azure tool - async def tool_func(**kwargs: Any) -> Any: - """Dynamically generated function to invoke the Azure AI tool. - - :return: The result from the tool invocation. - :rtype: Any - """ - logger.debug("Invoking tool: %s with input: %s", azure_tool.name, kwargs) - return await azure_tool.ainvoke(kwargs) - - # Create and return the AIFunction - return AIFunction( - name=azure_tool.name, - description=azure_tool.description or "No description available", - func=tool_func, - input_model=input_model - ) - - def _json_schema_type_to_python(self, json_type: str) -> type: - """Convert JSON schema type to Python type. - - :param json_type: The JSON schema type string. - :type json_type: str - :return: The corresponding Python type. 
- :rtype: type - """ - type_map = { - "string": str, - "number": float, - "integer": int, - "boolean": bool, - "array": list, - "object": dict, - } - return type_map.get(json_type, str) - - async def close(self) -> None: - """Close the tool client and release resources.""" - await self._tool_client.close() - - async def __aenter__(self) -> "ToolClient": - """Async context manager entry. - - :return: The ToolClient instance. - :rtype: ToolClient - """ - return self - - async def __aexit__(self, *exc_details: Any) -> None: - """Async context manager exit. - - :param exc_details: Exception details if an exception occurred. - :type exc_details: Any - """ - await self.close() diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/README.md b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/README.md new file mode 100644 index 000000000000..956fc634eb11 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/README.md @@ -0,0 +1,81 @@ +# Chat Client With Foundry Tools + +This sample demonstrates how to attach `FoundryToolsChatMiddleware` to an Agent Framework chat client so that: + +- Foundry tools configured in your Azure AI Project are converted into Agent Framework `AIFunction` tools. +- The tools are injected automatically for each agent run. + +## What this sample does + +The script creates an Agent Framework agent using: + +- `AzureOpenAIChatClient` for model inference +- `FoundryToolsChatMiddleware` to resolve and inject Foundry tools +- `from_agent_framework(agent).run()` to start an AgentServer-compatible HTTP server + +## Prerequisites + +- Python 3.10+ +- An Azure AI Project endpoint +- A tool connection configured in that project (e.g. an MCP connection) +- Azure credentials available to `DefaultAzureCredential` + +## Setup + +1. Install dependencies: + +```bash +pip install -r requirements.txt +``` + +2. 
Update `.env` in this folder with your values. At minimum you need: + +```dotenv +AZURE_OPENAI_ENDPOINT=https://.openai.azure.com/ +AZURE_OPENAI_CHAT_DEPLOYMENT_NAME= +OPENAI_API_VERSION= + +AZURE_AI_PROJECT_ENDPOINT=https://.services.ai.azure.com/api/projects/ +AZURE_AI_PROJECT_TOOL_CONNECTION_ID= +``` + +Notes: + +- This sample uses `DefaultAzureCredential()`. Make sure you are signed in (e.g. `az login`) or otherwise configured. + +## Run + +```bash +python chat_client_with_foundry_tool.py +``` + +This starts a local Uvicorn server (it will keep running and wait for requests). If it looks "stuck" at startup, it may just be waiting for requests. + +## Key code + +The core pattern used by this sample: + +```python +agent = AzureOpenAIChatClient( + credential=DefaultAzureCredential(), + middleware=FoundryToolsChatMiddleware( + tools=[{"type": "mcp", "project_connection_id": tool_connection_id}], + ), +).create_agent( + name="FoundryToolAgent", + instructions="You are a helpful assistant with access to various tools.", +) + +from_agent_framework(agent).run() +``` + +## Troubleshooting + +- **No tools found**: verify `AZURE_AI_PROJECT_TOOL_CONNECTION_ID` points at an existing tool connection in your project. +- **Auth failures**: confirm `DefaultAzureCredential` can acquire a token (try `az login`). +- **Import errors / weird agent_framework circular import**: ensure you are running the sample from this folder (not from inside the package module directory) so the external `agent_framework` dependency is imported correctly. 
+ ## Learn more + - Azure AI Agent Service: https://learn.microsoft.com/azure/ai-services/agents/ + - Agent Framework: https://github.com/microsoft/agent-framework diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/chat_client_with_foundry_tool.py b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/chat_client_with_foundry_tool.py new file mode 100644 index 000000000000..cb9c3cd2c9c6 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/chat_client_with_foundry_tool.py @@ -0,0 +1,34 @@ +# Copyright (c) Microsoft. All rights reserved. +"""Example showing how to use FoundryToolsChatMiddleware with a chat client. + +This sample demonstrates how to attach FoundryToolsChatMiddleware to an +Agent Framework chat client so that Foundry tools configured in your Azure +AI project are converted into AIFunction tools and injected automatically +on each agent run.
+""" + +import os +from dotenv import load_dotenv +from agent_framework.azure import AzureOpenAIChatClient + +from azure.ai.agentserver.agentframework import from_agent_framework, FoundryToolsChatMiddleware +from azure.identity import DefaultAzureCredential + +load_dotenv() + +def main(): + tool_connection_id = os.getenv("AZURE_AI_PROJECT_TOOL_CONNECTION_ID") + + agent = AzureOpenAIChatClient( + credential=DefaultAzureCredential(), + middleware=FoundryToolsChatMiddleware( + tools=[{"type": "mcp", "project_connection_id": tool_connection_id}] + )).create_agent( + name="FoundryToolAgent", + instructions="You are a helpful assistant with access to various tools.", + ) + + from_agent_framework(agent).run() + +if __name__ == "__main__": + main() diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/requirements.txt b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/requirements.txt similarity index 100% rename from sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/requirements.txt rename to sdk/agentserver/azure-ai-agentserver-agentframework/samples/chat_client_with_foundry_tool/requirements.txt diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/README.md b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/README.md deleted file mode 100644 index 019e388975ff..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/README.md +++ /dev/null @@ -1,113 +0,0 @@ -# Tool Client Example - -This example demonstrates how to use the `ToolClient` with Agent Framework to dynamically access tools from Azure AI Tool Client. - -## Overview - -The `ToolClient` provides a bridge between Azure AI Tool Client and Agent Framework, allowing agents to access tools configured in your Azure AI project. 
This example shows how to use a factory function pattern to create agents dynamically with access to tools at runtime. - -## Features - -- **Dynamic Tool Access**: Agents can list and invoke tools from Azure AI Tool Client -- **Factory Pattern**: Create fresh agent instances per request to avoid concurrency issues -- **Tool Integration**: Seamlessly integrate Azure AI tools with Agent Framework agents - -## Prerequisites - -- Python 3.10 or later -- Azure AI project with configured tools -- Azure credentials (DefaultAzureCredential) - -## Setup - -1. Install dependencies: -```bash -pip install -r requirements.txt -``` - -2. Configure environment variables in `.env`: -``` -AZURE_AI_PROJECT_ENDPOINT=https://.services.ai.azure.com/api/projects/ -``` - -3. Ensure your Azure AI project has tools configured (e.g., MCP connections) - -## Running the Example - -```bash -python agent_factory_example.py -``` - -## How It Works - -1. **Factory Function**: The example creates a factory function that: - - Receives a `ToolClient` instance - - Lists available tools from Azure AI Tool Client - - Creates an Agent Framework agent with those tools - - Returns the agent instance - -2. **Dynamic Agent Creation**: The factory is called for each request, ensuring: - - Fresh agent instances per request - - Latest tool configurations - - No concurrency issues - -3. 
**Tool Access**: The agent can use tools like: - - MCP (Model Context Protocol) connections - - Function tools - - Other Azure AI configured tools - -## Key Code Patterns - -### Creating a Factory Function - -```python -async def agent_factory(tool_client: ToolClient): - # List tools from Azure AI - tools = await tool_client.list_tools() - - # Create agent with tools - agent = Agent( - name="MyAgent", - model="gpt-4o", - instructions="You are a helpful assistant.", - tools=tools - ) - return agent -``` - -### Using the Factory - -```python -from azure.ai.agentserver.agentframework import from_agent_framework - -adapter = from_agent_framework( - agent_factory, - credentials=credential, - tools=[{"type": "mcp", "project_connection_id": "my-mcp"}] -) -``` - -## Alternative: Direct Agent Usage - -You can also use a pre-created agent instead of a factory: - -```python -agent = Agent( - name="MyAgent", - model="gpt-4o", - instructions="You are a helpful assistant." -) - -adapter = from_agent_framework(agent, credentials=credential) -``` - -## Troubleshooting - -- **No tools found**: Ensure your Azure AI project has tools configured -- **Authentication errors**: Check your Azure credentials and project endpoint -- **Import errors**: Verify all dependencies are installed - -## Learn More - -- [Azure AI Agent Service Documentation](https://learn.microsoft.com/azure/ai-services/agents/) -- [Agent Framework Documentation](https://github.com/microsoft/agent-framework) diff --git a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/agent_factory_example.py b/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/agent_factory_example.py deleted file mode 100644 index bc4d6bf8806d..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-agentframework/samples/tool_client_example/agent_factory_example.py +++ /dev/null @@ -1,109 +0,0 @@ -# Copyright (c) Microsoft. All rights reserved. 
-"""Example showing how to use an agent factory function with ToolClient. - -This sample demonstrates how to pass a factory function to from_agent_framework -that receives a ToolClient and returns an AgentProtocol. This pattern allows -the agent to be created dynamically with access to tools from Azure AI Tool -Client at runtime. -""" - -import asyncio -import os -from typing import List -from dotenv import load_dotenv -from agent_framework import AIFunction -from agent_framework.azure import AzureOpenAIChatClient - -from azure.ai.agentserver.agentframework import from_agent_framework -from azure.identity.aio import DefaultAzureCredential - -load_dotenv() - - -def create_agent_factory(): - """Create a factory function that builds an agent with ToolClient. - - This function returns a factory that takes a ToolClient and returns - an AgentProtocol. The agent is created at runtime for every request, - allowing it to access the latest tool configuration dynamically. - """ - - async def agent_factory(tools: List[AIFunction]) -> AzureOpenAIChatClient: - """Factory function that creates an agent using the provided tools. - - :param tools: The list of AIFunction tools available to the agent. - :type tools: List[AIFunction] - :return: An Agent Framework ChatAgent instance. 
- :rtype: ChatAgent - """ - # List all available tools from the ToolClient - print("Fetching tools from Azure AI Tool Client via factory...") - print(f"Found {len(tools)} tools:") - for tool in tools: - print(f" - tool: {tool.name}, description: {tool.description}") - - if not tools: - print("\nNo tools found!") - print("Make sure your Azure AI project has tools configured.") - raise ValueError("No tools available to create agent") - - # Create the Agent Framework agent with the tools - print("\nCreating Agent Framework agent with tools from factory...") - agent = AzureOpenAIChatClient(credential=DefaultAzureCredential()).create_agent( - name="ToolClientAgent", - instructions="You are a helpful assistant with access to various tools.", - tools=tools, - ) - - print("Agent created successfully!") - return agent - - return agent_factory - - -async def quickstart(): - """Build and return an AgentFrameworkCBAgent using an agent factory function.""" - - # Get configuration from environment - project_endpoint = os.getenv("AZURE_AI_PROJECT_ENDPOINT") - - if not project_endpoint: - raise ValueError( - "AZURE_AI_PROJECT_ENDPOINT environment variable is required. 
" - "Set it to your Azure AI project endpoint, e.g., " - "https://.services.ai.azure.com/api/projects/" - ) - - # Create Azure credentials - credential = DefaultAzureCredential() - - # Create a factory function that will build the agent at runtime - # The factory will receive a ToolClient when the agent first runs - agent_factory = create_agent_factory() - - tool_connection_id = os.getenv("AZURE_AI_PROJECT_TOOL_CONNECTION_ID") - # Pass the factory function to from_agent_framework instead of a compiled agent - # The agent will be created on every agent run with access to ToolClient - print("Creating Agent Framework adapter with factory function...") - adapter = from_agent_framework( - agent_factory, - credentials=credential, - tools=[{"type": "mcp", "project_connection_id": tool_connection_id}] - ) - - print("Adapter created! Agent will be built on every request.") - return adapter - - -async def main(): # pragma: no cover - sample entrypoint - """Main function to run the agent.""" - adapter = await quickstart() - - if adapter: - print("\nStarting agent server...") - print("The agent factory will be called for every request that arrives.") - await adapter.run_async() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py index 895074d32ae3..88a13741bbac 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/__init__.py @@ -7,8 +7,9 @@ from .logger import configure as config_logging from .server.base import FoundryCBAgent from .server.common.agent_run_context import AgentRunContext +from .server._context import AgentServerContext config_logging() -__all__ = ["FoundryCBAgent", "AgentRunContext"] +__all__ = ["FoundryCBAgent", "AgentRunContext", "AgentServerContext"] __version__ = VERSION 
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
"""Public surface of the ``application`` subpackage."""

__path__ = __import__("pkgutil").extend_path(__path__, __name__)

from ._package_metadata import PackageMetadata, set_current_app

__all__ = [
    "PackageMetadata",
    "set_current_app",
]


# --- application/_builder.py ------------------------------------------
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
class AgentServerBuilder:
    """Placeholder for the builder that will assemble an Agent Server application."""
+# --------------------------------------------------------- +from dataclasses import dataclass, field + +from azure.core.credentials_async import AsyncTokenCredential + + +@dataclass(frozen=True) +class HttpServerConfiguration: + """Resolved configuration for the HTTP server. + + :ivar str host: The host address the server listens on. Defaults to '0.0.0.0'. + :ivar int port: The port number the server listens on. Defaults to 8088. + """ + + host: str = "0.0.0.0" + port: int = 8088 + + +class ToolsConfiguration: + """Resolved configuration for the Tools subsystem. + + :ivar int catalog_cache_ttl: The time-to-live (TTL) for the tool catalog cache in seconds. + Defaults to 600 seconds (10 minutes). + :ivar int catalog_cache_max_size: The maximum size of the tool catalog cache. + Defaults to 1024 entries. + """ + + catalog_cache_ttl: int = 600 + catalog_cache_max_size: int = 1024 + + +@dataclass(frozen=True, kw_only=True) +class AgentServerConfiguration: + """Resolved configuration for the Agent Server application.""" + + agent_name: str = "$default" + project_endpoint: str + credential: AsyncTokenCredential + http: HttpServerConfiguration = field(default_factory=HttpServerConfiguration) + tools: ToolsConfiguration = field(default_factory=ToolsConfiguration) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_options.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_options.py new file mode 100644 index 000000000000..cb4e8bde0bfd --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_options.py @@ -0,0 +1,44 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from typing import Literal, NotRequired, TypedDict, Union + +from azure.core.credentials import TokenCredential +from azure.core.credentials_async import AsyncTokenCredential + + +class AgentServerOptions(TypedDict): + """Configuration options for the Agent Server. + + Attributes: + project_endpoint (str, optional): The endpoint URL for the project. Defaults to current project. + credential (Union[AsyncTokenCredential, TokenCredential], optional): The credential used for authentication. + Defaults to current project's managed identity. + """ + project_endpoint: NotRequired[str] + credential: NotRequired[Union[AsyncTokenCredential, TokenCredential]] + http: NotRequired["HttpServerOptions"] + toos: NotRequired["ToolsOptions"] + + +class HttpServerOptions(TypedDict): + """Configuration options for the HTTP server. + + Attributes: + host (str, optional): The host address the server listens on. + """ + host: NotRequired[Literal["127.0.0.1", "localhost", "0.0.0.0"]] + + +class ToolsOptions(TypedDict): + """Configuration options for the Tools subsystem. + + Attributes: + catalog_cache_ttl (int, optional): The time-to-live (TTL) for the tool catalog cache in seconds. + Defaults to 600 seconds (10 minutes). + catalog_cache_max_size (int, optional): The maximum size of the tool catalog cache. + Defaults to 1024 entries. + """ + catalog_cache_ttl: NotRequired[int] + catalog_cache_max_size: NotRequired[int] + diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_package_metadata.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_package_metadata.py new file mode 100644 index 000000000000..36ff9313a6a2 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/application/_package_metadata.py @@ -0,0 +1,50 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. 
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from __future__ import annotations

import platform
from dataclasses import dataclass
from importlib.metadata import Distribution, PackageNotFoundError


@dataclass(frozen=True)
class PackageMetadata:
    """Identity of an agent-server package, used to build user-agent strings."""

    name: str            # distribution name, e.g. "azure-ai-agentserver-core"
    version: str         # installed version, or "" when the distribution is absent
    python_version: str  # e.g. "3.11.4"
    platform: str        # e.g. "Windows-10-..." from platform.platform()

    @staticmethod
    def from_dist(dist_name: str) -> PackageMetadata:
        """Build metadata for an installed distribution.

        :param str dist_name: The distribution name to look up.
        :return: Metadata with version "" when the distribution is not installed.
        :rtype: PackageMetadata
        """
        try:
            ver = Distribution.from_name(dist_name).version
        except PackageNotFoundError:
            ver = ""

        return PackageMetadata(
            name=dist_name,
            version=ver,
            python_version=platform.python_version(),
            platform=platform.platform(),
        )

    def as_user_agent(self, component: str | None = None) -> str:
        """Format as ``<name>/<version> Python <pyver> [<component> ](<platform>)``.

        Fix: the previous single-expression version had an operator-precedence
        bug — the ``if/else`` bound the whole implicit f-string concatenation,
        so the platform segment was dropped whenever a component was supplied,
        and everything but the platform was dropped when it was not.

        :param component: Optional extra component token inserted before the platform.
        :return: The assembled user-agent string.
        :rtype: str
        """
        parts = [f"{self.name}/{self.version}", f"Python {self.python_version}"]
        if component:
            parts.append(component)
        parts.append(f"({self.platform})")
        return " ".join(parts)


# Process-wide registry of "the current application" package; defaults to core.
_default = PackageMetadata.from_dist("azure-ai-agentserver-core")
_app: PackageMetadata = _default


def set_current_app(app: PackageMetadata) -> None:
    """Register *app* as the currently running application package."""
    global _app
    _app = app


def get_current_app() -> PackageMetadata:
    """Return the currently registered application package metadata."""
    # Read-only access needs no `global` declaration.
    return _app
-# --------------------------------------------------------- - -from ._client import AzureAIToolClient, FoundryTool -from ._exceptions import OAuthConsentRequiredError, MCPToolApprovalRequiredError - -__all__ = [ - "AzureAIToolClient", - "FoundryTool", - "OAuthConsentRequiredError", - "MCPToolApprovalRequiredError", -] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_client.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_client.py deleted file mode 100644 index ee56a4d44a94..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_client.py +++ /dev/null @@ -1,195 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# pylint: disable=protected-access -from typing import Any, List, Mapping, Union -from azure.core import PipelineClient -from azure.core.pipeline import policies -from azure.core.credentials import TokenCredential -from azure.core.tracing.decorator import distributed_trace - -from ._configuration import AzureAIToolClientConfiguration -from .operations._operations import MCPToolsOperations, RemoteToolsOperations -from ._utils._model_base import InvocationPayloadBuilder -from ._model_base import FoundryTool, ToolSource - -class AzureAIToolClient: - """Synchronous client for aggregating tools from Azure AI MCP and Tools APIs. - - This client provides access to tools from both MCP (Model Context Protocol) servers - and Azure AI Tools API endpoints, enabling unified tool discovery and invocation. - - :param str endpoint: - The fully qualified endpoint for the Azure AI Agents service. - Example: "https://.api.azureml.ms" - :param credential: - Credential for authenticating requests to the service. - Use credentials from azure-identity like DefaultAzureCredential. 
- :type credential: ~azure.core.credentials.TokenCredential - :keyword str agent_name: - Name of the agent to use for tool operations. Default is "$default". - :keyword List[Mapping[str, Any]] tools: - List of tool configurations defining which tools to include. - :keyword Mapping[str, Any] user: - User information for tool invocations (object_id, tenant_id). - :keyword str api_version: - API version to use when communicating with the service. - Default is the latest supported version. - :keyword transport: - Custom transport implementation. Default is RequestsTransport. - :paramtype transport: ~azure.core.pipeline.transport.HttpTransport - - """ - - def __init__( - self, - endpoint: str, - credential: "TokenCredential", - **kwargs: Any, - ) -> None: - """Initialize the synchronous Azure AI Tool Client. - - :param str endpoint: The service endpoint URL. - :param credential: Credentials for authenticating requests. - :type credential: ~azure.core.credentials.TokenCredential - :keyword kwargs: Additional keyword arguments for client configuration. 
- """ - self._config = AzureAIToolClientConfiguration( - endpoint, - credential, - **kwargs, - ) - - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: PipelineClient = PipelineClient(base_url=endpoint, policies=_policies, **kwargs) - - # Initialize specialized clients with client and config - self._mcp_tools = MCPToolsOperations(client=self._client, config=self._config) - self._remote_tools = RemoteToolsOperations(client=self._client, config=self._config) - - def list_tools(self) -> List[FoundryTool]: - """List all available tools from configured sources. - - Retrieves tools from both MCP servers and Azure AI Tools API endpoints, - returning them as FoundryTool instances ready for invocation. - :return: List of available tools from all configured sources. - :rtype: List[~AzureAITool] - :raises ~exceptions.OAuthConsentRequiredError: - Raised when the service requires user OAuth consent. - :raises ~exceptions.MCPToolApprovalRequiredError: - Raised when tool access requires human approval. - :raises ~azure.core.exceptions.HttpResponseError: - Raised for HTTP communication failures. 
- - """ - - existing_names: set[str] = set() - - tools: List[FoundryTool] = [] - - # Fetch MCP tools - if ( - self._config.tool_config._named_mcp_tools - and len(self._config.tool_config._named_mcp_tools) > 0 - ): - mcp_tools = self._mcp_tools.list_tools(existing_names) - tools.extend(mcp_tools) - - # Fetch Tools API tools - if ( - self._config.tool_config._remote_tools - and len(self._config.tool_config._remote_tools) > 0 - ): - tools_api_tools = self._remote_tools.resolve_tools(existing_names) - tools.extend(tools_api_tools) - - for tool in tools: - # Capture tool in a closure to avoid shadowing issues - def make_invoker(captured_tool): - return lambda *args, **kwargs: self.invoke_tool(captured_tool, *args, **kwargs) - tool.invoker = make_invoker(tool) - return tools - - @distributed_trace - def invoke_tool( - self, - tool: Union[str, FoundryTool], - *args: Any, - **kwargs: Any, - ) -> Any: - """Invoke a tool by instance, name, or descriptor. - - :param tool: Tool to invoke, specified as an AzureAITool instance, - tool name string, or FoundryTool. - :type tool: Union[str, ~FoundryTool] - :param args: Positional arguments to pass to the tool. - :type args: Any - :return: The result of invoking the tool. - :rtype: Any - """ - descriptor = self._resolve_tool_descriptor(tool) - payload = InvocationPayloadBuilder.build_payload(args, kwargs, configuration={}) - return self._invoke_tool(descriptor, payload, **kwargs) - - def _resolve_tool_descriptor( - self, tool: Union[str, FoundryTool] - ) -> FoundryTool: - """Resolve a tool reference to a descriptor. - - :param tool: Tool to resolve, either a FoundryTool instance or a string name/key. - :type tool: Union[str, FoundryTool] - :return: The resolved FoundryTool descriptor. 
- :rtype: FoundryTool - """ - if isinstance(tool, FoundryTool): - return tool - if isinstance(tool, str): - # Fetch all tools and find matching descriptor - descriptors = self.list_tools() - for descriptor in descriptors: - if tool in (descriptor.name, descriptor.key): - return descriptor - raise KeyError(f"Unknown tool: {tool}") - raise TypeError("Tool must be an AzureAITool, FoundryTool, or registered name/key") - - def _invoke_tool(self, descriptor: FoundryTool, arguments: Mapping[str, Any], **kwargs: Any) -> Any: - """Invoke a tool descriptor. - - :param descriptor: The tool descriptor to invoke. - :type descriptor: FoundryTool - :param arguments: Arguments to pass to the tool. - :type arguments: Mapping[str, Any] - :return: The result of the tool invocation. - :rtype: Any - """ - if descriptor.source is ToolSource.MCP_TOOLS: - return self._mcp_tools.invoke_tool(descriptor, arguments, **kwargs) - if descriptor.source is ToolSource.REMOTE_TOOLS: - return self._remote_tools.invoke_tool(descriptor, arguments, **kwargs) - raise ValueError(f"Unsupported tool source: {descriptor.source}") - - def close(self) -> None: - self._client.close() - - def __enter__(self) -> "AzureAIToolClient": - self._client.__enter__() - return self - - def __exit__(self, *exc_details: Any) -> None: - self._client.__exit__(*exc_details) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_configuration.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_configuration.py deleted file mode 100644 index 71cbdebec911..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_configuration.py +++ /dev/null @@ -1,85 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- - -from typing import Any, List, Optional, TYPE_CHECKING - -from azure.core.pipeline import policies -from ._utils._model_base import ToolConfigurationParser, UserInfo, ToolDefinition - -if TYPE_CHECKING: - from azure.core.credentials import TokenCredential - -class AzureAIToolClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for Azure AI Tool Client. - - Manages authentication, endpoint configuration, and policy settings for the - Azure AI Tool Client. This class is used internally by the client and should - not typically be instantiated directly. - - :param str endpoint: - Fully qualified endpoint for the Azure AI Agents service. - :param credential: - Azure TokenCredential for authentication. - :type credential: ~azure.core.credentials.TokenCredential - :keyword str api_version: - API version to use. Default is the latest supported version. - :keyword List[str] credential_scopes: - OAuth2 scopes for token requests. Default is ["https://ai.azure.com/.default"]. - :keyword str agent_name: - Name of the agent. Default is "$default". - :keyword List[Mapping[str, Any]] tools: - List of tool configurations. - :keyword Mapping[str, Any] user: - User information for tool invocations. - """ - - def __init__( - self, - endpoint: str, - credential: "TokenCredential", - **kwargs: Any, - ) -> None: - """Initialize the configuration. - - :param str endpoint: The service endpoint URL. - :param credential: Credentials for authenticating requests. - :type credential: ~azure.core.credentials.TokenCredential - :keyword kwargs: Additional configuration options. 
- """ - api_version: str = kwargs.pop("api_version", "2025-05-15-preview") - - self.endpoint = endpoint - self.credential = credential - self.api_version = api_version - self.credential_scopes = kwargs.pop("credential_scopes", ["https://ai.azure.com/.default"]) - - # Tool configuration - self.agent_name: str = kwargs.pop("agent_name", "$default") - self.tools: Optional[List[ToolDefinition]] = kwargs.pop("tools", None) - self.user: Optional[UserInfo] = kwargs.pop("user", None) - - # Initialize tool configuration parser - self.tool_config = ToolConfigurationParser(self.tools) - - self._configure(**kwargs) - - # Warn about unused kwargs - if kwargs: - import warnings - warnings.warn(f"Unused configuration parameters: {list(kwargs.keys())}", UserWarning) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.BearerTokenCredentialPolicy( - self.credential, *self.credential_scopes, **kwargs - ) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_exceptions.py 
b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_exceptions.py deleted file mode 100644 index 41515592d698..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_exceptions.py +++ /dev/null @@ -1,52 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- - -from typing import Any, Mapping, Optional - - -class OAuthConsentRequiredError(RuntimeError): - """Raised when the service requires end-user OAuth consent. - - This exception is raised when a tool or service operation requires explicit - OAuth consent from the end user before the operation can proceed. - - :ivar str message: Human-readable guidance returned by the service. - :ivar str consent_url: Link that the end user must visit to provide consent. - :ivar dict payload: Full response payload from the service. - - :param str message: Human-readable guidance returned by the service. - :param str consent_url: Link that the end user must visit to provide the required consent. - :param dict payload: Full response payload supplied by the service. - """ - - def __init__(self, message: str, consent_url: Optional[str], payload: Mapping[str, Any]): - super().__init__(message) - self.message = message - self.consent_url = consent_url - self.payload = dict(payload) - - -class MCPToolApprovalRequiredError(RuntimeError): - """Raised when an MCP tool invocation needs human approval. - - This exception is raised when an MCP (Model Context Protocol) tool requires - explicit human approval before the invocation can proceed, typically for - security or compliance reasons. - - :ivar str message: Human-readable guidance returned by the service. - :ivar dict approval_arguments: - Arguments that must be approved or amended before continuing. - :ivar dict payload: Full response payload from the service. 
- - :param str message: Human-readable guidance returned by the service. - :param dict approval_arguments: - Arguments that must be approved or amended before continuing. - :param dict payload: Full response payload supplied by the service. - """ - - def __init__(self, message: str, approval_arguments: Mapping[str, Any], payload: Mapping[str, Any]): - super().__init__(message) - self.message = message - self.approval_arguments = dict(approval_arguments) - self.payload = dict(payload) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_model_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_model_base.py deleted file mode 100644 index 7e20b20edeb0..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_model_base.py +++ /dev/null @@ -1,174 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- - -from enum import Enum -import json - -from typing import Any, Awaitable, Callable, Mapping, Optional -from dataclasses import dataclass -import asyncio # pylint: disable=do-not-import-asyncio -import inspect -from azure.core import CaseInsensitiveEnumMeta - -class ToolSource(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Identifies the origin of a tool. - - Specifies whether a tool comes from an MCP (Model Context Protocol) server - or from the Azure AI Tools API (remote tools). - """ - - MCP_TOOLS = "mcp_tools" - REMOTE_TOOLS = "remote_tools" - -class ToolDefinition: - """Definition of a tool including its parameters. - - :ivar str type: JSON schema type (e.g., "mcp", "a2", other tools). - """ - - def __init__(self, type: str, **kwargs: Any) -> None: - """Initialize ToolDefinition with type and any additional properties. - - :param str type: JSON schema type (e.g., "mcp", "a2", other tools). 
- :param kwargs: Any additional properties to set on the tool definition. - """ - self.type = type - # Store all additional properties as attributes - for key, value in kwargs.items(): - setattr(self, key, value) - - def __repr__(self) -> str: - """Return a detailed string representation of the ToolDefinition. - - :return: JSON string representation of the ToolDefinition. - :rtype: str - """ - return json.dumps(self.__dict__, default=str) - - def __str__(self) -> str: - """Return a human-readable string representation. - - :return: JSON string representation of the ToolDefinition. - :rtype: str - """ - return json.dumps(self.__dict__, default=str) - -@dataclass -class FoundryTool: - """Lightweight description of a tool that can be invoked. - - Represents metadata and configuration for a single tool, including its - name, description, input schema, and source information. - - :ivar str key: Unique identifier for this tool. - :ivar str name: Display name of the tool. - :ivar str description: Human-readable description of what the tool does. - :ivar ~ToolSource source: - Origin of the tool (MCP_TOOLS or REMOTE_TOOLS). - :ivar dict metadata: Raw metadata from the API response. - :ivar dict input_schema: - JSON schema describing the tool's input parameters, or None. - :ivar ToolDefinition tool_definition: - Optional tool definition object, or None. - """ - - key: str - name: str - description: str - source: ToolSource - metadata: Mapping[str, Any] - input_schema: Optional[Mapping[str, Any]] = None - tool_definition: Optional[ToolDefinition] = None - invoker: Optional[Callable[..., Awaitable[Any]]] = None - - def invoke(self, *args: Any, **kwargs: Any) -> Any: - """Invoke the tool synchronously. - - :param args: Positional arguments to pass to the tool. - :type args: Any - :return: The result from the tool invocation. 
- :rtype: Any - """ - - if not self.invoker: - raise NotImplementedError("No invoker function defined for this tool.") - if inspect.iscoroutinefunction(self.invoker): - # If the invoker is async, check if we're already in an event loop - try: - asyncio.get_running_loop() - # We're in a running loop, can't use asyncio.run() - raise RuntimeError( - "Cannot call invoke() on an async tool from within an async context. " - "Use 'await tool.ainvoke(...)' or 'await tool(...)' instead." - ) - except RuntimeError as e: - if "no running event loop" in str(e).lower(): - # No running loop, safe to use asyncio.run() - return asyncio.run(self.invoker(*args, **kwargs)) - # Re-raise our custom error - raise - else: - return self.invoker(*args, **kwargs) - - async def ainvoke(self, *args: Any, **kwargs: Any) -> Any: - """Invoke the tool asynchronously. - - :param args: Positional arguments to pass to the tool. - :type args: Any - :return: The result from the tool invocation. - :rtype: Any - """ - - if not self.invoker: - raise NotImplementedError("No invoker function defined for this tool.") - if inspect.iscoroutinefunction(self.invoker): - return await self.invoker(*args, **kwargs) - - result = self.invoker(*args, **kwargs) - # If the result is awaitable (e.g., a coroutine), await it - if inspect.iscoroutine(result) or hasattr(result, '__await__'): - return await result - return result - - def __call__(self, *args: Any, **kwargs: Any) -> Any: - - # Check if the invoker is async - if self.invoker and inspect.iscoroutinefunction(self.invoker): - # Return coroutine for async context - return self.ainvoke(*args, **kwargs) - - # Use sync invoke - return self.invoke(*args, **kwargs) - - -class UserInfo: - """Represents user information. - - :ivar str objectId: User's object identifier. - :ivar str tenantId: Tenant identifier. - """ - - def __init__(self, objectId: str, tenantId: str, **kwargs: Any) -> None: - """Initialize UserInfo with user details. 
- - :param str objectId: User's object identifier. - :param str tenantId: Tenant identifier. - :param kwargs: Any additional properties to set on the user. - """ - self.objectId = objectId - self.tenantId = tenantId - # Store all additional properties as attributes - for key, value in kwargs.items(): - setattr(self, key, value) - - def to_dict(self) -> dict: - """Convert to dictionary for JSON serialization. - - :return: Dictionary containing objectId and tenantId. - :rtype: dict - """ - return { - "objectId": self.objectId, - "tenantId": self.tenantId - } diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_utils/_model_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_utils/_model_base.py deleted file mode 100644 index e06ef576264e..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/_utils/_model_base.py +++ /dev/null @@ -1,796 +0,0 @@ - -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# mypy: ignore-errors - -from dataclasses import dataclass, asdict, is_dataclass -from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Set, Tuple - -from .._model_base import ToolDefinition, FoundryTool, ToolSource, UserInfo - - - -class ToolDescriptorBuilder: - """Builds FoundryTool objects from raw tool data.""" - - @staticmethod - def build_descriptors( - raw_tools: Iterable[Mapping[str, Any]], - source: ToolSource, - existing_names: Set[str], - ) -> List[FoundryTool]: - """Build tool descriptors from raw tool data. 
- - Parameters - ---------- - raw_tools : Iterable[Mapping[str, Any]] - Raw tool data from API (can be dicts or dataclass objects) - source : ToolSource - Source of the tools - existing_names : Set[str] - Set of existing tool names to avoid conflicts - - Returns - ------- - List[FoundryTool] - List of built tool descriptors - """ - descriptors: List[FoundryTool] = [] - for raw in raw_tools: - # Convert dataclass objects to dictionaries - if is_dataclass(raw) and not isinstance(raw, type): - raw = asdict(raw) - - name, description = ToolMetadataExtractor.extract_name_description(raw) - if not name: - continue - - key = ToolMetadataExtractor.derive_tool_key(raw, source) - description = description or "" - resolved_name = NameResolver.ensure_unique_name(name, existing_names) - - descriptor = FoundryTool( - key=key, - name=resolved_name, - description=description, - source=source, - metadata=dict(raw), - input_schema=ToolMetadataExtractor.extract_input_schema(raw), - tool_definition= raw.get("tool_definition") - ) - descriptors.append(descriptor) - existing_names.add(resolved_name) - - return descriptors - - -class ToolMetadataExtractor: - """Extracts metadata from raw tool data.""" - - @staticmethod - def extract_name_description(raw: Mapping[str, Any]) -> Tuple[Optional[str], Optional[str]]: - """Extract name and description from raw tool data. 
- - Parameters - ---------- - raw : Mapping[str, Any] - Raw tool data - - Returns - ------- - Tuple[Optional[str], Optional[str]] - Tuple of (name, description) - """ - name = ( - raw.get("name") - or raw.get("id") - or raw.get("tool_name") - or raw.get("definition", {}).get("name") - or raw.get("tool", {}).get("name") - ) - description = ( - raw.get("description") - or raw.get("long_description") - or raw.get("definition", {}).get("description") - or raw.get("tool", {}).get("description") - ) - return name, description - - @staticmethod - def derive_tool_key(raw: Mapping[str, Any], source: ToolSource) -> str: - """Derive unique key for a tool. - - Parameters - ---------- - raw : Mapping[str, Any] - Raw tool data - source : ToolSource - Source of the tool - - Returns - ------- - str - Unique tool key - """ - for candidate in (raw.get("id"), raw.get("name"), raw.get("tool_name")): - if candidate: - return f"{source.value}:{candidate}" - return f"{source.value}:{id(raw)}" - - @staticmethod - def extract_input_schema(raw: Mapping[str, Any]) -> Optional[Mapping[str, Any]]: - """Extract input schema from raw tool data. - - Parameters - ---------- - raw : Mapping[str, Any] - Raw tool data - - Returns - ------- - Optional[Mapping[str, Any]] - Input schema if found - """ - for key in ("input_schema", "inputSchema", "schema", "parameters"): - if key in raw and isinstance(raw[key], Mapping): - return raw[key] - nested = raw.get("definition") or raw.get("tool") - if isinstance(nested, Mapping): - return ToolMetadataExtractor.extract_input_schema(nested) - return None - - @staticmethod - def extract_metadata_schema(raw: Mapping[str, Any]) -> Optional[Mapping[str, Any]]: - """Extract input schema from raw tool data. 
- - Parameters - ---------- - raw : Mapping[str, Any] - Raw tool data - - Returns - ------- - Optional[Mapping[str, Any]] - _metadata if found - """ - for key in ("_meta", "metadata", "meta"): - if key in raw and isinstance(raw[key], Mapping): - return raw[key] - return None - - -class NameResolver: - """Resolves tool names to ensure uniqueness.""" - - @staticmethod - def ensure_unique_name(proposed_name: str, existing_names: Set[str]) -> str: - """Ensure a tool name is unique. - - Parameters - ---------- - proposed_name : str - Proposed tool name - existing_names : Set[str] - Set of existing tool names - - Returns - ------- - str - Unique tool name - """ - if proposed_name not in existing_names: - return proposed_name - - suffix = 1 - while True: - candidate = f"{proposed_name}_{suffix}" - if candidate not in existing_names: - return candidate - suffix += 1 - - -class MetadataMapper: - """Maps tool metadata from _meta schema to tool configuration.""" - - # Default key mapping: meta_schema_key -> output_key - # Note: When used with key_overrides, the direction is reversed internally - # to support tool_def_key -> meta_schema_key mapping - DEFAULT_KEY_MAPPING = { - "imagegen_model_deployment_name": "model_deployment_name", - "model_deployment_name": "model", - "deployment_name": "model", - } - - @staticmethod - def extract_metadata_config( - tool_metadata: Mapping[str, Any], - tool_definition: Optional[Mapping[str, Any]] = None, - key_overrides: Optional[Mapping[str, str]] = None, - ) -> Dict[str, Any]: - """Extract metadata configuration from _meta schema and tool definition. - - This method extracts properties defined in the _meta schema and attempts - to find matching values in the tool definition. Key overrides allow mapping - from tool definition property names to _meta schema property names. 
- - Parameters - ---------- - tool_metadata : Mapping[str, Any] - The _meta schema containing property definitions - tool_definition : Optional[Mapping[str, Any]] - The tool definition containing actual values - key_overrides : Optional[Mapping[str, str]] - Mapping from tool definition keys to _meta schema keys. - Format: {"tool_def_key": "meta_schema_key"} - Example: {"model": "imagegen_model_deployment_name"} - - Returns - ------- - Dict[str, Any] - Dictionary with mapped metadata configuration - - Examples - -------- - >>> meta_schema = { - ... "properties": { - ... "quality": {"type": "string", "default": "auto"}, - ... "model_deployment_name": {"type": "string"} - ... } - ... } - >>> tool_def = {"quality": "high", "model": "gpt-4"} - >>> overrides = {"model": "model_deployment_name"} # tool_def -> meta - >>> MetadataMapper.extract_metadata_config(meta_schema, tool_def, overrides) - {'quality': 'high', 'model_deployment_name': 'gpt-4'} - """ - result: Dict[str, Any] = {} - - # Build reverse mapping: tool_definition_key -> meta_property_name - # Start with default mappings (also reversed) - reverse_default_mapping = {v: k for k, v in MetadataMapper.DEFAULT_KEY_MAPPING.items()} - - # Add user overrides (these are already tool_def -> meta format) - tool_to_meta_mapping = dict(reverse_default_mapping) - if key_overrides: - tool_to_meta_mapping.update(key_overrides) - - # Extract properties from _meta schema - properties = tool_metadata.get("properties", {}) - if not isinstance(properties, Mapping): - return result - - for meta_prop_name, prop_schema in properties.items(): - if not isinstance(prop_schema, Mapping): - continue - - is_required = meta_prop_name in tool_metadata.get("required", []) - - # Try to find value in tool definition - value = None - value_from_definition = False - - if tool_definition: - # First check if tool definition has this exact key - if meta_prop_name in tool_definition: - value = tool_definition[meta_prop_name] - value_from_definition = 
True - else: - # Check if any tool definition key maps to this meta property - for tool_key, mapped_meta_key in tool_to_meta_mapping.items(): - if mapped_meta_key == meta_prop_name and tool_key in tool_definition: - value = tool_definition[tool_key] - value_from_definition = True - break - - # If no value from definition, check for default (only use if required) - if value is None and is_required and "default" in prop_schema: - value = prop_schema["default"] - - # Only add if: - # 1. Value is from tool definition, OR - # 2. Value is required and has a default - if value is not None and (value_from_definition or is_required): - result[meta_prop_name] = value - - return result - - @staticmethod - def prepare_metadata_dict( - tool_metadata_raw: Mapping[str, Any], - tool_definition: Optional[Mapping[str, Any]] = None, - key_overrides: Optional[Mapping[str, str]] = None, - ) -> Dict[str, Any]: - """Prepare a _meta dictionary from tool metadata and definition. - - This is a convenience method that extracts the _meta schema from - raw tool metadata and maps it to configuration values. - - Parameters - ---------- - tool_metadata_raw : Mapping[str, Any] - Raw tool metadata containing _meta or similar fields - tool_definition : Optional[Mapping[str, Any]] - The tool definition containing actual values - key_overrides : Optional[Mapping[str, str]] - Mapping from tool definition keys to _meta schema keys. 
- Format: {"tool_def_key": "meta_schema_key"} - - Returns - ------- - Dict[str, Any] - Dictionary with mapped metadata configuration - """ - # Extract _meta schema using existing utility - meta_schema = ToolMetadataExtractor.extract_metadata_schema(tool_metadata_raw) - if not meta_schema: - return {} - - return MetadataMapper.extract_metadata_config( - meta_schema, - tool_definition, - key_overrides - ) - - -class InvocationPayloadBuilder: - """Builds invocation payloads for tool calls.""" - - @staticmethod - def build_payload( - args: Tuple[Any, ...], - kwargs: Dict[str, Any], - configuration: Dict[str, Any], - ) -> Dict[str, Any]: - """Build invocation payload from args and kwargs. - - Parameters - ---------- - args : Tuple[Any, ...] - Positional arguments - kwargs : Dict[str, Any] - Keyword arguments - configuration : Dict[str, Any] - Tool configuration defaults - - Returns - ------- - Dict[str, Any] - Complete invocation payload - """ - user_arguments = InvocationPayloadBuilder._normalize_input(args, kwargs) - merged = dict(configuration) - merged.update(user_arguments) - return merged - - @staticmethod - def _normalize_input( - args: Tuple[Any, ...], - kwargs: Dict[str, Any] - ) -> Dict[str, Any]: - """Normalize invocation input to a dictionary. - - Parameters - ---------- - args : Tuple[Any, ...] 
- Positional arguments - kwargs : Dict[str, Any] - Keyword arguments - - Returns - ------- - Dict[str, Any] - Normalized input dictionary - - Raises - ------ - ValueError - If mixing positional and keyword arguments or providing multiple positional args - """ - if args and kwargs: - raise ValueError("Mixing positional and keyword arguments is not supported") - - if args: - if len(args) > 1: - raise ValueError("Multiple positional arguments are not supported") - candidate = next(iter(args)) - if candidate is None: - return {} - if isinstance(candidate, Mapping): - return dict(candidate) - return {"input": candidate} - - if kwargs: - return dict(kwargs) - - return {} - - -@dataclass -class ToolProperty: - """Represents a single property/parameter in a tool's schema. - - :ivar str type: JSON schema type (e.g., "string", "object", "array"). - :ivar Optional[str] description: Human-readable description of the property. - :ivar Optional[Mapping[str, Any]] properties: Nested properties for object types. - :ivar Any default: Default value for the property. - :ivar List[str] required: List of required nested properties. - """ - - type: str - description: Optional[str] = None - properties: Optional[Mapping[str, Any]] = None - default: Any = None - required: Optional[List[str]] = None - -@dataclass -class ToolParameters: - """Represents the parameters schema for a tool. - - :ivar str type: JSON schema type, typically "object". - :ivar Mapping[str, ToolProperty] properties: Dictionary of parameter properties. - :ivar List[str] required: List of required parameter names. - """ - - type: str - properties: Mapping[str, ToolProperty] - required: Optional[List[str]] = None - -@dataclass -class ToolManifest: - """Represents a tool manifest with metadata and parameters. - - :ivar str name: Unique name of the tool. - :ivar str description: Detailed description of the tool's functionality. - :ivar ToolParameters parameters: Schema defining the tool's input parameters. 
- """ - - name: str - description: str - parameters: ToolParameters - -@dataclass -class RemoteServer: - """Represents remote server configuration for a tool. - - :ivar str projectConnectionId: Identifier for the project connection. - :ivar str protocol: Communication protocol (e.g., "mcp"). - """ - - projectConnectionId: str - protocol: str - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for JSON serialization.""" - return { - "projectConnectionId": self.projectConnectionId, - "protocol": self.protocol - } - -@dataclass -class EnrichedToolEntry(ToolManifest): - """Enriched tool representation with input schema. - - :ivar str name: Name of the tool. - :ivar str description: Description of the tool. - """ - remoteServer: RemoteServer - projectConnectionId: str - protocol: str - inputSchema: Optional[Mapping[str, Any]] = None - tool_definition: Optional[ToolDefinition] = None - -@dataclass -class ToolEntry: - """Represents a single tool entry in the API response. - - :ivar RemoteServer remoteServer: Configuration for the remote server. - :ivar List[ToolManifest] manifest: List of tool manifests provided by this entry. - """ - - remoteServer: RemoteServer - manifest: List[ToolManifest] - -@dataclass -class ToolsResponse: - """Root response model for the tools API. - - :ivar List[ToolEntry] tools: List of tool entries from the API. - """ - - tools: List[ToolEntry] - enriched_tools: List[EnrichedToolEntry] - - @classmethod - def from_dict(cls, data: Mapping[str, Any], tool_definitions: List[ToolDefinition]) -> "ToolsResponse": - """Create a ToolsResponse from a dictionary. - - :param Mapping[str, Any] data: Dictionary representation of the API response. - :return: Parsed ToolsResponse instance. 
- :rtype: ToolsResponse - """ - tool_defintions_map = {f"{td.type.lower()}_{td.project_connection_id.lower()}": td for td in tool_definitions} - - def tool_definition_lookup(remote_server: RemoteServer) -> Optional[ToolDefinition]: - return tool_defintions_map.get(f"{remote_server.protocol.lower()}_{remote_server.projectConnectionId.lower()}") - - - tools = [] - flattend_tools = [] - for tool_data in data.get("tools", []): - remote_server = RemoteServer( - projectConnectionId=tool_data["remoteServer"]["projectConnectionId"], - protocol=tool_data["remoteServer"]["protocol"] - ) - - manifests = [] - for manifest_data in tool_data.get("manifest", []): - params_data = manifest_data.get("parameters", {}) - properties = {} - - for prop_name, prop_data in params_data.get("properties", {}).items(): - properties[prop_name] = ToolProperty( - type=prop_data.get("type"), - description=prop_data.get("description"), - properties=prop_data.get("properties"), - default=prop_data.get("default"), - required=prop_data.get("required") - ) - - parameters = ToolParameters( - type=params_data.get("type", "object"), - properties=properties, - required=params_data.get("required") - ) - manifest = ToolManifest( - name=manifest_data["name"], - description=manifest_data["description"], - parameters=parameters - ) - manifests.append(manifest) - tool_definition = tool_definition_lookup(remote_server) - flattend_tools.append(EnrichedToolEntry( - projectConnectionId=remote_server.projectConnectionId, - protocol=remote_server.protocol, - name=manifest.name, - description=manifest.description, - parameters=parameters, - remoteServer=remote_server, - inputSchema=parameters, - tool_definition=tool_definition - )) - - tools.append(ToolEntry( - remoteServer=remote_server, - manifest=manifests - )) - - return cls(tools=tools, enriched_tools=flattend_tools) - -class ResolveToolsRequest: - """Represents a request containing remote servers and user information. 
- - :ivar List[RemoteServer] remoteservers: List of remote server configurations. - :ivar UserInfo user: User information. - """ - - def __init__(self, remoteservers: List[RemoteServer], user: UserInfo) -> None: - """Initialize RemoteServersRequest with servers and user info. - - :param List[RemoteServer] remoteservers: List of remote server configurations. - :param UserInfo user: User information. - """ - self.remoteservers = remoteservers - self.user: UserInfo = user - - def to_dict(self) -> Dict[str, Any]: - """Convert to dictionary for JSON serialization.""" - result = { - "remoteservers": [rs.to_dict() for rs in self.remoteservers] - } - if self.user: - # Handle both UserInfo objects and dictionaries - if isinstance(self.user, dict): - # Validate required fields for dict - if self.user.get("objectId") and self.user.get("tenantId"): - result["user"] = { - "objectId": self.user["objectId"], - "tenantId": self.user["tenantId"] - } - elif hasattr(self.user, "objectId") and hasattr(self.user, "tenantId"): - # UserInfo object - if self.user.objectId and self.user.tenantId: - result["user"] = { - "objectId": self.user.objectId, - "tenantId": self.user.tenantId - } - return result - - -class ToolConfigurationParser: - """Parses and processes tool configuration. - - This class handles parsing and categorizing tool configurations into - remote tools (MCP/A2A) and named MCP tools. - - :param List[Mapping[str, Any]] tools_config: - List of tool configurations to parse. Can be None. - """ - - def __init__(self, tools_definitions: Optional[List[Any]] = None): - """Initialize the parser. - - :param tools_definitions: List of tool configurations (can be dicts or ToolDefinition objects), or None. 
- :type tools_definitions: Optional[List[Any]] - """ - # Convert dictionaries to ToolDefinition objects if needed - self._tools_definitions = [] - for tool_def in (tools_definitions or []): - if isinstance(tool_def, dict): - # Convert dict to ToolDefinition - tool_type = tool_def.get("type") - if tool_type: - self._tools_definitions.append(ToolDefinition(type=tool_type, **{k: v for k, v in tool_def.items() if k != "type"})) - elif isinstance(tool_def, ToolDefinition): - self._tools_definitions.append(tool_def) - - self._remote_tools: List[ToolDefinition] = [] - self._named_mcp_tools: List[ToolDefinition] = [] - self._parse_tools_config() - - def _parse_tools_config(self) -> None: - """Parse tools configuration into categorized lists. - - Separates tool configurations into remote tools (MCP/A2A types) and - named MCP tools based on the 'type' field in each configuration. - """ - for tool_definition in self._tools_definitions: - tool_type = tool_definition.type.lower() - if tool_type in ["mcp", "a2a"]: - self._remote_tools.append(tool_definition) - else: - self._named_mcp_tools.append(tool_definition) - -def to_remote_server(tool_definition: ToolDefinition) -> RemoteServer: - """Convert ToolDefinition to RemoteServer. - - :param ToolDefinition tool_definition: - Tool definition to convert. - :return: Converted RemoteServer instance. - :rtype: RemoteServer - """ - return RemoteServer( - projectConnectionId=tool_definition.project_connection_id, - protocol=tool_definition.type.lower() - ) - - -@dataclass -class MCPToolSchema: - """Represents the input schema for an MCP tool. - - :ivar str type: JSON schema type, typically "object". - :ivar Mapping[str, Any] properties: Dictionary of parameter properties. - :ivar List[str] required: List of required parameter names. - """ - - type: str - properties: Mapping[str, Any] - required: Optional[List[str]] = None - - -@dataclass -class MCPToolMetadata: - """Represents the _meta field for an MCP tool. 
- - :ivar str type: JSON schema type, typically "object". - :ivar Mapping[str, Any] properties: Dictionary of metadata properties. - :ivar List[str] required: List of required metadata parameter names. - """ - - type: str - properties: Mapping[str, Any] - required: Optional[List[str]] = None - - -@dataclass -class MCPTool: - """Represents a single MCP tool from the tools/list response. - - :ivar str name: Unique name of the tool. - :ivar str title: Display title of the tool. - :ivar str description: Detailed description of the tool's functionality. - :ivar MCPToolSchema inputSchema: Schema defining the tool's input parameters. - :ivar Optional[MCPToolMetadata] _meta: Optional metadata schema for the tool. - """ - - name: str - title: str - description: str - inputSchema: MCPToolSchema - _meta: Optional[MCPToolMetadata] = None - -@dataclass -class EnrichedMCPTool(MCPTool): - """Represents an enriched MCP tool with additional metadata. - - :ivar ToolDefinition tool_definition: Associated tool definition. - """ - tool_definition: Optional[ToolDefinition] = None - -@dataclass -class MCPToolsListResult: - """Represents the result field of an MCP tools/list response. - - :ivar List[MCPTool] tools: List of available MCP tools. - """ - - tools: List[MCPTool] - - -@dataclass -class MCPToolsListResponse: - """Root response model for the MCP tools/list JSON-RPC response. - - :ivar str jsonrpc: JSON-RPC protocol version (e.g., "2.0"). - :ivar int id: Request identifier. - :ivar MCPToolsListResult result: Result containing the list of tools. - """ - - jsonrpc: str - id: int - result: MCPToolsListResult - - @classmethod - def from_dict(cls, data: Mapping[str, Any], tool_definitions: List[ToolDefinition]) -> "MCPToolsListResponse": - """Create an MCPToolsListResponse from a dictionary. - - :param Mapping[str, Any] data: Dictionary representation of the JSON-RPC response. - :return: Parsed MCPToolsListResponse instance. 
- :rtype: MCPToolsListResponse - """ - result_data = data.get("result", {}) - tools_list = [] - tool_definitions_map = {f"{td.type.lower()}": td for td in tool_definitions} - filter_tools = len(tool_definitions_map) > 0 - for tool_data in result_data.get("tools", []): - - if filter_tools and tool_data["name"].lower() not in tool_definitions_map: - continue - # Parse inputSchema - input_schema_data = tool_data.get("inputSchema", {}) - input_schema = MCPToolSchema( - type=input_schema_data.get("type", "object"), - properties=input_schema_data.get("properties", {}), - required=input_schema_data.get("required") - ) - - # Parse _meta if present - meta = None - meta_data = tool_data.get("_meta") - if meta_data: - meta = MCPToolMetadata( - type=meta_data.get("type", "object"), - properties=meta_data.get("properties", {}), - required=meta_data.get("required") - ) - - # Create MCPTool - mcp_tool = EnrichedMCPTool( - name=tool_data["name"], - title=tool_data.get("title", tool_data["name"]), - description=tool_data.get("description", ""), - inputSchema=input_schema, - _meta=meta, - tool_definition=tool_definitions_map.get(tool_data["name"].lower()) - ) - - tools_list.append(mcp_tool) - - # Create result - result = MCPToolsListResult(tools=tools_list) - - return cls( - jsonrpc=data.get("jsonrpc", "2.0"), - id=data.get("id", 0), - result=result - ) \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/__init__.py deleted file mode 100644 index 047a3b7919e7..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- - -from ._client import AzureAIToolClient, FoundryTool -from .._exceptions import OAuthConsentRequiredError, MCPToolApprovalRequiredError - -__all__ = [ - "AzureAIToolClient", - "FoundryTool", - "OAuthConsentRequiredError", - "MCPToolApprovalRequiredError", -] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_client.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_client.py deleted file mode 100644 index 986e8756e1b6..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_client.py +++ /dev/null @@ -1,207 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# pylint: disable=protected-access,do-not-import-asyncio -from typing import Any, List, Mapping, Union, TYPE_CHECKING -from asyncio import gather -from azure.core import AsyncPipelineClient -from azure.core.pipeline import policies -from azure.core.tracing.decorator_async import distributed_trace_async - -from ._configuration import AzureAIToolClientConfiguration -from .._utils._model_base import InvocationPayloadBuilder -from .._model_base import FoundryTool, ToolSource - -from .operations._operations import MCPToolsOperations, RemoteToolsOperations - -if TYPE_CHECKING: - from azure.core.credentials_async import AsyncTokenCredential - -class AzureAIToolClient: - """Asynchronous client for aggregating tools from Azure AI MCP and Tools APIs. - - This client provides access to tools from both MCP (Model Context Protocol) servers - and Azure AI Tools API endpoints, enabling unified tool discovery and invocation. - - :param str endpoint: - The fully qualified endpoint for the Azure AI Agents service. 
- Example: "https://.api.azureml.ms" - :param credential: - Credential for authenticating requests to the service. - Use credentials from azure-identity like DefaultAzureCredential. - :type credential: ~azure.core.credentials.TokenCredential - :keyword str agent_name: - Name of the agent to use for tool operations. Default is "$default". - :keyword List[Mapping[str, Any]] tools: - List of tool configurations defining which tools to include. - :keyword Mapping[str, Any] user: - User information for tool invocations (object_id, tenant_id). - :keyword str api_version: - API version to use when communicating with the service. - Default is the latest supported version. - :keyword transport: - Custom transport implementation. Default is RequestsTransport. - :paramtype transport: ~azure.core.pipeline.transport.HttpTransport - - """ - - def __init__( - self, - endpoint: str, - credential: "AsyncTokenCredential", - **kwargs: Any, - ) -> None: - """Initialize the asynchronous Azure AI Tool Client. - - :param str endpoint: The service endpoint URL. - :param credential: Credentials for authenticating requests. - :type credential: ~azure.core.credentials.TokenCredential - :keyword kwargs: Additional keyword arguments for client configuration. 
- """ - self._config = AzureAIToolClientConfiguration( - endpoint, - credential, - **kwargs, - ) - - _policies = kwargs.pop("policies", None) - if _policies is None: - _policies = [ - policies.RequestIdPolicy(**kwargs), - self._config.headers_policy, - self._config.user_agent_policy, - self._config.proxy_policy, - policies.ContentDecodePolicy(**kwargs), - self._config.redirect_policy, - self._config.retry_policy, - self._config.authentication_policy, - self._config.custom_hook_policy, - self._config.logging_policy, - policies.DistributedTracingPolicy(**kwargs), - policies.SensitiveHeaderCleanupPolicy(**kwargs) if self._config.redirect_policy else None, - self._config.http_logging_policy, - ] - self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=endpoint, policies=_policies, **kwargs) - - # Initialize specialized clients with client and config - self._mcp_tools = MCPToolsOperations(client=self._client, config=self._config) - self._remote_tools = RemoteToolsOperations(client=self._client, config=self._config) - - async def list_tools(self) -> List[FoundryTool]: - """List all available tools from configured sources. - - Retrieves tools from both MCP servers and Azure AI Tools API endpoints, - returning them as AzureAITool instances ready for invocation. - :return: List of available tools from all configured sources. - :rtype: List[~AzureAITool] - :raises ~Tool_Client.exceptions.OAuthConsentRequiredError: - Raised when the service requires user OAuth consent. - :raises ~Tool_Client.exceptions.MCPToolApprovalRequiredError: - Raised when tool access requires human approval. - :raises ~azure.core.exceptions.HttpResponseError: - Raised for HTTP communication failures. 
- - """ - - existing_names: set[str] = set() - - tools: List[FoundryTool] = [] - - # Fetch MCP tools and Tools API tools in parallel - # Build list of coroutines to gather based on configuration - tasks = [] - if ( - self._config.tool_config._named_mcp_tools - and len(self._config.tool_config._named_mcp_tools) > 0 - ): - tasks.append(self._mcp_tools.list_tools(existing_names)) - if ( - self._config.tool_config._remote_tools - and len(self._config.tool_config._remote_tools) > 0 - ): - tasks.append(self._remote_tools.resolve_tools(existing_names)) - - # Execute all tasks in parallel if any exist - if tasks: - results = await gather(*tasks) - for result in results: - tools.extend(result) - - for tool in tools: - # Capture tool in a closure to avoid shadowing issues - def make_invoker(captured_tool): - async def _invoker(*args, **kwargs): - return await self.invoke_tool(captured_tool, *args, **kwargs) - return _invoker - tool.invoker = make_invoker(tool) - - return tools - - @distributed_trace_async - async def invoke_tool( - self, - tool: Union[str, FoundryTool], - *args: Any, - **kwargs: Any, - ) -> Any: - """Invoke a tool by instance, name, or descriptor. - - :param tool: Tool to invoke, specified as an AzureAITool instance, - tool name string, or FoundryTool. - :type tool: Union[~AzureAITool, str, ~Tool_Client.models.FoundryTool] - :param args: Positional arguments to pass to the tool. - :type args: Any - :return: The result of invoking the tool. - :rtype: Any - """ - descriptor = await self._resolve_tool_descriptor(tool) - payload = InvocationPayloadBuilder.build_payload(args, kwargs, configuration={}) - return await self._invoke_tool(descriptor, payload, **kwargs) - - async def _resolve_tool_descriptor( - self, tool: Union[str, FoundryTool] - ) -> FoundryTool: - """Resolve a tool reference to a descriptor. - - :param tool: Tool to resolve, either a FoundryTool instance or a string name/key. 
- :type tool: Union[str, FoundryTool] - :return: The resolved FoundryTool descriptor. - :rtype: FoundryTool - """ - if isinstance(tool, FoundryTool): - return tool - if isinstance(tool, str): - # Fetch all tools and find matching descriptor - descriptors = await self.list_tools() - for descriptor in descriptors: - if tool in (descriptor.name, descriptor.key): - return descriptor - raise KeyError(f"Unknown tool: {tool}") - raise TypeError("Tool must be an AsyncAzureAITool, FoundryTool, or registered name/key") - - async def _invoke_tool(self, descriptor: FoundryTool, arguments: Mapping[str, Any], **kwargs: Any) -> Any: #pylint: disable=unused-argument - """Invoke a tool descriptor. - - :param descriptor: The tool descriptor to invoke. - :type descriptor: FoundryTool - :param arguments: Arguments to pass to the tool. - :type arguments: Mapping[str, Any] - :return: The result of the tool invocation. - :rtype: Any - """ - if descriptor.source is ToolSource.MCP_TOOLS: - return await self._mcp_tools.invoke_tool(descriptor, arguments) - if descriptor.source is ToolSource.REMOTE_TOOLS: - return await self._remote_tools.invoke_tool(descriptor, arguments) - raise ValueError(f"Unsupported tool source: {descriptor.source}") - - async def close(self) -> None: - """Close the underlying HTTP pipeline.""" - await self._client.close() - - async def __aenter__(self) -> "AzureAIToolClient": - await self._client.__aenter__() - return self - - async def __aexit__(self, *exc_details: Any) -> None: - await self._client.__aexit__(*exc_details) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_configuration.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_configuration.py deleted file mode 100644 index 4eb5503dee8d..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/_configuration.py +++ /dev/null @@ -1,86 +0,0 @@ -# 
--------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- - -from typing import Any, Mapping, List, Optional, TYPE_CHECKING - -from azure.core.pipeline import policies - -from .._utils._model_base import ToolConfigurationParser - -if TYPE_CHECKING: - from azure.core.credentials_async import AsyncTokenCredential - -class AzureAIToolClientConfiguration: # pylint: disable=too-many-instance-attributes - """Configuration for Azure AI Tool Client. - - Manages authentication, endpoint configuration, and policy settings for the - Azure AI Tool Client. This class is used internally by the client and should - not typically be instantiated directly. - - :param str endpoint: - Fully qualified endpoint for the Azure AI Agents service. - :param credential: - Azure TokenCredential for authentication. - :type credential: ~azure.core.credentials.TokenCredential - :keyword str api_version: - API version to use. Default is the latest supported version. - :keyword List[str] credential_scopes: - OAuth2 scopes for token requests. Default is ["https://ai.azure.com/.default"]. - :keyword str agent_name: - Name of the agent. Default is "$default". - :keyword List[Mapping[str, Any]] tools: - List of tool configurations. - :keyword Mapping[str, Any] user: - User information for tool invocations. - """ - - def __init__( - self, - endpoint: str, - credential: "AsyncTokenCredential", - **kwargs: Any, - ) -> None: - """Initialize the configuration. - - :param str endpoint: The service endpoint URL. - :param credential: Credentials for authenticating requests. - :type credential: ~azure.core.credentials.TokenCredential - :keyword kwargs: Additional configuration options. 
- """ - api_version: str = kwargs.pop("api_version", "2025-05-15-preview") - - self.endpoint = endpoint - self.credential = credential - self.api_version = api_version - self.credential_scopes = kwargs.pop("credential_scopes", ["https://ai.azure.com/.default"]) - - # Tool configuration - self.agent_name: str = kwargs.pop("agent_name", "$default") - self.tools: Optional[List[Mapping[str, Any]]] = kwargs.pop("tools", None) - self.user: Optional[Mapping[str, Any]] = kwargs.pop("user", None) - - # Initialize tool configuration parser - self.tool_config = ToolConfigurationParser(self.tools) - - self._configure(**kwargs) - - # Warn about unused kwargs - if kwargs: - import warnings - warnings.warn(f"Unused configuration parameters: {list(kwargs.keys())}", UserWarning) - - def _configure(self, **kwargs: Any) -> None: - self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get("http_logging_policy") or policies.HttpLoggingPolicy(**kwargs) - self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) - self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) - self.authentication_policy = kwargs.get("authentication_policy") - if self.credential and not self.authentication_policy: - self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy( - self.credential, *self.credential_scopes, **kwargs - ) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/operations/_operations.py 
b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/operations/_operations.py deleted file mode 100644 index 7d1310518519..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/aio/operations/_operations.py +++ /dev/null @@ -1,187 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# --------------------------------------------------------- -# mypy: ignore-errors - -import json -from typing import Any, Dict, List, Mapping, MutableMapping - -from azure.core import AsyncPipelineClient -from ..._exceptions import OAuthConsentRequiredError -from .._configuration import AzureAIToolClientConfiguration - -from ...operations._operations import ( - build_remotetools_invoke_tool_request, - build_remotetools_resolve_tools_request, - prepare_remotetools_invoke_tool_request_content, - prepare_remotetools_resolve_tools_request_content, - build_mcptools_list_tools_request, - prepare_mcptools_list_tools_request_content, - build_mcptools_invoke_tool_request, - prepare_mcptools_invoke_tool_request_content, - API_VERSION, - MCP_ENDPOINT_PATH, - TOOL_PROPERTY_OVERRIDES, - DEFAULT_ERROR_MAP, - MCP_HEADERS, - REMOTE_TOOLS_HEADERS, - prepare_request_headers, - prepare_error_map, - handle_response_error, - build_list_tools_request, - process_list_tools_response, - build_invoke_mcp_tool_request, - build_resolve_tools_request, - process_resolve_tools_response, - build_invoke_remote_tool_request, - process_invoke_remote_tool_response, -) -from ..._model_base import FoundryTool, ToolSource, UserInfo - -from ..._utils._model_base import ToolsResponse, ToolDescriptorBuilder, ToolConfigurationParser, ResolveToolsRequest -from ..._utils._model_base import to_remote_server, MCPToolsListResponse, MetadataMapper - -from azure.core.rest import HttpRequest, HttpResponse -from azure.core.pipeline import PipelineResponse - -from 
azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) - -class MCPToolsOperations: - - def __init__(self, *args, **kwargs) -> None: - """Initialize MCP client. - - Parameters - ---------- - client : AsyncPipelineClient - Azure AsyncPipelineClient for HTTP requests - config : AzureAIToolClientConfiguration - Configuration object - """ - input_args = list(args) - self._client : AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config : AzureAIToolClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - - if self._client is None or self._config is None: - raise ValueError("Both 'client' and 'config' must be provided") - - self._endpoint_path = MCP_ENDPOINT_PATH - self._api_version = API_VERSION - - async def list_tools(self, existing_names: set, **kwargs: Any) -> List[FoundryTool]: - """List MCP tools. - - :return: List of tool descriptors from MCP server. - :rtype: List[FoundryTool] - """ - _request, error_map, remaining_kwargs = build_list_tools_request(self._api_version, kwargs) - - path_format_arguments = {"endpoint": self._config.endpoint} - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - pipeline_response: PipelineResponse = await self._client._pipeline.run(_request, **remaining_kwargs) - response = pipeline_response.http_response - - handle_response_error(response, error_map) - return process_list_tools_response(response, self._config.tool_config._named_mcp_tools, existing_names) - - async def invoke_tool( - self, - tool: FoundryTool, - arguments: Mapping[str, Any], - **kwargs: Any - ) -> Any: - """Invoke an MCP tool. - - :param tool: Tool descriptor for the tool to invoke. - :type tool: FoundryTool - :param arguments: Input arguments for the tool. - :type arguments: Mapping[str, Any] - :return: Result of the tool invocation. 
- :rtype: Any - """ - _request, error_map = build_invoke_mcp_tool_request(self._api_version, tool, arguments) - - path_format_arguments = {"endpoint": self._config.endpoint} - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - pipeline_response: PipelineResponse = await self._client._pipeline.run(_request, **kwargs) - response = pipeline_response.http_response - - handle_response_error(response, error_map) - return response.json().get("result") - -class RemoteToolsOperations: - def __init__(self, *args, **kwargs) -> None: - """Initialize Tools API client. - - :param client: Azure PipelineClient for HTTP requests. - :type client: ~azure.core.PipelineClient - :param config: Configuration object. - :type config: ~Tool_Client.models.AzureAIToolClientConfiguration - :raises ValueError: If required parameters are not provided. - """ - input_args = list(args) - self._client : AsyncPipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config : AzureAIToolClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - - if self._client is None or self._config is None: - raise ValueError("Both 'client' and 'config' must be provided") - - - # Apply agent name substitution to endpoint paths - self.agent = self._config.agent_name.strip() if self._config.agent_name and self._config.agent_name.strip() else "$default" - self._api_version = API_VERSION - - async def resolve_tools(self, existing_names: set, **kwargs: Any) -> List[FoundryTool]: - """Resolve remote tools from Azure AI Tools API. - - :return: List of tool descriptors from Tools API. 
- :rtype: List[FoundryTool] - """ - result = build_resolve_tools_request(self.agent, self._api_version, self._config.tool_config, self._config.user, kwargs) - if result[0] is None: - return [] - - _request, error_map, remaining_kwargs = result - - path_format_arguments = {"endpoint": self._config.endpoint} - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - pipeline_response: PipelineResponse = await self._client._pipeline.run(_request, **remaining_kwargs) - response = pipeline_response.http_response - - handle_response_error(response, error_map) - return process_resolve_tools_response(response, self._config.tool_config._remote_tools, existing_names) - - async def invoke_tool( - self, - tool: FoundryTool, - arguments: Mapping[str, Any], - ) -> Any: - """Invoke a remote tool. - - :param tool: Tool descriptor to invoke. - :type tool: FoundryTool - :param arguments: Input arguments for the tool. - :type arguments: Mapping[str, Any] - :return: Result of the tool invocation. 
- :rtype: Any - """ - _request, error_map = build_invoke_remote_tool_request(self.agent, self._api_version, tool, self._config.user, arguments) - - path_format_arguments = {"endpoint": self._config.endpoint} - _request.url = self._client.format_url(_request.url, **path_format_arguments) - - pipeline_response: PipelineResponse = await self._client._pipeline.run(_request) - response = pipeline_response.http_response - - handle_response_error(response, error_map) - return process_invoke_remote_tool_response(response) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/operations/_operations.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/operations/_operations.py deleted file mode 100644 index 0a84ef2e6409..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/client/tools/operations/_operations.py +++ /dev/null @@ -1,551 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -# mypy: ignore-errors - -import json -from typing import Any, Dict, List, Mapping, MutableMapping, Tuple, Union -from azure.core import PipelineClient -from .._configuration import AzureAIToolClientConfiguration -from .._model_base import FoundryTool, ToolSource, UserInfo - -from .._utils._model_base import ToolsResponse, ToolDescriptorBuilder, ToolConfigurationParser, ResolveToolsRequest -from .._utils._model_base import to_remote_server, MCPToolsListResponse, MetadataMapper -from azure.core.pipeline import PipelineResponse -from azure.core.rest import HttpRequest, HttpResponse - -from .._exceptions import OAuthConsentRequiredError - -from azure.core.exceptions import ( - ClientAuthenticationError, - HttpResponseError, - ResourceExistsError, - ResourceNotFoundError, - ResourceNotModifiedError, - map_error, -) - - -# Shared constants -API_VERSION = "2025-11-15-preview" -MCP_ENDPOINT_PATH = "/mcp_tools" - -# Tool-specific property key overrides -# Format: {"tool_name": {"tool_def_key": "meta_schema_key"}} -TOOL_PROPERTY_OVERRIDES: Dict[str, Dict[str, str]] = { - "image_generation": { - "model": "imagegen_model_deployment_name" - }, - # Add more tool-specific mappings as needed -} - -# Shared error map -DEFAULT_ERROR_MAP: MutableMapping = { - 401: ClientAuthenticationError, - 404: ResourceNotFoundError, - 409: ResourceExistsError, - 304: ResourceNotModifiedError, -} - -# Shared header configurations -MCP_HEADERS = { - "Content-Type": "application/json", - "Accept": "application/json,text/event-stream", - "Connection": "keep-alive", - "Cache-Control": "no-cache", -} - -REMOTE_TOOLS_HEADERS = { - "Content-Type": "application/json", - "Accept": "application/json", -} - -# Helper functions for request/response processing -def prepare_request_headers(base_headers: Dict[str, str], custom_headers: Mapping[str, str] = None) -> Dict[str, str]: - """Prepare request headers by merging base and custom headers. 
- - :param base_headers: Base headers to use - :param custom_headers: Custom headers to merge - :return: Merged headers dictionary - """ - headers = base_headers.copy() - if custom_headers: - headers.update(custom_headers) - return headers - -def prepare_error_map(custom_error_map: Mapping[int, Any] = None) -> MutableMapping: - """Prepare error map by merging default and custom error mappings. - - :param custom_error_map: Custom error mappings to merge - :return: Merged error map - """ - error_map = DEFAULT_ERROR_MAP.copy() - if custom_error_map: - error_map.update(custom_error_map) - return error_map - -def format_and_execute_request( - client: PipelineClient, - request: HttpRequest, - endpoint: str, - **kwargs: Any -) -> HttpResponse: - """Format request URL and execute pipeline. - - :param client: Pipeline client - :param request: HTTP request to execute - :param endpoint: Endpoint URL for formatting - :return: HTTP response - """ - path_format_arguments = {"endpoint": endpoint} - request.url = client.format_url(request.url, **path_format_arguments) - pipeline_response: PipelineResponse = client._pipeline.run(request, **kwargs) - return pipeline_response.http_response - -def handle_response_error(response: HttpResponse, error_map: MutableMapping) -> None: - """Handle HTTP response errors. - - :param response: HTTP response to check - :param error_map: Error map for status code mapping - :raises HttpResponseError: If response status is not 200 - """ - if response.status_code not in [200]: - map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response) - -def process_list_tools_response( - response: HttpResponse, - named_mcp_tools: Any, - existing_names: set -) -> List[FoundryTool]: - """Process list_tools response and build descriptors. 
- - :param response: HTTP response with MCP tools - :param named_mcp_tools: Named MCP tools configuration - :param existing_names: Set of existing tool names - :return: List of tool descriptors - """ - mcp_response = MCPToolsListResponse.from_dict(response.json(), named_mcp_tools) - raw_tools = mcp_response.result.tools - return ToolDescriptorBuilder.build_descriptors( - raw_tools, - ToolSource.MCP_TOOLS, - existing_names, - ) - -def process_resolve_tools_response( - response: HttpResponse, - remote_tools: Any, - existing_names: set -) -> List[FoundryTool]: - """Process resolve_tools response and build descriptors. - - :param response: HTTP response with remote tools - :param remote_tools: Remote tools configuration - :param existing_names: Set of existing tool names - :return: List of tool descriptors - """ - payload = response.json() - response_type = payload.get("type") - result = payload.get("toolResult") - - if response_type == "OAuthConsentRequired": - consent_url = result.get("consentUrl") - message = result.get("message") - if not consent_url: - consent_url = message - raise OAuthConsentRequiredError(message, consent_url=consent_url, payload=payload) - - toolResponse = ToolsResponse.from_dict(payload, remote_tools) - return ToolDescriptorBuilder.build_descriptors( - toolResponse.enriched_tools, - ToolSource.REMOTE_TOOLS, - existing_names, - ) - -def build_list_tools_request( - api_version: str, - kwargs: Dict[str, Any] -) -> Tuple[HttpRequest, MutableMapping, Dict[str, str]]: - """Build request for listing MCP tools. 
- - :param api_version: API version - :param kwargs: Additional arguments (headers, params, error_map) - :return: Tuple of (request, error_map, params) - """ - error_map = prepare_error_map(kwargs.pop("error_map", None)) - _headers = prepare_request_headers(MCP_HEADERS, kwargs.pop("headers", None)) - _params = kwargs.pop("params", {}) or {} - - _content = prepare_mcptools_list_tools_request_content() - content = json.dumps(_content) - _request = build_mcptools_list_tools_request(api_version=api_version, headers=_headers, params=_params, content=content) - - return _request, error_map, kwargs - -def build_invoke_mcp_tool_request( - api_version: str, - tool: FoundryTool, - arguments: Mapping[str, Any], - **kwargs: Any -) -> Tuple[HttpRequest, MutableMapping]: - """Build request for invoking MCP tool. - - :param api_version: API version - :param tool: Tool descriptor - :param arguments: Tool arguments - :return: Tuple of (request, error_map) - """ - error_map = prepare_error_map() - _headers = prepare_request_headers(MCP_HEADERS) - _params = {} - - _content = prepare_mcptools_invoke_tool_request_content(tool, arguments, TOOL_PROPERTY_OVERRIDES) - - content = json.dumps(_content) - _request = build_mcptools_invoke_tool_request(api_version=api_version, headers=_headers, params=_params, content=content) - - return _request, error_map - -def build_resolve_tools_request( - agent_name: str, - api_version: str, - tool_config: ToolConfigurationParser, - user: UserInfo, - kwargs: Dict[str, Any] -) -> Union[Tuple[HttpRequest, MutableMapping, Dict[str, Any]], Tuple[None, None, None]]: - """Build request for resolving remote tools. 
- - :param agent_name: Agent name - :param api_version: API version - :param tool_config: Tool configuration - :param user: User info - :param kwargs: Additional arguments - :return: Tuple of (request, error_map, remaining_kwargs) or (None, None, None) - """ - error_map = prepare_error_map(kwargs.pop("error_map", None)) - _headers = prepare_request_headers(REMOTE_TOOLS_HEADERS, kwargs.pop("headers", None)) - _params = kwargs.pop("params", {}) or {} - - _content = prepare_remotetools_resolve_tools_request_content(tool_config, user) - if _content is None: - return None, None, None - - content = json.dumps(_content.to_dict()) - _request = build_remotetools_resolve_tools_request(agent_name, api_version=api_version, headers=_headers, params=_params, content=content) - - return _request, error_map, kwargs - -def build_invoke_remote_tool_request( - agent_name: str, - api_version: str, - tool: FoundryTool, - user: UserInfo, - arguments: Mapping[str, Any] -) -> Tuple[HttpRequest, MutableMapping]: - """Build request for invoking remote tool. - - :param agent_name: Agent name - :param api_version: API version - :param tool: Tool descriptor - :param user: User info - :param arguments: Tool arguments - :return: Tuple of (request, error_map) - """ - error_map = prepare_error_map() - _headers = prepare_request_headers(REMOTE_TOOLS_HEADERS) - _params = {} - - _content = prepare_remotetools_invoke_tool_request_content(tool, user, arguments) - content = json.dumps(_content) - _request = build_remotetools_invoke_tool_request(agent_name, api_version=api_version, headers=_headers, params=_params, content=content) - - return _request, error_map - -def process_invoke_remote_tool_response(response: HttpResponse) -> Any: - """Process remote tool invocation response. 
- - :param response: HTTP response - :return: Tool result - :raises OAuthConsentRequiredError: If OAuth consent is required - """ - payload = response.json() - response_type = payload.get("type") - result = payload.get("toolResult") - - if response_type == "OAuthConsentRequired": - raise OAuthConsentRequiredError(result.get("message"), consent_url=result.get("consentUrl"), payload=payload) - return result - -class MCPToolsOperations: - - def __init__(self, *args, **kwargs) -> None: - """Initialize MCP client. - - Parameters - ---------- - client : PipelineClient - Azure PipelineClient for HTTP requests - config : AzureAIToolClientConfiguration - Configuration object - """ - input_args = list(args) - self._client : PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config : AzureAIToolClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - - if self._client is None or self._config is None: - raise ValueError("Both 'client' and 'config' must be provided") - - self._endpoint_path = MCP_ENDPOINT_PATH - self._api_version = API_VERSION - - def list_tools(self, existing_names: set, **kwargs: Any) -> List[FoundryTool]: - """List MCP tools. - - :return: List of tool descriptors from MCP server. - :rtype: List[FoundryTool] - """ - _request, error_map, remaining_kwargs = build_list_tools_request(self._api_version, kwargs) - response = format_and_execute_request(self._client, _request, self._config.endpoint, **remaining_kwargs) - handle_response_error(response, error_map) - return process_list_tools_response(response, self._config.tool_config._named_mcp_tools, existing_names) - - def invoke_tool( - self, - tool: FoundryTool, - arguments: Mapping[str, Any], - ) -> Any: - """Invoke an MCP tool. - - :param tool: Tool descriptor for the tool to invoke. - :type tool: FoundryTool - :param arguments: Input arguments for the tool. - :type arguments: Mapping[str, Any] - :return: Result of the tool invocation. 
- :rtype: Any - """ - _request, error_map = build_invoke_mcp_tool_request(self._api_version, tool, arguments) - response = format_and_execute_request(self._client, _request, self._config.endpoint) - handle_response_error(response, error_map) - return response.json().get("result") - -def prepare_mcptools_list_tools_request_content() -> Any: - return { - "jsonrpc": "2.0", - "id": 1, - "method": "tools/list", - "params": {} - } - -def build_mcptools_list_tools_request( - api_version: str, - headers: Mapping[str, str] = None, - params: Mapping[str, str] = None, - **kwargs: Any - ) -> HttpRequest: - """Build the HTTP request for listing MCP tools. - - :param api_version: API version to use. - :type api_version: str - :param headers: Additional headers for the request. - :type headers: Mapping[str, str], optional - :param params: Query parameters for the request. - :type params: Mapping[str, str], optional - :return: Constructed HttpRequest object. - :rtype: ~azure.core.rest.HttpRequest - """ - _headers = headers or {} - _params = params or {} - _params["api-version"] = api_version - - _url = f"/mcp_tools" - return HttpRequest(method="POST", url=_url, headers=_headers, params=_params, **kwargs) - -def prepare_mcptools_invoke_tool_request_content(tool: FoundryTool, arguments: Mapping[str, Any], tool_overrides: Dict[str, Dict[str, str]]) -> Any: - - params = { - "name": tool.name, - "arguments": dict(arguments), - } - - if tool.tool_definition: - - key_overrides = tool_overrides.get(tool.name, {}) - meta_config = MetadataMapper.prepare_metadata_dict( - tool.metadata, - tool.tool_definition.__dict__ if hasattr(tool.tool_definition, '__dict__') else tool.tool_definition, - key_overrides - ) - if meta_config: - params["_meta"] = meta_config - - payload = { - "jsonrpc": "2.0", - "id": 2, - "method": "tools/call", - "params": params - } - return payload - -def build_mcptools_invoke_tool_request( - api_version: str, - headers: Mapping[str, str] = None, - params: Mapping[str, 
str] = None, - **kwargs: Any -) -> HttpRequest: - """Build the HTTP request for invoking an MCP tool. - - :param api_version: API version to use. - :type api_version: str - :param headers: Additional headers for the request. - :type headers: Mapping[str, str], optional - :param params: Query parameters for the request. - :type params: Mapping[str, str], optional - :return: Constructed HttpRequest object. - :rtype: ~azure.core.rest.HttpRequest - """ - _headers = headers or {} - _params = params or {} - _params["api-version"] = api_version - - _url = f"/mcp_tools" - return HttpRequest(method="POST", url=_url, headers=_headers, params=_params, **kwargs) - -class RemoteToolsOperations: - def __init__(self, *args, **kwargs) -> None: - """Initialize Tools API client. - - :param client: Azure PipelineClient for HTTP requests. - :type client: ~azure.core.PipelineClient - :param config: Configuration object. - :type config: ~Tool_Client.models.AzureAIToolClientConfiguration - :raises ValueError: If required parameters are not provided. - """ - input_args = list(args) - self._client : PipelineClient = input_args.pop(0) if input_args else kwargs.pop("client") - self._config : AzureAIToolClientConfiguration = input_args.pop(0) if input_args else kwargs.pop("config") - - if self._client is None or self._config is None: - raise ValueError("Both 'client' and 'config' must be provided") - - - # Apply agent name substitution to endpoint paths - self.agent = self._config.agent_name.strip() if self._config.agent_name and self._config.agent_name.strip() else "$default" - self._api_version = API_VERSION - - def resolve_tools(self, existing_names: set, **kwargs: Any) -> List[FoundryTool]: - """Resolve remote tools from Azure AI Tools API. - - :return: List of tool descriptors from Tools API. 
- :rtype: List[FoundryTool] - """ - result = build_resolve_tools_request(self.agent, self._api_version, self._config.tool_config, self._config.user, kwargs) - if result[0] is None: - return [] - - _request, error_map, remaining_kwargs = result - response = format_and_execute_request(self._client, _request, self._config.endpoint, **remaining_kwargs) - handle_response_error(response, error_map) - return process_resolve_tools_response(response, self._config.tool_config._remote_tools, existing_names) - - def invoke_tool( - self, - tool: FoundryTool, - arguments: Mapping[str, Any], - ) -> Any: - """Invoke a remote tool. - - :param tool: Tool descriptor to invoke. - :type tool: FoundryTool - :param arguments: Input arguments for the tool. - :type arguments: Mapping[str, Any] - :return: Result of the tool invocation. - :rtype: Any - """ - _request, error_map = build_invoke_remote_tool_request(self.agent, self._api_version, tool, self._config.user, arguments) - response = format_and_execute_request(self._client, _request, self._config.endpoint) - handle_response_error(response, error_map) - return process_invoke_remote_tool_response(response) - -def prepare_remotetools_invoke_tool_request_content(tool: FoundryTool, user: UserInfo, arguments: Mapping[str, Any]) -> Any: - payload = { - "toolName": tool.name, - "arguments": dict(arguments), - "remoteServer": to_remote_server(tool.tool_definition).to_dict(), - } - if user: - # Handle both UserInfo objects and dictionaries - if isinstance(user, dict): - if user.get("objectId") and user.get("tenantId"): - payload["user"] = { - "objectId": user["objectId"], - "tenantId": user["tenantId"], - } - elif hasattr(user, "objectId") and hasattr(user, "tenantId"): - if user.objectId and user.tenantId: - payload["user"] = { - "objectId": user.objectId, - "tenantId": user.tenantId, - } - return payload - -def build_remotetools_invoke_tool_request( - agent_name: str, - api_version: str, - headers: Mapping[str, str] = None, - params: 
Mapping[str, str] = None, - **kwargs: Any - ) -> HttpRequest: - """Build the HTTP request for invoking a remote tool. - - :param api_version: API version to use. - :type api_version: str - :param headers: Additional headers for the request. - :type headers: Mapping[str, str], optional - :param params: Query parameters for the request. - :type params: Mapping[str, str], optional - :return: Constructed HttpRequest object. - :rtype: ~azure.core.rest.HttpRequest - """ - _headers = headers or {} - _params = params or {} - _params["api-version"] = api_version - - _url = f"/agents/{agent_name}/tools/invoke" - return HttpRequest(method="POST", url=_url, headers=_headers, params=_params, **kwargs) - - -def prepare_remotetools_resolve_tools_request_content(tool_config: ToolConfigurationParser, user: UserInfo = None) -> ResolveToolsRequest: - resolve_tools_request: ResolveToolsRequest = None - if tool_config._remote_tools: - remote_servers = [] - for remote_tool in tool_config._remote_tools: - remote_servers.append(to_remote_server(remote_tool)) - resolve_tools_request = ResolveToolsRequest(remote_servers, user=user) - - return resolve_tools_request - -def build_remotetools_resolve_tools_request( - agent_name: str, - api_version: str, - headers: Mapping[str, str] = None, - params: Mapping[str, str] = None, - **kwargs: Any - ) -> HttpRequest: - """Build the HTTP request for resolving remote tools. - - :param api_version: API version to use. - :type api_version: str - :param headers: Additional headers for the request. - :type headers: Mapping[str, str], optional - :param params: Query parameters for the request. - :type params: Mapping[str, str], optional - :return: Constructed HttpRequest object. 
- :rtype: ~azure.core.rest.HttpRequest - """ - _headers = headers or {} - _params = params or {} - _params["api-version"] = api_version - - _url = f"/agents/{agent_name}/tools/resolve" - return HttpRequest(method="POST", url=_url, headers=_headers, params=_params, **kwargs) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py index 319e02da7e98..e15ccd86f9cc 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/logger.py @@ -43,6 +43,9 @@ def get_dimensions(): def get_project_endpoint(): + project_endpoint = os.environ.get(Constants.AZURE_AI_PROJECT_ENDPOINT) + if project_endpoint: + return project_endpoint project_resource_id = os.environ.get(Constants.AGENT_PROJECT_RESOURCE_ID) if project_resource_id: last_part = project_resource_id.split("/")[-1] diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_context.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_context.py new file mode 100644 index 000000000000..f86d1ae0d4ac --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/_context.py @@ -0,0 +1,32 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from typing import AsyncContextManager, ClassVar, Optional + +from azure.ai.agentserver.core.tools import FoundryToolRuntime + + +class AgentServerContext(AsyncContextManager["AgentServerContext"]): + _INSTANCE: ClassVar[Optional["AgentServerContext"]] = None + + def __init__(self, tool_runtime: FoundryToolRuntime): + self._tool_runtime = tool_runtime + + self.__class__._INSTANCE = self + + @classmethod + def get(cls) -> "AgentServerContext": + if cls._INSTANCE is None: + raise ValueError("AgentServerContext has not been initialized.") + return cls._INSTANCE + + @property + def tools(self) -> FoundryToolRuntime: + return self._tool_runtime + + async def __aenter__(self) -> "AgentServerContext": + await self._tool_runtime.__aenter__() + return self + + async def __aexit__(self, exc_type, exc_value, traceback) -> None: + await self._tool_runtime.__aexit__(exc_type, exc_value, traceback) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py index cf85b2fcea07..a5f69664cf66 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/base.py @@ -12,6 +12,8 @@ from typing import Any, AsyncGenerator, Generator, Optional, Union import uvicorn +from azure.core.credentials import TokenCredential +from azure.core.credentials_async import AsyncTokenCredential from opentelemetry import context as otel_context, trace from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator from starlette.applications import Starlette @@ -25,17 +27,19 @@ from azure.identity.aio import DefaultAzureCredential as AsyncDefaultTokenCredential +from ._context import AgentServerContext from ..models import projects as project_models from ..constants import Constants -from 
..logger import APPINSIGHT_CONNSTR_ENV_NAME, get_logger, request_context +from ..logger import APPINSIGHT_CONNSTR_ENV_NAME, get_logger, get_project_endpoint, request_context from ..models import ( Response as OpenAIResponse, ResponseStreamEvent, ) from .common.agent_run_context import AgentRunContext -from ..client.tools.aio._client import AzureAIToolClient -from ..client.tools._utils._model_base import ToolDefinition, UserInfo +from ..tools import DefaultFoundryToolRuntime, FoundryTool, FoundryToolClient, FoundryToolRuntime, UserInfo, \ + UserInfoContextMiddleware +from ..utils._credential import AsyncTokenCredentialAdapter logger = get_logger() DEBUG_ERRORS = os.environ.get(Constants.AGENT_DEBUG_ERRORS, "false").lower() == "true" @@ -47,18 +51,15 @@ def __init__(self, app: ASGIApp, agent: Optional['FoundryCBAgent'] = None): self.agent = agent async def dispatch(self, request: Request, call_next): - user_info: Optional[UserInfo] = None if request.url.path in ("/runs", "/responses"): try: - user_info = self.set_user_info_to_context_var(request) self.set_request_id_to_context_var(request) payload = await request.json() except Exception as e: logger.error(f"Invalid JSON payload: {e}") return JSONResponse({"error": f"Invalid JSON payload: {e}"}, status_code=400) try: - agent_tools = self.agent.tools if self.agent else [] - request.state.agent_run_context = AgentRunContext(payload, user_info=user_info, agent_tools=agent_tools) + request.state.agent_run_context = AgentRunContext(payload) self.set_run_context_to_context_var(request.state.agent_run_context) except Exception as e: logger.error(f"Context build failed: {e}.", exc_info=True) @@ -93,37 +94,16 @@ def set_run_context_to_context_var(self, run_context): ctx.update(res) request_context.set(ctx) - def set_user_info_to_context_var(self, request) -> Optional[UserInfo]: - user_info: Optional[UserInfo] = None - try: - object_id_header = request.headers.get("x-aml-oid", None) - tenant_id_header = 
request.headers.get("x-aml-tid", None) - if not object_id_header and not tenant_id_header: - return None - user_info = UserInfo( - objectId=object_id_header, - tenantId=tenant_id_header - ) - - except Exception as e: - logger.error(f"Failed to parse X-User-Info header: {e}", exc_info=True) - if user_info: - ctx = request_context.get() or {} - for key, value in user_info.to_dict().items(): - if key == "objectId": - continue # skip user objectId - ctx[f"azure.ai.agentserver.user.{key}"] = str(value) - request_context.set(ctx) - return user_info - class FoundryCBAgent: - _cached_tools_endpoint: Optional[str] = None - _cached_agent_name: Optional[str] = None - - def __init__(self, credentials: Optional["AsyncTokenCredential"] = None, **kwargs: Any) -> None: - self.credentials = credentials or AsyncDefaultTokenCredential() - self.tools = kwargs.get("tools", []) + def __init__(self, + credentials: Optional[Union[AsyncTokenCredential, TokenCredential]] = None, + project_endpoint: Optional[str] = None) -> None: + self.credentials = AsyncTokenCredentialAdapter(credentials) if credentials else AsyncDefaultTokenCredential() + project_endpoint = get_project_endpoint() or project_endpoint + if not project_endpoint: + raise ValueError("Project endpoint is required.") + AgentServerContext(DefaultFoundryToolRuntime(project_endpoint, self.credentials)) async def runs_endpoint(request): # Set up tracing context and span @@ -202,6 +182,7 @@ async def readiness_endpoint(request): ] self.app = Starlette(routes=routes) + UserInfoContextMiddleware.install(self.app) self.app.add_middleware( CORSMiddleware, allow_origins=["*"], @@ -424,91 +405,17 @@ def setup_otlp_exporter(self, endpoint, provider): provider.add_span_processor(processor) logger.info(f"Tracing setup with OTLP exporter: {endpoint}") - @staticmethod - def _configure_endpoint() -> tuple[str, Optional[str]]: - """Configure and return the tools endpoint and agent name from environment variables. 
- - :return: A tuple of (tools_endpoint, agent_name). - :rtype: tuple[str, Optional[str]] - """ - if not FoundryCBAgent._cached_tools_endpoint: - project_endpoint_format: str = "https://{account_name}.services.ai.azure.com/api/projects/{project_name}" - workspace_endpoint = os.getenv(Constants.AZURE_AI_WORKSPACE_ENDPOINT) - tools_endpoint = os.getenv(Constants.AZURE_AI_TOOLS_ENDPOINT) - project_endpoint = os.getenv(Constants.AZURE_AI_PROJECT_ENDPOINT) - - if not tools_endpoint: - # project endpoint corrupted could have been an overridden environment variable - # try to reconstruct tools endpoint from workspace endpoint - # Robustly reconstruct project_endpoint from workspace_endpoint if needed. - - if workspace_endpoint: - # Expected format: - # "https://.api.azureml.ms/subscriptions//resourceGroups// - # providers/Microsoft.MachineLearningServices/workspaces/@@AML" - from urllib.parse import urlparse - parsed_url = urlparse(workspace_endpoint) - path_parts = [p for p in parsed_url.path.split('/') if p] - # Find the 'workspaces' part and extract account_name@project_name@AML - try: - workspaces_idx = path_parts.index("workspaces") - if workspaces_idx + 1 >= len(path_parts): - raise ValueError( - f"Workspace endpoint path does not contain workspace info " - f"after 'workspaces': {workspace_endpoint}" - ) - workspace_info = path_parts[workspaces_idx + 1] - workspace_parts = workspace_info.split('@') - if len(workspace_parts) < 2: - raise ValueError( - f"Workspace info '{workspace_info}' does not contain both account_name " - f"and project_name separated by '@'." 
- ) - account_name = workspace_parts[0] - project_name = workspace_parts[1] - # Documented expected format for PROJECT_ENDPOINT_FORMAT: - # "https://.api.azureml.ms/api/projects/{project_name}" - project_endpoint = project_endpoint_format.format( - account_name=account_name, project_name=project_name - ) - except (ValueError, IndexError) as e: - raise ValueError( - f"Failed to reconstruct project endpoint from workspace endpoint " - f"'{workspace_endpoint}': {e}" - ) from e - # should never reach here - logger.info("Reconstructed tools endpoint from project endpoint %s", project_endpoint) - tools_endpoint = project_endpoint - - tools_endpoint = project_endpoint - - if not tools_endpoint: - raise ValueError( - "Project endpoint needed for Azure AI tools endpoint is not found. " - ) - FoundryCBAgent._cached_tools_endpoint = tools_endpoint - - agent_name = os.getenv(Constants.AGENT_NAME) - if agent_name is None: - if os.getenv("CONTAINER_APP_NAME"): - raise ValueError( - "Agent name needed for Azure AI hosted agents is not found. 
" - ) - agent_name = "$default" - FoundryCBAgent._cached_agent_name = agent_name - - return FoundryCBAgent._cached_tools_endpoint, FoundryCBAgent._cached_agent_name - def get_tool_client( - self, tools: Optional[list[ToolDefinition]], user_info: Optional[UserInfo] - ) -> AzureAIToolClient: + self, tools: Optional[list[FoundryTool]], user_info: Optional[UserInfo] + ) -> FoundryToolClient: + # TODO: remove this method logger.debug("Creating AzureAIToolClient with tools: %s", tools) if not self.credentials: raise ValueError("Credentials are required to create Tool Client.") tools_endpoint, agent_name = self._configure_endpoint() - return AzureAIToolClient( + return FoundryToolClient( endpoint=tools_endpoint, credential=self.credentials, tools=tools, diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py index 5289df0b3524..53eb15af3550 100644 --- a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/server/common/agent_run_context.py @@ -7,7 +7,8 @@ from ...models.projects import AgentId, AgentReference, ResponseConversation1 from .id_generator.foundry_id_generator import FoundryIdGenerator from .id_generator.id_generator import IdGenerator -from ...client.tools._model_base import UserInfo +from ...tools import UserInfo + logger = get_logger() @@ -65,12 +66,14 @@ def get_conversation_object(self) -> ResponseConversation1: def get_tools(self) -> list: # request tools take precedence over agent tools + # TODO: remove this method request_tools = self.request.get("tools", []) if not request_tools: return self._agent_tools return request_tools def get_user_info(self) -> UserInfo: + # TODO: remove this method return self._user_info diff --git 
class ToolInvocationError(RuntimeError):
    """Raised when invoking a tool fails.

    :param str message: Human-readable description of the failure.
    :param ResolvedFoundryTool tool: The tool whose invocation failed.

    :ivar ResolvedFoundryTool tool: The tool whose invocation failed.
    """

    def __init__(self, message: str, tool: ResolvedFoundryTool):
        super().__init__(message)
        self.tool = tool


class OAuthConsentRequiredError(RuntimeError):
    """Raised when the service requires end-user OAuth consent.

    A tool or service operation cannot proceed until the end user grants
    explicit OAuth consent.

    :param str message: Human-readable guidance returned by the service.
    :param str consent_url: Link the end user must visit to grant consent.
    :param str project_connection_id: Project connection the consent applies to.

    :ivar str message: Human-readable guidance returned by the service.
    :ivar str consent_url: Link the end user must visit to grant consent.
    :ivar str project_connection_id: Project connection the consent applies to.
    """

    def __init__(self, message: str, consent_url: str, project_connection_id: str):
        super().__init__(message)
        self.message = message
        self.consent_url = consent_url
        self.project_connection_id = project_connection_id


class UnableToResolveToolInvocationError(RuntimeError):
    """Raised when a tool cannot be found or resolved from the available sources.

    :param str message: Human-readable description of the failure.
    :param FoundryTool tool: The tool that could not be resolved.

    :ivar FoundryTool tool: The tool that could not be resolved.
    """

    def __init__(self, message: str, tool: FoundryTool):
        super().__init__(message)
        self.tool = tool


class InvalidToolFacadeError(RuntimeError):
    """Raised when a tool facade does not conform to the expected structure
    or contains invalid data."""
class FoundryToolClientConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
    """Pipeline configuration for :class:`FoundryToolClient`.

    Bundles the HTTP pipeline policies (retry, logging, request-id, user agent,
    bearer-token authentication, redirect). Used internally by the client and
    not normally instantiated directly.

    :param credential: Async credential used for bearer-token authentication.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    """

    def __init__(self, credential: "AsyncTokenCredential"):
        super().__init__()

        self.retry_policy = policies.AsyncRetryPolicy()
        self.logging_policy = policies.NetworkTraceLoggingPolicy()
        self.request_id_policy = policies.RequestIdPolicy()
        self.http_logging_policy = policies.HttpLoggingPolicy()
        self.user_agent_policy = policies.UserAgentPolicy(
            base_user_agent=get_current_app().as_user_agent("FoundryToolClient"))
        # Token scope for the Azure AI data plane.
        self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(
            credential, "https://ai.azure.com/.default"
        )
        self.redirect_policy = policies.AsyncRedirectPolicy()


class FoundryToolClient(AsyncContextManager["FoundryToolClient"]):
    """Asynchronous client for aggregating tools from Azure AI MCP and Tools APIs.

    Provides unified tool discovery (:meth:`list_tools`) and invocation
    (:meth:`invoke_tool`) across hosted MCP servers and connected remote tools.

    :param str endpoint:
        The fully qualified endpoint for the Azure AI Agents service, e.g.
        "https://<account>.services.ai.azure.com/api/projects/<project>".
    :param credential:
        Async credential for authenticating requests, e.g.
        ~azure.identity.aio.DefaultAzureCredential.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    """

    def __init__(self, endpoint: str, credential: "AsyncTokenCredential"):
        """Initialize the asynchronous Foundry tool client.

        :param str endpoint: The service endpoint URL.
        :param credential: Async credential for authenticating requests.
        :type credential: ~azure.core.credentials_async.AsyncTokenCredential
        """
        config = FoundryToolClientConfiguration(credential)
        self._client: AsyncPipelineClient = AsyncPipelineClient(base_url=endpoint, config=config)

        self._hosted_mcp_tools = FoundryMcpToolsOperations(self._client)
        self._connected_tools = FoundryConnectedToolsOperations(self._client)

    @distributed_trace_async
    async def list_tools(self,
                         tools: Collection[FoundryTool],
                         agent_name: str,
                         user: Optional[UserInfo] = None) -> Mapping[FoundryTool, List[FoundryToolDetails]]:
        """List available tool details for the given tool definitions.

        Groups the requested tools by source and queries the hosted-MCP and
        connected-tools backends concurrently.

        :param tools: Collection of FoundryTool instances to resolve.
        :type tools: Collection[FoundryTool]
        :param str agent_name: Name of the agent requesting the tools.
        :param user: Information about the user requesting the tools.
        :type user: Optional[UserInfo]
        :return: A mapping of each FoundryTool to its resolved details.
        :rtype: Mapping[FoundryTool, List[FoundryToolDetails]]
        :raises ~azure.ai.agentserver.core.tools.OAuthConsentRequiredError:
            Raised when the service requires user OAuth consent.
        :raises ~azure.core.exceptions.HttpResponseError:
            Raised for HTTP communication failures.
        """
        tools_by_source: DefaultDict[FoundryToolSource, List[FoundryTool]] = defaultdict(list)
        for tool in tools:
            tools_by_source[tool.source].append(tool)

        tasks = []
        if FoundryToolSource.HOSTED_MCP in tools_by_source:
            tasks.append(self._hosted_mcp_tools.list_tools(tools_by_source[FoundryToolSource.HOSTED_MCP]))
        if FoundryToolSource.CONNECTED in tools_by_source:
            tasks.append(self._connected_tools.list_tools(tools_by_source[FoundryToolSource.CONNECTED],
                                                          user,
                                                          agent_name))

        resolved_tools: Dict[FoundryTool, List[FoundryToolDetails]] = {}
        if tasks:
            # Both backends are queried concurrently; merge their mappings.
            for result in await gather(*tasks):
                resolved_tools.update(result)

        return resolved_tools

    @distributed_trace_async
    async def invoke_tool(self,
                          tool: ResolvedFoundryTool,
                          arguments: Dict[str, Any],
                          agent_name: str,
                          user: Optional[UserInfo] = None) -> Any:
        """Invoke a resolved tool and return its result.

        :param tool: The resolved tool to invoke.
        :type tool: ResolvedFoundryTool
        :param arguments: Arguments to pass to the tool.
        :type arguments: Dict[str, Any]
        :param str agent_name: Name of the agent invoking the tool.
        :param user: Information about the user invoking the tool.
        :type user: Optional[UserInfo]
        :return: The result of invoking the tool.
        :rtype: Any
        :raises ~azure.ai.agentserver.core.tools.OAuthConsentRequiredError:
            Raised when the service requires user OAuth consent.
        :raises ~azure.core.exceptions.HttpResponseError:
            Raised for HTTP communication failures.
        :raises ~azure.ai.agentserver.core.tools.ToolInvocationError:
            Raised when the tool invocation fails or its source is not supported.
        """
        if tool.source is FoundryToolSource.HOSTED_MCP:
            return await self._hosted_mcp_tools.invoke_tool(tool, arguments)
        if tool.source is FoundryToolSource.CONNECTED:
            return await self._connected_tools.invoke_tool(tool, arguments, user, agent_name)
        raise ToolInvocationError(f"Unsupported tool source: {tool.source}", tool=tool)

    async def close(self) -> None:
        """Close the underlying HTTP pipeline."""
        await self._client.close()

    async def __aenter__(self) -> "FoundryToolClient":
        await self._client.__aenter__()
        return self

    async def __aexit__(self, *exc_details: Any) -> None:
        await self._client.__aexit__(*exc_details)
+ + Specifies whether a tool comes from an MCP (Model Context Protocol) server + or from the Azure AI Tools API (remote tools). + """ + + HOSTED_MCP = "hosted_mcp" + CONNECTED = "connected" + + +class FoundryToolProtocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Identifies the protocol used by a connected tool.""" + + MCP = "mcp" + A2A = "a2a" + + +@dataclass(frozen=True, kw_only=True) +class FoundryTool(ABC): + """Definition of a foundry tool including its parameters.""" + source: FoundryToolSource = field(init=False) + + @property + @abstractmethod + def id(self) -> str: + """Unique identifier for the tool.""" + raise NotImplementedError + + def __str__(self): + return self.id + + def __eq__(self, other: object) -> bool: + if not isinstance(other, FoundryTool): + return False + return self.id == other.id + + def __hash__(self) -> int: + return hash(self.id) + + +@dataclass(frozen=True, kw_only=True) +class FoundryHostedMcpTool(FoundryTool): + """Foundry MCP tool definition. + + :ivar str name: Name of MCP tool. + :ivar Mapping[str, Any] configuration: Tools configuration. + """ + source: Literal[FoundryToolSource.HOSTED_MCP] = field(init=False, default=FoundryToolSource.HOSTED_MCP) + name: str + configuration: Optional[Mapping[str, Any]] = field(default=None, compare=False, hash=False) + + @property + def id(self) -> str: + """Unique identifier for the tool.""" + return f"{self.source}:{self.name}" + + +@dataclass(frozen=True, kw_only=True) +class FoundryConnectedTool(FoundryTool): + """Foundry connected tool definition. + + :ivar str project_connection_id: connection name of foundry tool. + """ + source: Literal[FoundryToolSource.CONNECTED] = field(init=False, default=FoundryToolSource.CONNECTED) + protocol: str + project_connection_id: str + + @property + def id(self) -> str: + return f"{self.source}:{self.protocol}:{self.project_connection_id}" + + +@dataclass(frozen=True) +class FoundryToolDetails: + """Details about a Foundry tool. 
+ + :ivar str name: Name of the tool. + :ivar str description: Description of the tool. + :ivar SchemaDefinition input_schema: Input schema for the tool parameters. + :ivar Optional[SchemaDefinition] metadata: Optional metadata schema for the tool. + """ + name: str + description: str + input_schema: "SchemaDefinition" + metadata: Optional["SchemaDefinition"] = None + + +@dataclass(frozen=True) +class ResolvedFoundryTool: + """Resolved Foundry tool with definition and details. + + :ivar ToolDefinition definition: + Optional tool definition object, or None. + :ivar FoundryToolDetails details: + Details about the tool, including name, description, and input schema. + """ + + definition: FoundryTool + details: FoundryToolDetails + invoker: Optional[Callable[..., Awaitable[Any]]] = None # TODO: deprecated + + @property + def id(self) -> str: + return f"{self.definition.id}:{self.details.name}" + + @property + def source(self) -> FoundryToolSource: + """Origin of the tool.""" + return self.definition.source + + @property + def name(self) -> str: + """Name of the tool.""" + return self.details.name + + @property + def description(self) -> str: + """Description of the tool.""" + return self.details.description + + @property + def input_schema(self) -> "SchemaDefinition": + """Input schema of the tool.""" + return self.details.input_schema + + @property + def metadata(self) -> Optional["SchemaDefinition"]: + """Metadata schema of the tool, if any.""" + return self.details.metadata + + +@dataclass(frozen=True) +class UserInfo: + """Represents user information. + + :ivar str object_id: User's object identifier. + :ivar str tenant_id: Tenant identifier. + """ + + object_id: str + tenant_id: str + + +class SchemaType(str, Enum): + """ + Enumeration of possible schema types. + + :ivar py_type: The corresponding Python runtime type for this schema type + (e.g., ``SchemaType.STRING.py_type is str``). 
+ """ + + py_type: Type[Any] + """The corresponding Python runtime type for this schema type.""" + + STRING = ("string", str) + """Schema type for string values (maps to ``str``).""" + + NUMBER = ("number", float) + """Schema type for numeric values with decimals (maps to ``float``).""" + + INTEGER = ("integer", int) + """Schema type for integer values (maps to ``int``).""" + + BOOLEAN = ("boolean", bool) + """Schema type for boolean values (maps to ``bool``).""" + + ARRAY = ("array", list) + """Schema type for array values (maps to ``list``).""" + + OBJECT = ("object", dict) + """Schema type for object/dictionary values (maps to ``dict``).""" + + def __new__(cls, value: str, py_type: Type[Any]): + """ + Create an enum member whose value is the schema type string, while also + attaching the mapped Python type. + + :param value: The serialized schema type string (e.g. ``"string"``). + :param py_type: The mapped Python runtime type (e.g. ``str``). + """ + obj = str.__new__(cls, value) + obj._value_ = value + obj.py_type = py_type + return obj + + @classmethod + def from_python_type(cls, t: Type[Any]) -> "SchemaType": + """ + Get the matching :class:`SchemaType` for a given Python runtime type. + + :param t: A Python runtime type (e.g. ``str``, ``int``, ``float``). + :returns: The corresponding :class:`SchemaType`. + :raises ValueError: If ``t`` is not supported by this enumeration. + """ + for member in cls: + if member.py_type is t: + return member + raise ValueError(f"Unsupported python type: {t!r}") + + +class SchemaProperty(BaseModel): + """ + A JSON Schema-like description of a single property (field) or nested schema node. + + This model is intended to be recursively nestable via :attr:`items` (for arrays) + and :attr:`properties` (for objects). + + :ivar type: The schema node type (e.g., ``string``, ``object``, ``array``). + :ivar description: Optional human-readable description of the property. + :ivar items: The item schema for an ``array`` type. 
class SchemaProperty(BaseModel):
    """A JSON-Schema-like description of a single property or nested node.

    Recursively nestable via :attr:`items` (array element schema) and
    :attr:`properties` (object member schemas).

    :ivar type: The schema node type (e.g. string, object, array).
    :ivar description: Optional human-readable description of the property.
    :ivar items: Element schema when :attr:`type` is :data:`~SchemaType.ARRAY`.
    :ivar properties: Member schemas when :attr:`type` is
        :data:`~SchemaType.OBJECT`, keyed by property name.
    :ivar default: Optional default value for the property.
    :ivar required: For an object node, the set of required member names inside
        :attr:`properties` (mirrors JSON Schema's ``required`` keyword; it is
        not "this property is required in its parent").
    """

    type: SchemaType
    description: Optional[str] = None
    items: Optional["SchemaProperty"] = None
    properties: Optional[Mapping[str, "SchemaProperty"]] = None
    default: Any = None
    required: Optional[Set[str]] = None

    def has_default(self) -> bool:
        """Check whether an explicit default value was supplied.

        Distinguishes "default explicitly set to None" from "no default".

        :return: True if a default value is set, False otherwise.
        :rtype: bool
        """
        return "default" in self.model_fields_set


class SchemaDefinition(BaseModel):
    """A top-level JSON-Schema-like definition for an object.

    :ivar type: The root schema type; typically :data:`~SchemaType.OBJECT`.
    :ivar properties: Mapping of top-level property names to their schemas.
    :ivar required: Set of required top-level property names.
    """

    type: SchemaType = SchemaType.OBJECT
    properties: Mapping[str, SchemaProperty]
    required: Optional[Set[str]] = None

    def extract_from(self,
                     datasource: Mapping[str, Any],
                     property_alias: Optional[Dict[str, List[str]]] = None) -> Dict[str, Any]:
        """Project *datasource* onto this schema, honoring aliases and defaults.

        :param datasource: Source mapping to read values from.
        :param property_alias: Optional alternative datasource keys per property.
        :return: A new dict containing only schema-described values.
        :raises KeyError: If a required property is missing from the datasource.
        """
        return self._extract(datasource, self.properties, self.required, property_alias)

    @classmethod
    def _extract(cls,
                 datasource: Mapping[str, Any],
                 properties: Mapping[str, SchemaProperty],
                 required: Optional[Set[str]] = None,
                 property_alias: Optional[Dict[str, List[str]]] = None) -> Dict[str, Any]:
        extracted: Dict[str, Any] = {}

        for name, prop in properties.items():
            # Candidate datasource keys: the property name first, then aliases.
            candidates = [name]
            if property_alias and name in property_alias:
                candidates.extend(property_alias[name])

            found = False
            value: Any = None
            for key in candidates:
                if key in datasource:
                    value = datasource[key]
                    found = True
                    break

            if not found and prop.has_default():
                value = prop.default
                found = True

            if not found:
                if required and name in required:
                    raise KeyError(f"Required property '{name}' not found in datasource.")
                continue  # optional and absent: skip silently

            if prop.type == SchemaType.OBJECT and prop.properties:
                # Recurse into mappings; non-mapping values are dropped
                # (preserved lenient behavior).
                if isinstance(value, Mapping):
                    extracted[name] = cls._extract(value, prop.properties, prop.required, property_alias)
            elif prop.type == SchemaType.ARRAY and prop.items:
                # NOTE(review): any iterable (including str) is treated as a
                # sequence here — confirm str values are not expected.
                if isinstance(value, Iterable):
                    item_schema = prop.items
                    elements: List[Any] = []
                    for element in value:
                        if (item_schema.type == SchemaType.OBJECT
                                and item_schema.properties
                                and isinstance(element, dict)):
                            elements.append(cls._extract(element,
                                                         item_schema.properties,
                                                         item_schema.required,
                                                         property_alias))
                        else:
                            # Fix: elements with non-object item schemas are
                            # passed through instead of being dropped.
                            elements.append(element)
                    extracted[name] = elements
            else:
                extracted[name] = value

        return extracted
class RawFoundryHostedMcpTool(BaseModel):
    """Wire model for a single hosted MCP tool.

    :ivar str name: Unique name identifier of the tool.
    :ivar Optional[str] title: Display title; defaults to ``name`` when absent.
    :ivar str description: Human-readable description of the tool.
    :ivar SchemaDefinition input_schema: JSON schema for tool input parameters.
    :ivar Optional[SchemaDefinition] meta: Optional metadata for the tool.
    """

    name: str
    title: Optional[str] = None
    description: str = ""
    # Default to an empty object schema. SchemaDefinition.properties has no
    # default, so a bare `default_factory=SchemaDefinition` would raise a
    # validation error whenever "inputSchema" is missing from the payload.
    input_schema: SchemaDefinition = Field(
        default_factory=lambda: SchemaDefinition(properties={}),
        validation_alias="inputSchema"
    )
    meta: Optional[SchemaDefinition] = Field(default=None, validation_alias="_meta")

    def model_post_init(self, __context: Any) -> None:
        # MCP convention: title falls back to the tool name.
        if self.title is None:
            self.title = self.name


class RawFoundryHostedMcpTools(BaseModel):
    """Wire model for the result containing the list of MCP tools.

    :ivar List[RawFoundryHostedMcpTool] tools: List of MCP tool definitions.
    """

    tools: List[RawFoundryHostedMcpTool] = Field(default_factory=list)


class ListFoundryHostedMcpToolsResponse(BaseModel):
    """Wire model for the complete MCP ``tools/list`` JSON-RPC response.

    :ivar str jsonrpc: JSON-RPC version, defaults to "2.0".
    :ivar int id: Request identifier, defaults to 0.
    :ivar RawFoundryHostedMcpTools result: Result containing the list of tools.
    """

    jsonrpc: str = "2.0"
    id: int = 0
    result: RawFoundryHostedMcpTools = Field(
        default_factory=RawFoundryHostedMcpTools
    )


class BaseConnectedToolsErrorResult(BaseModel, ABC):
    """Base model for connected-tools error responses."""

    @abstractmethod
    def as_exception(self) -> Exception:
        """Convert the error result to an appropriate exception.

        :return: An exception representing the error.
        :rtype: Exception
        """
        raise NotImplementedError


class OAuthConsentRequiredErrorResult(BaseConnectedToolsErrorResult):
    """Wire model for OAuth-consent-required error responses.

    :ivar Literal["OAuthConsentRequired"] type: Error type discriminator.
    :ivar str consent_url: URL for user consent.
    :ivar str message: Human-readable error message.
    :ivar str project_connection_id: Project connection ID related to the error.
    """

    type: Literal["OAuthConsentRequired"]
    # NOTE(review): falls back to the "message" path when "consentUrl" is
    # absent — confirm this fallback is intended.
    consent_url: str = Field(
        validation_alias=AliasChoices(
            AliasPath("toolResult", "consentUrl"),
            AliasPath("toolResult", "message"),
        ),
    )
    message: str = Field(
        validation_alias=AliasPath("toolResult", "message"),
    )
    project_connection_id: str = Field(
        validation_alias=AliasPath("toolResult", "projectConnectionId"),
    )

    def as_exception(self) -> Exception:
        """Materialize the error as an OAuthConsentRequiredError."""
        return OAuthConsentRequiredError(self.message, self.consent_url, self.project_connection_id)


class RawFoundryConnectedTool(BaseModel):
    """Wire model for a single connected tool.

    :ivar str name: Name of the tool.
    :ivar str description: Description of the tool.
    :ivar SchemaDefinition input_schema: Input schema for the tool parameters.
    """
    name: str
    description: str
    # Fix: original used `default=SchemaDefinition`, which made the default
    # the class object itself rather than an instance; use an empty-schema
    # factory (consistent with RawFoundryHostedMcpTool).
    input_schema: SchemaDefinition = Field(
        default_factory=lambda: SchemaDefinition(properties={}),
        validation_alias="parameters",
    )
+ :ivar str project_connection_id: Project connection ID of the remote server. + :ivar List[RawFoundryConnectedTool] tools: List of connected tools from this server. + """ + protocol: str = Field( + validation_alias=AliasPath("remoteServer", "protocol"), + ) + project_connection_id: str = Field( + validation_alias=AliasPath("remoteServer", "projectConnectionId"), + ) + tools: List[RawFoundryConnectedTool] = Field( + default_factory=list, + validation_alias="manifest", + ) + + +class ListConnectedToolsResult(BaseModel): + """Pydantic model for the result of listing connected tools. + + :ivar List[ConnectedRemoteServer] servers: List of connected remote servers. + """ + servers: List[RawFoundryConnectedRemoteServer] = Field( + default_factory=list, + validation_alias="tools", + ) + + +class ListFoundryConnectedToolsResponse(BaseModel): + """Pydantic model for the response of listing the connected tools. + + :ivar Optional[ConnectedToolsResult] result: Result containing connected tool servers. + :ivar Optional[BaseConnectedToolsErrorResult] error: Error result, if any. 
+ """ + + result: Optional[ListConnectedToolsResult] = None + error: Optional[BaseConnectedToolsErrorResult] = None + + # noinspection DuplicatedCode + _TYPE_ADAPTER: ClassVar[TypeAdapter] = TypeAdapter( + Annotated[ + Union[ + Annotated[ + Annotated[ + Union[OAuthConsentRequiredErrorResult], + Field(discriminator="type") + ], + Tag("ErrorType") + ], + Annotated[ListConnectedToolsResult, Tag("ResultType")], + ], + Discriminator( + lambda payload: "ErrorType" if isinstance(payload, dict) and "type" in payload else "ResultType" + ), + ]) + + @model_validator(mode="wrap") + @classmethod + def _validator(cls, data: Any, handler: ModelWrapValidatorHandler) -> "ListFoundryConnectedToolsResponse": + parsed = cls._TYPE_ADAPTER.validate_python(data) + normalized = {} + if isinstance(parsed, ListConnectedToolsResult): + normalized["result"] = parsed + elif isinstance(parsed, BaseConnectedToolsErrorResult): + normalized["error"] = parsed + return handler(normalized) + + +class InvokeConnectedToolsResult(BaseModel): + """Pydantic model for the result of invoking a connected tool. + + :ivar Any value: The result value from the tool invocation. + """ + value: Any = Field(validation_alias="toolResult") + + +class InvokeFoundryConnectedToolsResponse(BaseModel): + """Pydantic model for the response of invoking a connected tool. + + :ivar Optional[InvokeConnectedToolsResult] result: Result of the tool invocation. + :ivar Optional[BaseConnectedToolsErrorResult] error: Error result, if any. 
+ """ + result: Optional[InvokeConnectedToolsResult] = None + error: Optional[BaseConnectedToolsErrorResult] = None + + # noinspection DuplicatedCode + _TYPE_ADAPTER: ClassVar[TypeAdapter] = TypeAdapter( + Annotated[ + Union[ + Annotated[ + Annotated[ + Union[OAuthConsentRequiredErrorResult], + Field(discriminator="type") + ], + Tag("ErrorType") + ], + Annotated[InvokeConnectedToolsResult, Tag("ResultType")], + ], + Discriminator( + lambda payload: "ErrorType" if isinstance(payload, dict) and + # handle other error types in the future + payload.get("type") == "OAuthConsentRequired" + else "ResultType" + ), + ]) + + @model_validator(mode="wrap") + @classmethod + def _validator(cls, data: Any, handler: ModelWrapValidatorHandler) -> "InvokeFoundryConnectedToolsResponse": + parsed = cls._TYPE_ADAPTER.validate_python(data) + normalized = {} + if isinstance(parsed, InvokeConnectedToolsResult): + normalized["result"] = parsed + elif isinstance(parsed, BaseConnectedToolsErrorResult): + normalized["error"] = parsed + return handler(normalized) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py new file mode 100644 index 000000000000..5248ab7aa7fa --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_base.py @@ -0,0 +1,73 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from __future__ import annotations + +from abc import ABC +import json +from typing import Any, ClassVar, MutableMapping, Type + +from azure.core import AsyncPipelineClient +from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, \ + ResourceNotFoundError, ResourceNotModifiedError, map_error +from azure.core.rest import AsyncHttpResponse, HttpRequest + +ErrorMapping = MutableMapping[int, Type[HttpResponseError]] + + +class BaseOperations(ABC): + DEFAULT_ERROR_MAP: ClassVar[ErrorMapping] = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + + def __init__(self, client: AsyncPipelineClient, error_map: ErrorMapping | None = None) -> None: + self._client = client + self._error_map = self._prepare_error_map(error_map) + + @classmethod + def _prepare_error_map(cls, custom_error_map: ErrorMapping | None = None) -> MutableMapping: + """Prepare error map by merging default and custom error mappings. + + :param custom_error_map: Custom error mappings to merge + :return: Merged error map + """ + error_map = cls.DEFAULT_ERROR_MAP + if custom_error_map: + error_map = dict(cls.DEFAULT_ERROR_MAP) + error_map.update(custom_error_map) + return error_map + + async def _send_request(self, request: HttpRequest, *, stream: bool = False, **kwargs: Any) -> AsyncHttpResponse: + """Send an HTTP request. + + :param request: HTTP request + :param stream: Stream to be used for HTTP requests + :param kwargs: Keyword arguments + + :return: Response object + """ + response: AsyncHttpResponse = await self._client.send_request(request, stream=stream, **kwargs) + self._handle_response_error(response) + return response + + def _handle_response_error(self, response: AsyncHttpResponse) -> None: + """Handle HTTP response errors. 
+
+        :param response: HTTP response to check
+        :raises HttpResponseError: If response status is not 200
+        """
+        if response.status_code != 200:
+            map_error(status_code=response.status_code, response=response, error_map=self._error_map)
+            raise HttpResponseError(response=response)
+
+    def _extract_response_json(self, response: AsyncHttpResponse) -> Any:
+        """Decode the response payload as JSON.
+
+        Prefers the buffered ``text()`` accessor; falls back to raw
+        ``body()`` bytes for response objects that do not expose ``text()``.
+        An empty payload decodes to an empty dict.
+
+        :param response: HTTP response whose body should be parsed.
+        :return: Parsed JSON payload (``{}`` for an empty body).
+        """
+        try:
+            payload_text = response.text()
+            payload_json = json.loads(payload_text) if payload_text else {}
+        except AttributeError:
+            payload_bytes = response.body()
+            payload_json = json.loads(payload_bytes.decode("utf-8")) if payload_bytes else {}
+        return payload_json
\ No newline at end of file
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py
new file mode 100644
index 000000000000..f50abb7ed0cc
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_connected_tools.py
@@ -0,0 +1,179 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# --------------------------------------------------------- +from abc import ABC +from typing import Any, ClassVar, Dict, List, Mapping, Optional, cast + +from azure.core.pipeline.transport import HttpRequest + +from ._base import BaseOperations +from .._models import FoundryConnectedTool, FoundryToolDetails, FoundryToolSource, InvokeFoundryConnectedToolsResponse, \ + ListFoundryConnectedToolsResponse, ResolvedFoundryTool, UserInfo +from ..._exceptions import ToolInvocationError + + +class BaseFoundryConnectedToolsOperations(BaseOperations, ABC): + """Base operations for Foundry connected tools.""" + + _API_VERSION: ClassVar[str] = "2025-11-15-preview" + + _HEADERS: ClassVar[Dict[str, str]] = { + "Content-Type": "application/json", + "Accept": "application/json", + } + + _QUERY_PARAMS: ClassVar[Dict[str, Any]] = { + "api-version": _API_VERSION + } + + @staticmethod + def _list_tools_path(agent_name: str) -> str: + return f"/agents/{agent_name}/tools/resolve" + + @staticmethod + def _invoke_tool_path(agent_name: str) -> str: + return f"/agents/{agent_name}/tools/invoke" + + def _build_list_tools_request( + self, + tools: List[FoundryConnectedTool], + user: Optional[UserInfo], + agent_name: str,) -> HttpRequest: + payload: Dict[str, Any] = { + "remoteServers": [ + { + "projectConnectionId": tool.project_connection_id, + "protocol": tool.protocol, + } for tool in tools + ], + } + if user: + payload["user"] = { + "objectId": user.object_id, + "tenantId": user.tenant_id, + } + return self._client.post( + self._list_tools_path(agent_name), + params=self._QUERY_PARAMS, + headers=self._HEADERS, + content=payload) + + @classmethod + def _convert_listed_tools( + cls, + resp: ListFoundryConnectedToolsResponse, + input_tools: List[FoundryConnectedTool]) -> Mapping[FoundryConnectedTool, List[FoundryToolDetails]]: + if resp.error: + raise resp.error.as_exception() + if not resp.result: + return {} + + tool_map = {(tool.project_connection_id, tool.protocol): tool for tool in 
input_tools} + result: Dict[FoundryConnectedTool, List[FoundryToolDetails]] = {} + for server in resp.result.servers: + input_tool = tool_map.get((server.project_connection_id, server.protocol)) + if not input_tool: + continue + + for tool in server.tools: + details = FoundryToolDetails( + name=tool.name, + description=tool.description, + input_schema=tool.input_schema, + ) + result.setdefault(input_tool, []).append(details) + + return result + + def _build_invoke_tool_request( + self, + tool: ResolvedFoundryTool, + arguments: Dict[str, Any], + user: Optional[UserInfo], + agent_name: str) -> HttpRequest: + if tool.definition.source != FoundryToolSource.CONNECTED: + raise ToolInvocationError(f"Tool {tool.name} is not a Foundry connected tool.", tool=tool) + + tool_def = cast(FoundryConnectedTool, tool.definition) + payload: Dict[str, Any] = { + "toolName": tool.name, + "arguments": arguments, + "remoteServer": { + "projectConnectionId": tool_def.project_connection_id, + "protocol": tool_def.protocol, + }, + } + if user: + payload["user"] = { + "objectId": user.object_id, + "tenantId": user.tenant_id, + } + return self._client.post( + self._invoke_tool_path(agent_name), + params=self._QUERY_PARAMS, + headers=self._HEADERS, + content=payload) + + @classmethod + def _convert_invoke_result(cls, resp: InvokeFoundryConnectedToolsResponse) -> Any: + if resp.error: + raise resp.error.as_exception() + if not resp.result: + return None + return resp.result.value + + +class FoundryConnectedToolsOperations(BaseFoundryConnectedToolsOperations): + """Operations for managing Foundry connected tools.""" + + async def list_tools(self, + tools: List[FoundryConnectedTool], + user: Optional[UserInfo], + agent_name: str) -> Mapping[FoundryConnectedTool, List[FoundryToolDetails]]: + """List connected tools. + + :param tools: List of connected tool definitions. + :type tools: List[FoundryConnectedTool] + :param user: User information for the request. Value can be None if running in local. 
+ :type user: Optional[UserInfo] + :param agent_name: Name of the agent. + :type agent_name: str + :return: Details of connected tools. + :rtype: Mapping[FoundryConnectedTool, List[FoundryToolDetails]] + """ + if not tools: + return {} + request = self._build_list_tools_request(tools, user, agent_name) + response = await self._send_request(request) + async with response: + json_response = self._extract_response_json(response) + tools_response = ListFoundryConnectedToolsResponse.model_validate(json_response) + return self._convert_listed_tools(tools_response, tools) + + + async def invoke_tool( + self, + tool: ResolvedFoundryTool, + arguments: Dict[str, Any], + user: Optional[UserInfo], + agent_name: str) -> Any: + """Invoke a connected tool. + + :param tool: Tool descriptor to invoke. + :type tool: ResolvedFoundryTool + :param arguments: Input arguments for the tool. + :type arguments: Mapping[str, Any] + :param user: User information for the request. Value can be None if running in local. + :type user: Optional[UserInfo] + :param agent_name: Name of the agent. + :type agent_name: str + :return: Result of the tool invocation. 
+ :rtype: Any + """ + request = self._build_invoke_tool_request(tool, arguments, user, agent_name) + response = await self._send_request(request) + async with response: + json_response = self._extract_response_json(response) + invoke_response = InvokeFoundryConnectedToolsResponse.model_validate(json_response) + return self._convert_invoke_result(invoke_response) + \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py new file mode 100644 index 000000000000..471e18bf18ee --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/client/operations/_foundry_hosted_mcp_tools.py @@ -0,0 +1,171 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from abc import ABC +from typing import Any, ClassVar, Dict, List, Mapping, TYPE_CHECKING, cast + +from azure.core.rest import HttpRequest + +from azure.ai.agentserver.core.tools._exceptions import ToolInvocationError +from azure.ai.agentserver.core.tools.client._models import ( + FoundryHostedMcpTool, + FoundryToolSource, + ResolvedFoundryTool, + FoundryToolDetails, + ListFoundryHostedMcpToolsResponse, +) +from azure.ai.agentserver.core.tools.client.operations._base import BaseOperations + + +class BaseFoundryHostedMcpToolsOperations(BaseOperations, ABC): + """Base operations for Foundry-hosted MCP tools.""" + + _PATH: ClassVar[str] = "/mcp_tools" + + _API_VERSION: ClassVar[str] = "2025-11-15-preview" + + _HEADERS: ClassVar[Dict[str, str]] = { + "Content-Type": "application/json", + "Accept": "application/json,text/event-stream", + "Connection": "keep-alive", + "Cache-Control": "no-cache", + } + + _QUERY_PARAMS: ClassVar[Dict[str, 
Any]] = { + "api-version": _API_VERSION + } + + _LIST_TOOLS_REQUEST_BODY: ClassVar[Dict[str, Any]] = { + "jsonrpc": "2.0", + "id": 1, + "method": "tools/list", + "params": {} + } + + _INVOKE_TOOL_REQUEST_BODY_TEMPLATE: ClassVar[Dict[str, Any]] = { + "jsonrpc": "2.0", + "id": 2, + "method": "tools/call", + } + + # Tool-specific property key overrides + # Format: {"tool_name": {"tool_def_key": "meta_schema_key"}} + _TOOL_PROPERTY_ALIAS: ClassVar[Dict[str, Dict[str, List[str]]]] = { + "_default": { + "imagegen_model_deployment_name": ["model_deployment_name"], + "model_deployment_name": ["model"], + "deployment_name": ["model"], + }, + "image_generation": { + "imagegen_model_deployment_name": ["model"] + }, + # Add more tool-specific mappings as needed + } + + def _build_list_tools_request(self) -> HttpRequest: + """Build request for listing MCP tools. + + :return: Request for listing MCP tools. + """ + return self._client.post(self._PATH, + params=self._QUERY_PARAMS, + headers=self._HEADERS, + content=self._LIST_TOOLS_REQUEST_BODY) + + @staticmethod + def _convert_listed_tools( + response: ListFoundryHostedMcpToolsResponse, + allowed_tools: List[FoundryHostedMcpTool]) -> Mapping[FoundryHostedMcpTool, List[FoundryToolDetails]]: + + allowlist = {tool.name: tool for tool in allowed_tools} + result = {} + for tool in response.result.tools: + definition = allowlist.get(tool.name) + if not definition: + continue + details = FoundryToolDetails( + name=tool.name, + description=tool.description, + metadata=tool.meta, + input_schema=tool.input_schema) + result[definition] = [details] + + return result + + def _build_invoke_tool_request(self, tool: ResolvedFoundryTool, arguments: Dict[str, Any]) -> HttpRequest: + if tool.definition.source != FoundryToolSource.HOSTED_MCP: + raise ToolInvocationError(f"Tool {tool.name} is not a Foundry-hosted MCP tool.", tool=tool) + definition = cast(FoundryHostedMcpTool, tool.definition) if TYPE_CHECKING else tool.definition + + payload = 
dict(self._INVOKE_TOOL_REQUEST_BODY_TEMPLATE) + payload["params"] = { + "name": tool.name, + "arguments": arguments + } + if tool.metadata and definition.configuration: + payload["_meta"] = tool.metadata.extract_from(definition.configuration, + self._resolve_property_alias(tool.name)) + + return self._client.post(self._PATH, + params=self._QUERY_PARAMS, + headers=self._HEADERS, + content=payload) + + @classmethod + def _resolve_property_alias(cls, tool_name: str) -> Dict[str, List[str]]: + """Get property key overrides for a specific tool. + + :param tool_name: Name of the tool. + :type tool_name: str + :return: Property key overrides. + :rtype: Dict[str, List[str]] + """ + overrides = dict(cls._TOOL_PROPERTY_ALIAS.get("_default", {})) + tool_specific = cls._TOOL_PROPERTY_ALIAS.get(tool_name, {}) + overrides.update(tool_specific) + return overrides + + +class FoundryMcpToolsOperations(BaseFoundryHostedMcpToolsOperations): + """Operations for Foundry-hosted MCP tools.""" + + async def list_tools( + self, + allowed_tools: List[FoundryHostedMcpTool]) -> Mapping[FoundryHostedMcpTool, List[FoundryToolDetails]]: + """List MCP tools. + + :param allowed_tools: List of allowed MCP tools to filter. + :type allowed_tools: List[FoundryHostedMcpTool] + :return: Details of MCP tools. + :rtype: Mapping[FoundryHostedMcpTool, List[FoundryToolDetails]] + """ + if not allowed_tools: + return {} + + request = self._build_list_tools_request() + response = await self._send_request(request) + async with response: + json_response = self._extract_response_json(response) + tools_response = ListFoundryHostedMcpToolsResponse.model_validate(json_response) + return self._convert_listed_tools(tools_response, allowed_tools) + + async def invoke_tool( + self, + tool: ResolvedFoundryTool, + arguments: Dict[str, Any], + ) -> Any: + """Invoke an MCP tool. + + :param tool: Tool descriptor for the tool to invoke. + :type tool: ResolvedFoundryTool + :param arguments: Input arguments for the tool. 
+ :type arguments: Dict[str, Any] + :return: Result of the tool invocation. + :rtype: Any + """ + request = self._build_invoke_tool_request(tool, arguments) + response = await self._send_request(request) + async with response: + json_response = self._extract_response_json(response) + invoke_response = json_response + return invoke_response diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/__init__.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/__init__.py new file mode 100644 index 000000000000..28077537d94b --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/__init__.py @@ -0,0 +1,5 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py new file mode 100644 index 000000000000..d50e5f2f9d7e --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_catalog.py @@ -0,0 +1,130 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +import asyncio +import threading +from abc import ABC, abstractmethod +from concurrent.futures import Future +from typing import Any, Awaitable, Collection, Dict, List, Mapping, MutableMapping, Optional, Tuple + +from cachetools import TTLCache + +from ._facade import FoundryToolLike, ensure_foundry_tool +from ._user import UserProvider +from ..client._client import FoundryToolClient +from ..client._models import FoundryTool, FoundryToolDetails, FoundryToolSource, ResolvedFoundryTool, UserInfo + + +class FoundryToolCatalog(ABC): + """Base class for Foundry tool catalogs.""" + def __init__(self, user_provider: UserProvider): + self._user_provider = user_provider + + async def get(self, tool: FoundryToolLike) -> Optional[ResolvedFoundryTool]: + """Gets a Foundry tool by its definition. + + :param tool: The Foundry tool to resolve. + :type tool: FoundryToolLike + :return: The resolved Foundry tool. + :rtype: Optional[ResolvedFoundryTool] + """ + tools = await self.list([tool]) + return tools[0] if tools else None + + @abstractmethod + async def list(self, tools: List[FoundryToolLike]) -> List[ResolvedFoundryTool]: + """Lists all available Foundry tools. + + :param tools: The list of Foundry tools to resolve. + :type tools: List[FoundryToolLike] + :return: A list of resolved Foundry tools. 
+ :rtype: List[ResolvedFoundryTool] + """ + raise NotImplementedError + + +class CachedFoundryToolCatalog(FoundryToolCatalog, ABC): + """Cached implementation of FoundryToolCatalog with concurrency-safe caching.""" + + def __init__(self, user_provider: UserProvider): + super().__init__(user_provider) + self._cache: MutableMapping[Any, Awaitable[List[FoundryToolDetails]]] = self._create_cache() + + def _create_cache(self) -> MutableMapping[Any, Awaitable[List[FoundryToolDetails]]]: + return TTLCache(maxsize=1024, ttl=600) + + def _get_key(self, user: Optional[UserInfo], tool: FoundryTool) -> Any: + if tool.source is FoundryToolSource.HOSTED_MCP: + return tool + return user, tool + + async def list(self, tools: List[FoundryToolLike]) -> List[ResolvedFoundryTool]: + user = await self._user_provider.get_user() + foundry_tools = [ensure_foundry_tool(tool) for tool in tools] + + # for tools that are not being listed, create a batch task, convert to per-tool resolving tasks, and cache them + tools_to_fetch = {k: tool for tool in foundry_tools if (k := self._get_key(user, tool)) not in self._cache} + if tools_to_fetch: + # Awaitable[Mapping[FoundryTool, List[FoundryToolDetails]]] + fetched_tools = asyncio.create_task(self._fetch_tools(tools_to_fetch.values(), user)) + + for k, tool in tools_to_fetch.items(): + # safe to write cache since it's the only runner in this event loop + self._cache[k] = asyncio.create_task(self._as_resolving_task(tool, fetched_tools)) + + # now we have every tool associated with a task + resolving_tasks = { + tool: self._cache[self._get_key(user, tool)] + for tool in foundry_tools + } + + try: + await asyncio.gather(*resolving_tasks.values()) + except: + # exception can only be caused by fetching tasks, remove them from cache + for k, tool in tools_to_fetch.items(): + if k in self._cache: + del self._cache[k] + raise + + resolved_tools = [] + for tool, task in resolving_tasks.items(): + # this acts like a lock - every task of the same tool waits 
for the same underlying fetch + details_list = await task + for details in details_list: + resolved_tools.append( + ResolvedFoundryTool( + definition=tool, + details=details + ) + ) + + return resolved_tools + + @staticmethod + async def _as_resolving_task( + tool: FoundryTool, + fetching: Awaitable[Mapping[FoundryTool, List[FoundryToolDetails]]] + ) -> List[FoundryToolDetails]: + details = await fetching + return details.get(tool, []) + + @abstractmethod + async def _fetch_tools(self, + tools: Collection[FoundryTool], + user: Optional[UserInfo]) -> Mapping[FoundryTool, List[FoundryToolDetails]]: + raise NotImplementedError + + +class DefaultFoundryToolCatalog(CachedFoundryToolCatalog): + """Default implementation of FoundryToolCatalog.""" + + def __init__(self, client: FoundryToolClient, user_provider: UserProvider, agent_name: str): + super().__init__(user_provider) + self._client = client + self._agent_name = agent_name + + async def _fetch_tools(self, + tools: Collection[FoundryTool], + user: Optional[UserInfo]) -> Mapping[FoundryTool, List[FoundryToolDetails]]: + return await self._client.list_tools(tools, self._agent_name, user) diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_facade.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_facade.py new file mode 100644 index 000000000000..ebaca87cf1a7 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_facade.py @@ -0,0 +1,49 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from typing import Any, Dict, Union + +from .. import FoundryConnectedTool, FoundryHostedMcpTool +from .._exceptions import InvalidToolFacadeError +from ..client._models import FoundryTool, FoundryToolProtocol + +# FoundryToolFacade: a “tool descriptor” bag. 
+#
+# Reserved keys:
+#   Required:
+#     - "type": str  Discriminator, e.g. "mcp" | "a2a" | "code_interpreter" | ...
+#   Optional:
+#     - "project_connection_id": str  Project connection id of Foundry connected tools,
+#       required when "type" is "mcp" or "a2a".
+#
+# Custom keys:
+#   - Allowed, but MUST NOT shadow reserved keys.
+FoundryToolFacade = Dict[str, Any]
+
+FoundryToolLike = Union[FoundryToolFacade, FoundryTool]
+
+
+def ensure_foundry_tool(tool: FoundryToolLike) -> FoundryTool:
+    """Ensure the input is a FoundryTool instance.
+
+    :param tool: The tool descriptor, either as a FoundryToolFacade or FoundryTool.
+    :type tool: FoundryToolLike
+    :return: The corresponding FoundryTool instance.
+    :rtype: FoundryTool
+    :raises InvalidToolFacadeError: If the facade lacks a valid ``"type"``, or
+        names a connected-tool protocol without a ``"project_connection_id"``.
+    """
+    if isinstance(tool, FoundryTool):
+        return tool
+
+    # Work on a copy so reserved keys can be popped without mutating the caller's dict.
+    tool = tool.copy()
+    tool_type = tool.pop("type", None)
+    if not isinstance(tool_type, str) or not tool_type:
+        raise InvalidToolFacadeError("FoundryToolFacade must have a valid 'type' field of type str.")
+
+    try:
+        protocol = FoundryToolProtocol(tool_type)
+    except ValueError:
+        # "type" is not a connected-tool protocol: treat the remaining keys as
+        # the configuration of a Foundry-hosted MCP tool. Only the enum
+        # conversion is guarded — the previous bare ``except:`` also swallowed
+        # the InvalidToolFacadeError raised below for a missing connection id.
+        return FoundryHostedMcpTool(name=tool_type, configuration=tool)
+
+    project_connection_id = tool.pop("project_connection_id", None)
+    if not isinstance(project_connection_id, str) or not project_connection_id:
+        raise InvalidToolFacadeError(f"project_connection_id is required for tool protocol {protocol}.")
+
+    return FoundryConnectedTool(protocol=protocol, project_connection_id=project_connection_id)
diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_invoker.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_invoker.py
new file mode 100644
index 000000000000..d24c79dd4d12
--- /dev/null
+++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_invoker.py
@@ -0,0 +1,69 @@
+# ---------------------------------------------------------
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# --------------------------------------------------------- +from abc import ABC, abstractmethod +from typing import Any, Dict + +from ._user import UserProvider +from ..client._client import FoundryToolClient +from ..client._models import ResolvedFoundryTool + + +class FoundryToolInvoker(ABC): + """Abstract base class for Foundry tool invokers.""" + + @property + @abstractmethod + def resolved_tool(self) -> ResolvedFoundryTool: + """Get the resolved tool definition. + + :return: The tool definition. + :rtype: ResolvedFoundryTool + """ + raise NotImplementedError + + @abstractmethod + async def invoke(self, arguments: Dict[str, Any]) -> Any: + """Invoke the tool with the given arguments. + + :param arguments: The arguments to pass to the tool. + :type arguments: Dict[str, Any] + :return: The result of the tool invocation + :rtype: Any + """ + raise NotImplementedError + + +class DefaultFoundryToolInvoker(FoundryToolInvoker): + """Default implementation of FoundryToolInvoker.""" + + def __init__(self, + resolved_tool: ResolvedFoundryTool, + client: FoundryToolClient, + user_provider: UserProvider, + agent_name: str): + self._resolved_tool = resolved_tool + self._client = client + self._user_provider = user_provider + self._agent_name = agent_name + + @property + def resolved_tool(self) -> ResolvedFoundryTool: + """Get the resolved tool definition. + + :return: The tool definition. + :rtype: ResolvedFoundryTool + """ + return self._resolved_tool + + async def invoke(self, arguments: Dict[str, Any]) -> Any: + """Invoke the tool with the given arguments. 
+ + :param arguments: The arguments to pass to the tool + :type arguments: Dict[str, Any] + :return: The result of the tool invocation + :rtype: Any + """ + user = await self._user_provider.get_user() + result = await self._client.invoke_tool(self._resolved_tool, arguments, self._agent_name, user) + return result diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_resolver.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_resolver.py new file mode 100644 index 000000000000..2764558b06bb --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_resolver.py @@ -0,0 +1,57 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from abc import ABC, abstractmethod +from typing import Awaitable, Union, overload + +from ._catalog import FoundryToolCatalog +from ._facade import FoundryToolLike, ensure_foundry_tool +from ._invoker import DefaultFoundryToolInvoker, FoundryToolInvoker +from ._user import UserProvider +from .. import FoundryToolClient +from .._exceptions import UnableToResolveToolInvocationError +from ..client._models import ResolvedFoundryTool + + +class FoundryToolInvocationResolver(ABC): + """Resolver for Foundry tool invocations.""" + + @abstractmethod + async def resolve(self, tool: Union[FoundryToolLike, ResolvedFoundryTool]) -> FoundryToolInvoker: + """Resolves a Foundry tool invocation. + + :param tool: The Foundry tool to resolve. + :type tool: Union[FoundryToolLike, ResolvedFoundryTool] + :return: The resolved Foundry tool invoker. 
+ :rtype: FoundryToolInvoker + """ + raise NotImplementedError + + +class DefaultFoundryToolInvocationResolver(FoundryToolInvocationResolver): + """Default implementation of FoundryToolInvocationResolver.""" + + def __init__(self, + catalog: FoundryToolCatalog, + client: FoundryToolClient, + user_provider: UserProvider, + agent_name: str): + self._catalog = catalog + self._client = client + self._user_provider = user_provider + self._agent_name = agent_name + + async def resolve(self, tool: Union[FoundryToolLike, ResolvedFoundryTool]) -> FoundryToolInvoker: + """Resolves a Foundry tool invocation. + + :param tool: The Foundry tool to resolve. + :type tool: Union[FoundryToolLike, ResolvedFoundryTool] + :return: The resolved Foundry tool invoker. + :rtype: FoundryToolInvoker + """ + resolved_tool = (tool + if isinstance(tool, ResolvedFoundryTool) + else await self._catalog.get(ensure_foundry_tool(tool))) + if not resolved_tool: + raise UnableToResolveToolInvocationError(f"Unable to resolve tool {tool} from catalog", tool) + return DefaultFoundryToolInvoker(resolved_tool, self._client, self._user_provider, self._agent_name) \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_runtime.py b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_runtime.py new file mode 100644 index 000000000000..8ff723a6f7dc --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-core/azure/ai/agentserver/core/tools/runtime/_runtime.py @@ -0,0 +1,87 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import os
from typing import Any, AsyncContextManager, Dict, Optional, Union

from azure.core.credentials_async import AsyncTokenCredential

from ._catalog import DefaultFoundryToolCatalog, FoundryToolCatalog
from ._facade import FoundryToolLike
from ._resolver import DefaultFoundryToolInvocationResolver, FoundryToolInvocationResolver
from ._user import ContextVarUserProvider, UserProvider
from ..client._models import ResolvedFoundryTool
from ..client._client import FoundryToolClient
from ...constants import Constants


class FoundryToolRuntime(AsyncContextManager["FoundryToolRuntime"]):
    """Base class for Foundry tool runtimes."""

    @property
    def catalog(self) -> FoundryToolCatalog:
        """The tool catalog.

        :return: The tool catalog.
        :rtype: FoundryToolCatalog
        """
        raise NotImplementedError

    @property
    def invocation(self) -> FoundryToolInvocationResolver:
        """The tool invocation resolver.

        :return: The tool invocation resolver.
        :rtype: FoundryToolInvocationResolver
        """
        raise NotImplementedError

    async def invoke(self, tool: Union[FoundryToolLike, ResolvedFoundryTool], arguments: Dict[str, Any]) -> Any:
        """Invoke a tool with the given arguments.

        :param tool: The tool to invoke.
        :type tool: Union[FoundryToolLike, ResolvedFoundryTool]
        :param arguments: The arguments to pass to the tool.
        :type arguments: Dict[str, Any]
        :return: The result of the tool invocation.
        :rtype: Any
        """
        # Two-step: resolve the tool into an invoker, then delegate the call.
        resolved_invoker = await self.invocation.resolve(tool)
        return await resolved_invoker.invoke(arguments)


class DefaultFoundryToolRuntime(FoundryToolRuntime):
    """Default implementation of FoundryToolRuntime.

    Owns a :class:`FoundryToolClient` and wires it into a catalog plus an
    invocation resolver; entering the runtime enters the underlying client.
    """

    def __init__(self,
                 project_endpoint: str,
                 credential: "AsyncTokenCredential",
                 user_provider: Optional[UserProvider] = None):
        # NOTE(review): dependency injection may be worth introducing here later.
        provider = user_provider or ContextVarUserProvider()
        agent = os.getenv(Constants.AGENT_NAME, "$default")
        client = FoundryToolClient(endpoint=project_endpoint, credential=credential)
        self._user_provider = provider
        self._agent_name = agent
        self._client = client
        self._catalog = DefaultFoundryToolCatalog(
            client=client,
            user_provider=provider,
            agent_name=agent,
        )
        self._invocation = DefaultFoundryToolInvocationResolver(
            catalog=self._catalog,
            client=client,
            user_provider=provider,
            agent_name=agent,
        )

    @property
    def catalog(self) -> FoundryToolCatalog:
        """The tool catalog."""
        return self._catalog

    @property
    def invocation(self) -> FoundryToolInvocationResolver:
        """The tool invocation resolver."""
        return self._invocation

    async def __aenter__(self) -> "DefaultFoundryToolRuntime":
        await self._client.__aenter__()
        return self

    async def __aexit__(self, exc_type, exc_value, traceback):
        await self._client.__aexit__(exc_type, exc_value, traceback)
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from contextvars import ContextVar
from typing import Awaitable, Callable, Optional

from starlette.applications import Starlette
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.types import ASGIApp

from ._user import ContextVarUserProvider, resolve_user_from_headers
from ..client._models import UserInfo

_UserContextType = ContextVar[Optional[UserInfo]]
_ResolverType = Callable[[Request], Awaitable[Optional[UserInfo]]]

class UserInfoContextMiddleware(BaseHTTPMiddleware):
    """Middleware that exposes per-request user info through a context variable."""

    def __init__(self, app: ASGIApp, user_info_var: _UserContextType, user_resolver: _ResolverType):
        super().__init__(app)
        self._user_info_var = user_info_var
        self._user_resolver = user_resolver

    @classmethod
    def install(cls,
                app: Starlette,
                user_context: Optional[_UserContextType] = None,
                user_resolver: Optional[_ResolverType] = None):
        """Install the middleware into a Starlette application.

        :param app: The Starlette application to install the middleware into.
        :type app: Starlette
        :param user_context: Optional context variable to use for storing user info.
            If not provided, a default context variable will be used.
        :type user_context: Optional[ContextVar[Optional[UserInfo]]]
        :param user_resolver: Optional function to resolve user info from the request.
            If not provided, a default resolver will be used.
        :type user_resolver: Optional[Callable[[Request], Awaitable[Optional[UserInfo]]]]
        """
        context = user_context if user_context is not None else ContextVarUserProvider.default_user_info_context
        resolver = user_resolver if user_resolver is not None else cls._default_user_resolver
        app.add_middleware(UserInfoContextMiddleware,
                           user_info_var=context,
                           user_resolver=resolver)

    @staticmethod
    async def _default_user_resolver(request: Request) -> Optional[UserInfo]:
        # Default behavior: derive the user from the request headers.
        return resolve_user_from_headers(request.headers)

    async def dispatch(self, request: Request, call_next):
        """Process the incoming request, setting the user info in the context variable.

        :param request: The incoming Starlette request.
        :type request: Request
        :param call_next: The next middleware or endpoint to call.
        :type call_next: Callable[[Request], Awaitable[Response]]
        :return: The response from the next middleware or endpoint.
        :rtype: Response
        """
        resolved_user = await self._user_resolver(request)
        token = self._user_info_var.set(resolved_user)
        try:
            return await call_next(request)
        finally:
            # Always restore the previous value, even when the handler raised.
            self._user_info_var.reset(token)
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from contextvars import ContextVar
from abc import ABC, abstractmethod
from typing import ClassVar, Mapping, Optional

from ..client._models import UserInfo


class UserProvider(ABC):
    """Base class for user providers."""

    @abstractmethod
    async def get_user(self) -> Optional[UserInfo]:
        """Get the user information.

        :return: The user information, or None when no user is available.
        :rtype: Optional[UserInfo]
        """
        raise NotImplementedError


class ContextVarUserProvider(UserProvider):
    """User provider that retrieves user information from a ContextVar."""

    # The stored value may legitimately be None (e.g. no user headers on the
    # request), so the variable is typed Optional[UserInfo].
    default_user_info_context: ClassVar[ContextVar[Optional[UserInfo]]] = ContextVar("user_info_context")

    def __init__(self, context: Optional[ContextVar[Optional[UserInfo]]] = None):
        self.context = context or self.default_user_info_context

    async def get_user(self) -> Optional[UserInfo]:
        """Get the user information from the context variable.

        :return: The user information, or None if the variable is unset.
        :rtype: Optional[UserInfo]
        """
        return self.context.get(None)


def resolve_user_from_headers(headers: Mapping[str, str],
                              object_id_header: str = "x-aml-oid",
                              tenant_id_header: str = "x-aml-tid") -> Optional[UserInfo]:
    """Resolve user information from HTTP headers.

    :param headers: The HTTP headers.
    :type headers: Mapping[str, str]
    :param object_id_header: The header name for the object ID.
    :type object_id_header: str
    :param tenant_id_header: The header name for the tenant ID.
    :type tenant_id_header: str
    :return: The user information, or None if either header is missing or empty.
    :rtype: Optional[UserInfo]
    """
    object_id = headers.get(object_id_header, "")
    tenant_id = headers.get(tenant_id_header, "")

    # Both identifiers are required to form a usable user identity.
    if not object_id or not tenant_id:
        return None

    return UserInfo(object_id=object_id, tenant_id=tenant_id)
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from ..client._models import ResolvedFoundryTool


class ToolNameResolver:
    """Utility class for resolving tool names to be registered to model.

    Names are stable per tool id: the first tool seen with a given name keeps
    it as-is, and each later tool with the same name receives an underscore
    plus a running count as a suffix.
    """

    def __init__(self):
        self._count_by_name = {}
        self._stable_names = {}

    def resolve(self, tool: ResolvedFoundryTool) -> str:
        """Resolve a stable name for the given tool.

        If the tool name has not been used before, use it as is.
        If it has been used, append an underscore and a count to make it unique.

        :param tool: The tool to resolve the name for.
        :type tool: ResolvedFoundryTool
        :return: The resolved stable name for the tool.
        :rtype: str
        """
        cached = self._stable_names.get(tool.id)
        if cached is not None:
            return cached

        base_name = tool.details.name
        seen = self._count_by_name.setdefault(base_name, 0)
        resolved_name = base_name if seen == 0 else f"{base_name}_{seen}"

        # NOTE(review): a pre-existing tool literally named "<name>_1" could
        # collide with a generated suffix — confirm upstream naming rules.
        self._stable_names[tool.id] = resolved_name
        self._count_by_name[base_name] = seen + 1
        return resolved_name
# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
from __future__ import annotations

import asyncio
import inspect
from types import TracebackType
from typing import Any, Type

from azure.core.credentials import AccessToken, TokenCredential
from azure.core.credentials_async import AsyncTokenCredential


async def _to_thread(func, *args, **kwargs):
    """Run *func* in a worker thread; fallback for Pythons without asyncio.to_thread (3.9+)."""
    if hasattr(asyncio, "to_thread"):
        return await asyncio.to_thread(func, *args, **kwargs)  # py>=3.9
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, lambda: func(*args, **kwargs))


class AsyncTokenCredentialAdapter(AsyncTokenCredential):
    """
    AsyncTokenCredential adapter for either:
      - azure.core.credentials.TokenCredential (sync)
      - azure.core.credentials_async.AsyncTokenCredential (async)

    Sync credentials are driven on a worker thread so the event loop is not
    blocked by token acquisition.
    """

    def __init__(self, credential: TokenCredential | AsyncTokenCredential) -> None:
        """
        :param credential: The sync or async credential to adapt.
        :raises TypeError: If the object does not expose a ``get_token`` method.
        """
        if not hasattr(credential, "get_token"):
            raise TypeError("credential must have a get_token method")
        self._credential = credential
        # Async if it's an AsyncTokenCredential, or if get_token itself is a coroutine function.
        self._is_async = isinstance(credential, AsyncTokenCredential) or inspect.iscoroutinefunction(
            getattr(credential, "get_token", None)
        )

    async def get_token(
        self,
        *scopes: str,
        claims: str | None = None,
        tenant_id: str | None = None,
        enable_cae: bool = False,
        **kwargs: Any,
    ) -> AccessToken:
        """Request an access token, awaiting async credentials directly and
        offloading sync credentials to a worker thread.

        :param scopes: The scopes for the token request.
        :return: The access token.
        :rtype: AccessToken
        """
        if self._is_async:
            return await self._credential.get_token(*scopes,
                                                    claims=claims,
                                                    tenant_id=tenant_id,
                                                    enable_cae=enable_cae,
                                                    **kwargs)
        return await _to_thread(self._credential.get_token,
                                *scopes,
                                claims=claims,
                                tenant_id=tenant_id,
                                enable_cae=enable_cae,
                                **kwargs)

    async def close(self) -> None:
        """
        Best-effort resource cleanup:
          - if underlying has async close(): await it
          - else if underlying has sync close(): run it in a thread
        """
        close_fn = getattr(self._credential, "close", None)
        if close_fn is None:
            return

        if inspect.iscoroutinefunction(close_fn):
            await close_fn()
        else:
            await _to_thread(close_fn)

    async def __aenter__(self) -> "AsyncTokenCredentialAdapter":
        # Enter the underlying credential only if it supports async context management.
        enter = getattr(self._credential, "__aenter__", None)
        if enter is not None and inspect.iscoroutinefunction(enter):
            await enter()
        return self

    async def __aexit__(
        self,
        exc_type: Type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        # Prefer delegating to the underlying async context manager; otherwise
        # fall back to best-effort close().
        aexit = getattr(self._credential, "__aexit__", None)
        if aexit is not None and inspect.iscoroutinefunction(aexit):
            return await aexit(exc_type, exc_value, traceback)
        await self.close()
.tool_client import ToolClient from .langgraph import LangGraphAdapter if TYPE_CHECKING: # pragma: no cover - from . import models from azure.core.credentials_async import AsyncTokenCredential + from .models import LanggraphStateConverter def from_langgraph( agent, credentials: Optional["AsyncTokenCredential"] = None, - state_converter: Optional["models.LanggraphStateConverter"] = None, - **kwargs: Any + state_converter: Optional["LanggraphStateConverter"] = None ) -> "LangGraphAdapter": - return LangGraphAdapter(agent, credentials=credentials, state_converter=state_converter, **kwargs) + return LangGraphAdapter(agent, credentials=credentials, state_converter=state_converter) -__all__ = ["from_langgraph", "ToolClient"] +__all__ = ["from_langgraph", "LanggraphRunContext"] __version__ = VERSION + +set_current_app(PackageMetadata.from_dist("azure-ai-agentserver-langgraph")) \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/_context.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/_context.py new file mode 100644 index 000000000000..846133a7912c --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/_context.py @@ -0,0 +1,20 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from langgraph.runtime import get_runtime + +from .tools._context import FoundryToolContext + + +@dataclass +class LanggraphRunContext: + + tools: FoundryToolContext + + @classmethod + def get_current(cls) -> "LanggraphRunContext": + lg_runtime = get_runtime(cls) + return lg_runtime.context diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py index 51937fe31986..e36917d5c20b 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/langgraph.py @@ -5,24 +5,24 @@ # mypy: disable-error-code="assignment,arg-type" import os import re -from typing import TYPE_CHECKING, Any, Awaitable, Protocol, Union, Optional, List +from typing import Optional, TYPE_CHECKING, Union from langchain_core.runnables import RunnableConfig -from langchain_core.tools import StructuredTool from langgraph.graph.state import CompiledStateGraph -from azure.ai.agentserver.core.client.tools import OAuthConsentRequiredError from azure.ai.agentserver.core.constants import Constants from azure.ai.agentserver.core.logger import get_logger from azure.ai.agentserver.core.server.base import FoundryCBAgent from azure.ai.agentserver.core.server.common.agent_run_context import AgentRunContext - +from azure.ai.agentserver.core.tools import OAuthConsentRequiredError +from ._context import LanggraphRunContext from .models import ( LanggraphMessageStateConverter, LanggraphStateConverter, ) from .models.utils import is_state_schema_valid -from .tool_client import ToolClient +from .tools._context import FoundryToolContext +from .tools._resolver import FoundryLangChainToolResolver if TYPE_CHECKING: from 
azure.core.credentials_async import AsyncTokenCredential @@ -30,24 +30,6 @@ logger = get_logger() -class GraphFactory(Protocol): - """Protocol for graph factory functions. - - A graph factory is a callable that takes a ToolClient and returns - a CompiledStateGraph, either synchronously or asynchronously. - """ - - def __call__(self, tools: List[StructuredTool]) -> Union[CompiledStateGraph, Awaitable[CompiledStateGraph]]: - """Create a CompiledStateGraph using the provided ToolClient. - - :param tools: The list of StructuredTool instances. - :type tools: List[StructuredTool] - :return: A compiled LangGraph state graph, or an awaitable that resolves to one. - :rtype: Union[CompiledStateGraph, Awaitable[CompiledStateGraph]] - """ - ... - - class LangGraphAdapter(FoundryCBAgent): """ Adapter for LangGraph Agent. @@ -55,10 +37,9 @@ class LangGraphAdapter(FoundryCBAgent): def __init__( self, - graph: Union[CompiledStateGraph, GraphFactory], + graph: CompiledStateGraph, credentials: "Optional[AsyncTokenCredential]" = None, - state_converter: "Optional[LanggraphStateConverter]" = None, - **kwargs: Any + state_converter: "Optional[LanggraphStateConverter]" = None ) -> None: """ Initialize the LangGraphAdapter with a CompiledStateGraph or a function that returns one. @@ -71,152 +52,44 @@ def __init__( :param state_converter: custom state converter. Required if graph state is not MessagesState. 
:type state_converter: Optional[LanggraphStateConverter] """ - super().__init__(credentials=credentials, **kwargs) # pylint: disable=unexpected-keyword-arg - self._graph_or_factory: Union[CompiledStateGraph, GraphFactory] = graph - self._resolved_graph: "Optional[CompiledStateGraph]" = None + super().__init__(credentials=credentials) # pylint: disable=unexpected-keyword-arg + self._graph = graph + self._tool_resolver = FoundryLangChainToolResolver() self.azure_ai_tracer = None - # If graph is already compiled, validate and set up state converter - if isinstance(graph, CompiledStateGraph): - self._resolved_graph = graph - if not state_converter: - if is_state_schema_valid(self._resolved_graph.builder.state_schema): - self.state_converter = LanggraphMessageStateConverter() - else: - raise ValueError("state_converter is required for non-MessagesState graph.") + if not state_converter: + if is_state_schema_valid(self._graph.builder.state_schema): + self.state_converter = LanggraphMessageStateConverter() else: - self.state_converter = state_converter + raise ValueError("state_converter is required for non-MessagesState graph.") else: - # Defer validation until graph is resolved self.state_converter = state_converter - @property - def graph(self) -> "Optional[CompiledStateGraph]": - """ - Get the resolved graph. This property provides backward compatibility. - - :return: The resolved CompiledStateGraph if available, None otherwise. 
- :rtype: Optional[CompiledStateGraph] - """ - return self._resolved_graph - async def agent_run(self, context: AgentRunContext): # Resolve graph - always resolve if it's a factory function to get fresh graph each time # For factories, get a new graph instance per request to avoid concurrency issues - tool_client = None try: - if callable(self._graph_or_factory): - graph, tool_client = await self._resolve_graph_for_request(context) - elif self._resolved_graph is None: - await self._resolve_graph(context) - graph = self._resolved_graph - else: - graph = self._resolved_graph - input_data = self.state_converter.request_to_state(context) logger.debug(f"Converted input data: {input_data}") + + lg_run_context = await self.setup_lg_run_context() if not context.stream: - try: - response = await self.agent_run_non_stream(input_data, context, graph) - return response - finally: - # Close tool_client for non-streaming requests - if tool_client is not None: - try: - await tool_client.close() - logger.debug("Closed tool_client after non-streaming request") - except Exception as e: - logger.warning(f"Error closing tool_client: {e}") + response = await self.agent_run_non_stream(input_data, context, lg_run_context) + return response # For streaming, pass tool_client to be closed after streaming completes - return self.agent_run_astream(input_data, context, graph, tool_client) + return self.agent_run_astream(input_data, context, lg_run_context) except OAuthConsentRequiredError as e: - # Clean up tool_client if OAuth error occurs before streaming starts - if tool_client is not None: - await tool_client.close() - if not context.stream: response = await self.respond_with_oauth_consent(context, e) return response return self.respond_with_oauth_consent_astream(context, e) except Exception: - # Clean up tool_client if error occurs before streaming starts - if tool_client is not None: - await tool_client.close() raise - async def _resolve_graph(self, context: AgentRunContext): - 
"""Resolve the graph if it's a factory function (for single-use/first-time resolution). - Creates a ToolClient and calls the factory function with it. - This is used for the initial resolution to set up state_converter. - - :param context: The context for the agent run. - :type context: AgentRunContext - """ - if callable(self._graph_or_factory): - logger.debug("Resolving graph from factory function") - - - # Create ToolClient with credentials - tool_client = self.get_tool_client(tools = context.get_tools(), user_info = context.get_user_info()) # pylint: disable=no-member - tool_client_wrapper = ToolClient(tool_client) - tools = await tool_client_wrapper.list_tools() - # Call the factory function with ToolClient - # Support both sync and async factories - import inspect - result = self._graph_or_factory(tools) - if inspect.iscoroutine(result): - self._resolved_graph = await result - else: - self._resolved_graph = result - - # Validate and set up state converter if not already set from initialization - if not self.state_converter and self._resolved_graph is not None: - if is_state_schema_valid(self._resolved_graph.builder.state_schema): - self.state_converter = LanggraphMessageStateConverter() - else: - raise ValueError("state_converter is required for non-MessagesState graph.") - - logger.debug("Graph resolved successfully") - else: - # Should not reach here, but just in case - self._resolved_graph = self._graph_or_factory - - async def _resolve_graph_for_request(self, context: AgentRunContext): - """ - Resolve a fresh graph instance for a single request to avoid concurrency issues. - Creates a ToolClient and calls the factory function with it. - This method returns a new graph instance and the tool_client for cleanup. - - :param context: The context for the agent run. - :type context: AgentRunContext - :return: A tuple of (compiled graph instance, tool_client wrapper). 
- :rtype: tuple[CompiledStateGraph, ToolClient] - """ - logger.debug("Resolving fresh graph from factory function for request") - - # Create ToolClient with credentials - tool_client = self.get_tool_client(tools = context.get_tools(), user_info = context.get_user_info()) # pylint: disable=no-member - tool_client_wrapper = ToolClient(tool_client) - tools = await tool_client_wrapper.list_tools() - # Call the factory function with ToolClient - # Support both sync and async factories - import inspect - result = self._graph_or_factory(tools) # type: ignore[operator] - if inspect.iscoroutine(result): - graph = await result - else: - graph = result - - # Ensure state converter is set up (use existing one or create new) - if not self.state_converter: - if is_state_schema_valid(graph.builder.state_schema): - self.state_converter = LanggraphMessageStateConverter() - else: - raise ValueError("state_converter is required for non-MessagesState graph.") - - logger.debug("Fresh graph resolved successfully for request") - return graph, tool_client_wrapper + async def setup_lg_run_context(self): + resolved = await self._tool_resolver.resolve_from_registry() + return LanggraphRunContext(FoundryToolContext(resolved)) def init_tracing_internal(self, exporter_endpoint=None, app_insights_conn_str=None): # set env vars for langsmith @@ -246,7 +119,8 @@ def get_trace_attributes(self): attrs["service.namespace"] = "azure.ai.agentserver.langgraph" return attrs - async def agent_run_non_stream(self, input_data: dict, context: AgentRunContext, graph: CompiledStateGraph): + async def agent_run_non_stream(self, input_data: dict, context: AgentRunContext, + lg_run_context: LanggraphRunContext): """ Run the agent with non-streaming response. @@ -254,8 +128,8 @@ async def agent_run_non_stream(self, input_data: dict, context: AgentRunContext, :type input_data: dict :param context: The context for the agent run. 
:type context: AgentRunContext - :param graph: The compiled graph instance to use for this request. - :type graph: CompiledStateGraph + :param lg_run_context: The tool context for the agent run. + :type lg_run_context: FoundryToolContext :return: The response of the agent run. :rtype: dict @@ -264,20 +138,14 @@ async def agent_run_non_stream(self, input_data: dict, context: AgentRunContext, try: config = self.create_runnable_config(context) stream_mode = self.state_converter.get_stream_mode(context) - result = await graph.ainvoke(input_data, config=config, stream_mode=stream_mode) + result = await self._graph.ainvoke(input_data, config=config, stream_mode=stream_mode, context=lg_run_context) output = self.state_converter.state_to_response(result, context) return output except Exception as e: logger.error(f"Error during agent run: {e}", exc_info=True) raise e - async def agent_run_astream( - self, - input_data: dict, - context: AgentRunContext, - graph: CompiledStateGraph, - tool_client: "Optional[ToolClient]" = None - ): + async def agent_run_astream(self, input_data: dict, context: AgentRunContext, lg_run_context: LanggraphRunContext): """ Run the agent with streaming response. @@ -285,10 +153,8 @@ async def agent_run_astream( :type input_data: dict :param context: The context for the agent run. :type context: AgentRunContext - :param graph: The compiled graph instance to use for this request. - :type graph: CompiledStateGraph - :param tool_client: Optional ToolClient to close after streaming completes. - :type tool_client: Optional[ToolClient] + :param lg_run_context: The tool context for the agent run. + :type lg_run_context: FoundryToolContext :return: An async generator yielding the response stream events. 
:rtype: AsyncGenerator[dict] @@ -297,20 +163,12 @@ async def agent_run_astream( logger.info(f"Starting streaming agent run {context.response_id}") config = self.create_runnable_config(context) stream_mode = self.state_converter.get_stream_mode(context) - stream = graph.astream(input=input_data, config=config, stream_mode=stream_mode) + stream = self._graph.astream(input=input_data, config=config, stream_mode=stream_mode, context=lg_run_context) async for result in self.state_converter.state_to_response_stream(stream, context): yield result except Exception as e: logger.error(f"Error during streaming agent run: {e}", exc_info=True) raise e - finally: - # Close tool_client if provided - if tool_client is not None: - try: - await tool_client.close() - logger.debug("Closed tool_client after streaming completed") - except Exception as e: - logger.warning(f"Error closing tool_client in stream: {e}") def create_runnable_config(self, context: AgentRunContext) -> RunnableConfig: """ diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tool_client.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tool_client.py deleted file mode 100644 index 78baf96bee80..000000000000 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tool_client.py +++ /dev/null @@ -1,226 +0,0 @@ -# --------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. 
-# --------------------------------------------------------- -"""Tool client for integrating AzureAIToolClient with LangGraph.""" - -from typing import TYPE_CHECKING, Any, Dict, List, Optional - -from langchain_core.tools import StructuredTool -from pydantic import BaseModel, Field, create_model - -if TYPE_CHECKING: - from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient, FoundryTool - -# pylint: disable=client-accepts-api-version-keyword,missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs -class ToolClient: - """Client that integrates AzureAIToolClient with LangGraph. - - This class provides methods to list tools from AzureAIToolClient and convert them - to LangChain BaseTool format, as well as invoke tools in a format compatible with - LangGraph's create_react_agent and StateGraph. - - :param tool_client: The AzureAIToolClient instance to use for tool operations. - :type tool_client: ~azure.ai.agentserver.core.client.tools.aio.AzureAIToolClient - - .. admonition:: Example: - - .. code-block:: python - - from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient - from azure.ai.agentserver.langgraph import ToolClient - from azure.identity.aio import DefaultAzureCredential - - async with DefaultAzureCredential() as credential: - tool_client = AzureAIToolClient( - endpoint="https://", - credential=credential - ) - - client = ToolClient(tool_client) - - # List tools as LangChain BaseTool instances - tools = await client.list_tools() - - # Use with create_react_agent - from langgraph.prebuilt import create_react_agent - from langchain_openai import AzureChatOpenAI - - model = AzureChatOpenAI(model="gpt-4o") - agent = create_react_agent(model, tools) - - # Invoke a tool directly - result = await client.invoke_tool( - tool_name="my_tool", - tool_input={"param": "value"} - ) - - :meta private: - """ - - def __init__(self, tool_client: "AzureAIToolClient") -> None: - """Initialize the ToolClient. 
- - :param tool_client: The AzureAIToolClient instance to use for tool operations. - :type tool_client: ~azure.ai.agentserver.core.client.tools.aio.AzureAIToolClient - """ - self._tool_client = tool_client - self._langchain_tools_cache: Optional[List[StructuredTool]] = None - - async def list_tools(self) -> List[StructuredTool]: - """List all available tools as LangChain BaseTool instances. - - Retrieves tools from AzureAIToolClient and converts them to LangChain - StructuredTool instances that can be used with LangGraph's create_react_agent - or StateGraph. - - :return: List of LangChain StructuredTool instances. - :rtype: List[~langchain_core.tools.StructuredTool] - :raises ~azure.core.exceptions.HttpResponseError: - Raised for HTTP communication failures. - - .. admonition:: Example: - - .. code-block:: python - - client = ToolClient(tool_client) - tools = await client.list_tools() - - # Use with create_react_agent - agent = create_react_agent(model, tools) - """ - # Get tools from AzureAIToolClient - if self._langchain_tools_cache is not None: - return self._langchain_tools_cache - - azure_tools = await self._tool_client.list_tools() - self._langchain_tools_cache = [] - # Convert to LangChain StructuredTool instances - for azure_tool in azure_tools: - langchain_tool = self._convert_to_langchain_tool(azure_tool) - self._langchain_tools_cache.append(langchain_tool) - - return self._langchain_tools_cache - - def _convert_to_langchain_tool(self, azure_tool: "FoundryTool") -> StructuredTool: - """Convert an AzureAITool to a LangChain StructuredTool. - - :param azure_tool: The AzureAITool to convert. - :type azure_tool: ~azure.ai.agentserver.core.client.tools.aio.AzureAITool - :return: A LangChain StructuredTool instance. 
- :rtype: ~langchain_core.tools.StructuredTool - """ - # Get the input schema from the tool descriptor - input_schema = azure_tool.input_schema or {} - - # Create a Pydantic model for the tool's input schema - args_schema = self._create_pydantic_model( - tool_name=azure_tool.name, - schema=dict(input_schema) - ) - - # Create an async function that invokes the tool - async def tool_func(**kwargs: Any) -> str: - """Invoke the Azure AI tool. - - :return: The result from the tool invocation as a string. - :rtype: str - :raises OAuthConsentRequiredError: If OAuth consent is required for the tool invocation. - """ - # Let OAuthConsentRequiredError propagate up to be handled by the agent - result = await azure_tool(**kwargs) - # Convert result to string for LangChain compatibility - if isinstance(result, dict): - import json - return json.dumps(result) - return str(result) - - # Create a StructuredTool with the async function - structured_tool = StructuredTool( - name=azure_tool.name, - description=azure_tool.description or "No description available", - coroutine=tool_func, - args_schema=args_schema, - ) - - return structured_tool - - def _create_pydantic_model( - self, - tool_name: str, - schema: Dict[str, Any] - ) -> type[BaseModel]: - """Create a Pydantic model from a JSON schema. - - :param tool_name: Name of the tool (used for model name). - :type tool_name: str - :param schema: JSON schema for the tool's input parameters. - :type schema: Dict[str, Any] - :return: A Pydantic model class. 
- :rtype: type[BaseModel] - """ - # Get properties from schema - properties = schema.get("properties") or {} - required_fields = schema.get("required") or [] - - # Build field definitions for Pydantic model - field_definitions = {} - for prop_name, prop_schema in properties.items(): - prop_type = self._json_type_to_python_type(prop_schema.get("type", "string")) - prop_description = prop_schema.get("description", "") - - # Determine if field is required - is_required = prop_name in required_fields - - if is_required: - field_definitions[prop_name] = ( - prop_type, - Field(..., description=prop_description) - ) - else: - field_definitions[prop_name] = ( - prop_type, - Field(default=None, description=prop_description) - ) - - # Create the model dynamically - model_name = f"{tool_name.replace('-', '_').replace(' ', '_').title()}-Input" - return create_model(model_name, **field_definitions) # type: ignore[call-overload] - - def _json_type_to_python_type(self, json_type: str) -> type: - """Convert JSON schema type to Python type. - - :param json_type: JSON schema type string. - :type json_type: str - :return: Corresponding Python type. - :rtype: type - """ - type_mapping = { - "string": str, - "integer": int, - "number": float, - "boolean": bool, - "array": list, - "object": dict, - } - return type_mapping.get(json_type, str) - - async def close(self) -> None: - await self._tool_client.close() - - async def __aenter__(self) -> "ToolClient": - """Async context manager entry. - - :return: The ToolClient instance. - :rtype: ToolClient - """ - return self - - async def __aexit__(self, *exc_details: Any) -> None: - """Async context manager exit. - - :param exc_details: Exception details if an exception occurred. 
- :type exc_details: Any - :return: None - :rtype: None - """ - # The tool_client lifecycle is managed externally diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/__init__.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/__init__.py new file mode 100644 index 000000000000..daf51382381d --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/__init__.py @@ -0,0 +1,10 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- + +__path__ = __import__('pkgutil').extend_path(__path__, __name__) + +from ._builder import * +from ._chat_model import FoundryToolLateBindingChatModel +from ._middleware import FoundryToolBindingMiddleware +from ._tool_node import FoundryToolNodeWrappers, FoundryToolCallWrapper \ No newline at end of file diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_builder.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_builder.py new file mode 100644 index 000000000000..afba02e26a0a --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_builder.py @@ -0,0 +1,61 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from typing import List, Optional, Union, overload + +from langchain_core.language_models import BaseChatModel + +from azure.ai.agentserver.core.tools import FoundryToolLike +from ._chat_model import FoundryToolLateBindingChatModel +from ._middleware import FoundryToolBindingMiddleware +from ._resolver import get_registry + + +@overload +def use_foundry_tools(tools: List[FoundryToolLike], /) -> FoundryToolBindingMiddleware: + """Use foundry tools as middleware. + + :param tools: A list of foundry tools to bind. + :type tools: List[FoundryToolLike] + :return: A FoundryToolBindingMiddleware that binds the given tools. + :rtype: FoundryToolBindingMiddleware + """ + ... + + +@overload +def use_foundry_tools(model: BaseChatModel, tools: List[FoundryToolLike], /) -> FoundryToolLateBindingChatModel: + """Use foundry tools with a chat model. + + :param model: The chat model to bind the tools to. + :type model: BaseChatModel + :param tools: A list of foundry tools to bind. + :type tools: List[FoundryToolLike] + :return: A FoundryToolLateBindingChatModel that binds the given tools to the model. + :rtype: FoundryToolLateBindingChatModel + """ + ... + + +def use_foundry_tools( + model_or_tools: Union[BaseChatModel, List[FoundryToolLike]], + tools: Optional[List[FoundryToolLike]] = None, + /, +) -> Union[FoundryToolBindingMiddleware, FoundryToolLateBindingChatModel]: + """Use foundry tools with a chat model or as middleware. + + :param model_or_tools: The chat model to bind the tools to, or a list of foundry tools to bind as middleware. + :type model_or_tools: Union[BaseChatModel, List[FoundryToolLike]] + :param tools: A list of foundry tools to bind (required if model_or_tools is a chat model). + :type tools: Optional[List[FoundryToolLike]] + :return: A FoundryToolLateBindingChatModel or FoundryToolBindingMiddleware that binds the given tools. 
+ :rtype: Union[FoundryToolBindingMiddleware, FoundryToolLateBindingChatModel] + """ + if isinstance(model_or_tools, BaseChatModel): + if tools is None: + raise ValueError("Tools must be provided when a model is given.") + get_registry().extend(tools) + return FoundryToolLateBindingChatModel(model_or_tools, foundry_tools=tools) + else: + get_registry().extend(model_or_tools) + return FoundryToolBindingMiddleware(model_or_tools) diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_chat_model.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_chat_model.py new file mode 100644 index 000000000000..2ab97a4b0269 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_chat_model.py @@ -0,0 +1,112 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from __future__ import annotations + +from typing import Any, Callable, Dict, List, Optional, Sequence + +from langchain_core.callbacks import CallbackManagerForLLMRun +from langchain_core.language_models import BaseChatModel, LanguageModelInput +from langchain_core.messages import AIMessage, BaseMessage +from langchain_core.outputs import ChatResult +from langchain_core.runnables import Runnable, RunnableConfig +from langchain_core.tools import BaseTool +from langgraph.prebuilt import ToolNode + +from azure.ai.agentserver.core.tools import FoundryToolLike +from ._tool_node import FoundryToolCallWrapper, FoundryToolNodeWrappers + + +class FoundryToolLateBindingChatModel(BaseChatModel): + """A ChatModel that supports late binding of Foundry tools during invocation. + + This ChatModel allows you to specify Foundry tools that will be resolved and bound + at the time of invocation, rather than at the time of model creation. 
+ + :param delegate: The underlying chat model to delegate calls to. + :type delegate: BaseChatModel + :param foundry_tools: A list of Foundry tools to be resolved and bound during invocation. + :type foundry_tools: List[FoundryToolLike] + """ + + def __init__(self, delegate: BaseChatModel, foundry_tools: List[FoundryToolLike]): + super().__init__() + self._delegate = delegate + self._foundry_tools_to_bind = foundry_tools + self._bound_tools: List[Dict[str, Any] | type | Callable | BaseTool] = [] + self._bound_kwargs: dict[str, Any] = {} + + @property + def tool_node(self) -> ToolNode: + """Get a ToolNode that uses this chat model's Foundry tool call wrappers. + + :return: A ToolNode with Foundry tool call wrappers. + :rtype: ToolNode + """ + return ToolNode([], **self.tool_node_wrapper) + + @property + def tool_node_wrapper(self) -> FoundryToolNodeWrappers: + """Get the Foundry tool call wrappers for this chat model. + + Example:: + >>> from langgraph.prebuilt import ToolNode + >>> foundry_tool_bound_chat_model = FoundryToolLateBindingChatModel(...) + >>> ToolNode([...], **foundry_tool_bound_chat_model.as_wrappers()) + + :return: The Foundry tool call wrappers. 
+ :rtype: FoundryToolNodeWrappers + """ + return FoundryToolCallWrapper(self._foundry_tools_to_bind).as_wrappers() + + def bind_tools(self, + tools: Sequence[ + Dict[str, Any] | type | Callable | BaseTool # noqa: UP006 + ], + *, + tool_choice: str | None = None, + **kwargs: Any) -> Runnable[LanguageModelInput, AIMessage]: + """Record tools to be bound later during invocation.""" + + self._bound_tools.extend(tools) + if tool_choice is not None: + self._bound_kwargs["tool_choice"] = tool_choice + self._bound_kwargs.update(kwargs) + + return self + + def _bound_delegate_for_call(self) -> Runnable[LanguageModelInput, AIMessage]: + from .._context import LanggraphRunContext + + foundry_tools = LanggraphRunContext.get_current().tools.resolved_tools.get(self._foundry_tools_to_bind) + all_tools = self._bound_tools.copy() + all_tools.extend(foundry_tools) + + if not all_tools: + return self._delegate + + bound_kwargs = self._bound_kwargs or {} + return self._delegate.bind_tools(all_tools, **bound_kwargs) + + def invoke(self, input: Any, config: Optional[RunnableConfig] = None, **kwargs: Any) -> Any: + return self._bound_delegate_for_call().invoke(input, config=config, **kwargs) + + async def ainvoke(self, input: Any, config: Optional[RunnableConfig] = None, **kwargs: Any) -> Any: + return await self._bound_delegate_for_call().ainvoke(input, config=config, **kwargs) + + def stream(self, input: Any, config: Optional[RunnableConfig] = None, **kwargs: Any): + yield from self._bound_delegate_for_call().stream(input, config=config, **kwargs) + + async def astream(self, input: Any, config: Optional[RunnableConfig] = None, **kwargs: Any): + async for x in self._bound_delegate_for_call().astream(input, config=config, **kwargs): + yield x + + @property + def _llm_type(self) -> str: + return f"foundry_tool_binding_model({getattr(self.delegate, '_llm_type', type(self.delegate).__name__)})" + + def _generate(self, messages: list[BaseMessage], stop: list[str] | None = None, + 
run_manager: CallbackManagerForLLMRun | None = None, **kwargs: Any) -> ChatResult: + # should never be called as invoke/ainvoke/stream/astream are redirected to delegate + raise NotImplementedError() + diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_context.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_context.py new file mode 100644 index 000000000000..53789efec1a4 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_context.py @@ -0,0 +1,16 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from dataclasses import dataclass, field + +from ._resolver import ResolvedTools + + +@dataclass +class FoundryToolContext: + """Context for tool resolution. + + :param resolved_tools: The resolved tools of all registered foundry tools. + :type resolved_tools: ResolvedTools + """ + resolved_tools: ResolvedTools = field(default_factory=lambda: ResolvedTools([])) diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_middleware.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_middleware.py new file mode 100644 index 000000000000..d3b95e95e9c6 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_middleware.py @@ -0,0 +1,110 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +from __future__ import annotations + +from typing import Awaitable, Callable, List + +from langchain_core.tools import BaseTool, Tool +from langgraph.typing import ContextT, StateT_co + +from azure.ai.agentserver.core.tools import FoundryToolLike +from langchain.agents.middleware import AgentMiddleware, ModelRequest, ModelResponse +from langchain.agents.middleware.types import ModelCallResult +from langchain_core.messages import ToolMessage +from langgraph.prebuilt.tool_node import ToolCallRequest +from langgraph.types import Command + +from ._chat_model import FoundryToolLateBindingChatModel +from ._tool_node import FoundryToolCallWrapper + + +class FoundryToolBindingMiddleware(AgentMiddleware[StateT_co, ContextT]): + """Middleware that binds foundry tools to tool calls in the agent. + + :param foundry_tools: A list of foundry tools to bind. + :type foundry_tools: List[FoundryToolLike] + """ + + def __init__(self, foundry_tools: List[FoundryToolLike]): + super().__init__() + + # to ensure `create_agent()` will create a tool node when there are foundry tools to bind + # this tool will never be bound to model and called + self.tools = [self._dummy_tool()] if foundry_tools else [] + + self._foundry_tools_to_bind = foundry_tools + self._tool_call_wrapper = FoundryToolCallWrapper(self._foundry_tools_to_bind) + + @staticmethod + def _dummy_tool() -> BaseTool: + return Tool(name="__dummy_tool_by_foundry_middleware__", + func=lambda x: None, + description="__dummy_tool_by_foundry_middleware__") + + def wrap_model_call(self, request: ModelRequest, + handler: Callable[[ModelRequest], ModelResponse]) -> ModelCallResult: + """Wrap the model call to use a FoundryToolBindingChatModel. + + :param request: The model request. + :type request: ModelRequest + :param handler: The model call handler. + :type handler: Callable[[ModelRequest], ModelResponse] + :return: The model call result. 
+ :rtype: ModelCallResult + """ + return handler(self._wrap_model(request)) + + async def awrap_model_call(self, request: ModelRequest, + handler: Callable[[ModelRequest], Awaitable[ModelResponse]]) -> ModelCallResult: + """Asynchronously wrap the model call to use a FoundryToolBindingChatModel. + + :param request: The model request. + :type request: ModelRequest + :param handler: The model call handler. + :type handler: Callable[[ModelRequest], Awaitable[ModelResponse]] + :return: The model call result. + :rtype: ModelCallResult + """ + return await handler(self._wrap_model(request)) + + def _wrap_model(self, request: ModelRequest) -> ModelRequest: + """Wrap the model in the request with a FoundryToolBindingChatModel. + + :param request: The model request. + :type request: ModelRequest + :return: The modified model request. + :rtype: ModelRequest + """ + if not self._foundry_tools_to_bind: + return request + wrapper = FoundryToolLateBindingChatModel(request.model, self._foundry_tools_to_bind) + return request.override(model=wrapper) + + def wrap_tool_call(self, request: ToolCallRequest, + handler: Callable[[ToolCallRequest], ToolMessage | Command]) -> ToolMessage | Command: + """Wrap the tool call to use FoundryToolCallWrapper. + + :param request: The tool call request. + :type request: ToolCallRequest + :param handler: The tool call handler. + :type handler: Callable[[ToolCallRequest], ToolMessage | Command] + :return: The tool call result. + :rtype: ToolMessage | Command + """ + return self._tool_call_wrapper.call_tool(request, handler) + + async def awrap_tool_call( + self, + request: ToolCallRequest, + handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]]) -> ToolMessage | Command: + """Asynchronously wrap the tool call to use FoundryToolCallWrapper. + + :param request: The tool call request. + :type request: ToolCallRequest + :param handler: The tool call handler. 
+ :type handler: Callable[[ToolCallRequest], Awaitable[ToolMessage | Command]] + :return: The tool call result. + :rtype: ToolMessage | Command + """ + return await self._tool_call_wrapper.call_tool_async(request, handler) diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_resolver.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_resolver.py new file mode 100644 index 000000000000..e381343b6391 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_resolver.py @@ -0,0 +1,148 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from collections import defaultdict +from typing import Any, Dict, Iterable, List, Optional, Tuple, Union, overload + +from langchain_core.tools import BaseTool, StructuredTool +from pydantic import BaseModel, Field, create_model + +from azure.ai.agentserver.core import AgentServerContext +from azure.ai.agentserver.core.tools import FoundryTool, FoundryToolLike, ResolvedFoundryTool, SchemaDefinition, ensure_foundry_tool +from azure.ai.agentserver.core.tools.utils import ToolNameResolver + + +class ResolvedTools(Iterable[BaseTool]): + """A resolved view of foundry tools into LangChain tools. + + :param tools: An iterable of tuples of resolved foundry tools and their corresponding LangChain tools. + :type tools: Iterable[Tuple[ResolvedFoundryTool, BaseTool]] + """ + def __init__(self, tools: Iterable[Tuple[ResolvedFoundryTool, BaseTool]]): + self._by_source_id: Dict[FoundryTool, List[BaseTool]] = defaultdict(list) + for rt, t in tools: + self._by_source_id[rt.definition].append(t) + + @overload + def get(self, tool: FoundryToolLike, /) -> Iterable[BaseTool]: + """Get the LangChain tools for the given foundry tool. 
+ + :param tool: The foundry tool to get the LangChain tools for. + :type tool: FoundryToolLike + :return: The list of LangChain tools for the given foundry tool. + :rtype: Iterable[BaseTool] + """ + ... + + @overload + def get(self, tools: Iterable[FoundryToolLike], /) -> Iterable[BaseTool]: + """Get the LangChain tools for the given foundry tools. + + :param tools: The foundry tools to get the LangChain tools for. + :type tools: Iterable[FoundryToolLike] + :return: The list of LangChain tools for the given foundry tools. + :rtype: Iterable[BaseTool] + """ + ... + + @overload + def get(self) -> Iterable[BaseTool]: + """Get all LangChain tools. + + :return: The list of all LangChain tools. + :rtype: Iterable[BaseTool] + """ + ... + + def get(self, tool: Union[FoundryToolLike, Iterable[FoundryToolLike], None] = None) -> Iterable[BaseTool]: + """Get the LangChain tools for the given foundry tool(s), or all tools if none is given. + + :param tool: The foundry tool or tools to get the LangChain tools for, or None to get all tools. + :type tool: Union[FoundryToolLike, Iterable[FoundryToolLike], None] + :return: The list of LangChain tools for the given foundry tool(s), or all tools if none is given. + :rtype: Iterable[BaseTool] + """ + if tool is None: + yield from self + return + + tool_list = [tool] if not isinstance(tool, Iterable) else tool + for t in tool_list: + ft = ensure_foundry_tool(t) + yield from self._by_source_id.get(ft, []) + + def __iter__(self): + for tool_list in self._by_source_id.values(): + yield from tool_list + + +class FoundryLangChainToolResolver: + """Resolves foundry tools into LangChain tools. + + :param name_resolver: The tool name resolver. 
+ :type name_resolver: Optional[ToolNameResolver] + """ + def __init__(self, name_resolver: Optional[ToolNameResolver] = None): + self._name_resolver = name_resolver or ToolNameResolver() + + async def resolve_from_registry(self) -> ResolvedTools: + """Resolve the foundry tools from the global registry into LangChain tools. + + :return: The resolved LangChain tools. + :rtype: Iterable[Tuple[ResolvedFoundryTool, BaseTool]] + """ + return await self.resolve(get_registry()) + + async def resolve(self, foundry_tools: List[FoundryToolLike]) -> ResolvedTools: + """Resolve the given foundry tools into LangChain tools. + + :param foundry_tools: The foundry tools to resolve. + :type foundry_tools: List[FoundryToolLike] + :return: The resolved LangChain tools. + :rtype: Iterable[Tuple[ResolvedFoundryTool, BaseTool]] + """ + context = AgentServerContext.get() + resolved_foundry_tools = await context.tools.catalog.list(foundry_tools) + return ResolvedTools(tools=((tool, self._create_structured_tool(tool)) for tool in resolved_foundry_tools)) + + def _create_structured_tool(self, resolved_tool: ResolvedFoundryTool) -> StructuredTool: + name = self._name_resolver.resolve(resolved_tool) + args_schema = self._create_pydantic_model(name, resolved_tool.input_schema) + + async def _tool_func(**kwargs: Any) -> str: + result = await AgentServerContext.get().tools.invoke(resolved_tool, kwargs) + if isinstance(result, dict): + import json + return json.dumps(result) + return str(result) + + return StructuredTool.from_function( + name=name, + description=resolved_tool.details.description, + coroutine=_tool_func, + args_schema=args_schema + ) + + @classmethod + def _create_pydantic_model(cls, tool_name: str, input_schema: SchemaDefinition) -> type[BaseModel]: + field_definitions = {} + required_fields = input_schema.required + for prop_name, prop in input_schema.properties.items(): + py_type = prop.type.py_type + default = ... 
if prop_name in required_fields else None + field_definitions[prop_name] = (py_type, Field(default, description=prop.description)) + + model_name = f"{tool_name.replace('-', '_').replace(' ', '_').title()}-Input" + return create_model(model_name, **field_definitions) + + +_tool_registry: List[FoundryToolLike] = [] + + +def get_registry() -> List[FoundryToolLike]: + """Get the global foundry tool registry. + + :return: The list of registered foundry tools. + :rtype: List[FoundryToolLike] + """ + return _tool_registry diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_tool_node.py b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_tool_node.py new file mode 100644 index 000000000000..9dac2ec3a731 --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/azure/ai/agentserver/langgraph/tools/_tool_node.py @@ -0,0 +1,91 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# --------------------------------------------------------- +from typing import Awaitable, Callable, List, TypedDict, Union + +from azure.ai.agentserver.core.tools import FoundryToolLike +from langchain_core.messages import ToolMessage +from langgraph.prebuilt.tool_node import AsyncToolCallWrapper, ToolCallRequest, ToolCallWrapper +from langgraph.types import Command + +ToolInvocationResult = Union[ToolMessage, Command] +ToolInvocation = Callable[[ToolCallRequest], ToolInvocationResult] +AsyncToolInvocation = Callable[[ToolCallRequest], Awaitable[ToolInvocationResult]] + + +class FoundryToolNodeWrappers(TypedDict): + """A TypedDict for Foundry tool node wrappers. + + Example:: + >>> from langgraph.prebuilt import ToolNode + >>> call_wrapper = FoundryToolCallWrapper(...) + >>> ToolNode([...], **call_wrapper.as_wrappers()) + + :param wrap_tool_call: The synchronous tool call wrapper. 
+ :type wrap_tool_call: ToolCallWrapper + :param awrap_tool_call: The asynchronous tool call wrapper. + :type awrap_tool_call: AsyncToolCallWrapper + """ + + wrap_tool_call: ToolCallWrapper + + awrap_tool_call: AsyncToolCallWrapper + + +class FoundryToolCallWrapper: + """A ToolCallWrapper that tries to resolve invokable foundry tools from context if tool is not resolved yet.""" + def __init__(self, foundry_tools: List[FoundryToolLike]): + self._allowed_foundry_tools = foundry_tools + + def as_wrappers(self) -> FoundryToolNodeWrappers: + """Get the wrappers as a TypedDict. + + :return: The wrappers as a TypedDict. + :rtype: FoundryToolNodeWrappers + """ + return FoundryToolNodeWrappers( + wrap_tool_call=self.call_tool, + awrap_tool_call=self.call_tool_async, + ) + + def call_tool(self, request: ToolCallRequest, invocation: ToolInvocation) -> ToolInvocationResult: + """Call the tool, resolving foundry tools from context if necessary. + + :param request: The tool call request. + :type request: ToolCallRequest + :param invocation: The tool invocation function. + :type invocation: ToolInvocation + :return: The result of the tool invocation. + :rtype: ToolInvocationResult + """ + return invocation(self._maybe_calling_foundry_tool(request)) + + async def call_tool_async(self, request: ToolCallRequest, invocation: AsyncToolInvocation) -> ToolInvocationResult: + """Call the tool, resolving foundry tools from context if necessary. + + :param request: The tool call request. + :type request: ToolCallRequest + :param invocation: The tool invocation function. + :type invocation: AsyncToolInvocation + :return: The result of the tool invocation. 
+ :rtype: ToolInvocationResult + """ + return await invocation(self._maybe_calling_foundry_tool(request)) + + def _maybe_calling_foundry_tool(self, request: ToolCallRequest) -> ToolCallRequest: + if request.tool or not self._allowed_foundry_tools: + # tool is already resolved + return request + + from .._context import LanggraphRunContext + + tool_name = request.tool_call["name"] + for t in LanggraphRunContext.get_current().tools.resolved_tools.get(self._allowed_foundry_tools): + if t.name == tool_name: + return ToolCallRequest( + tool_call=request.tool_call, + tool=t, + state=request.state, + runtime=request.runtime, + ) + return request diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/react_agent_tool.py b/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/react_agent_tool.py new file mode 100644 index 000000000000..fcf5ae3eb7eb --- /dev/null +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/react_agent_tool.py @@ -0,0 +1,41 @@ +# --------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. 
+# --------------------------------------------------------- +import os + +from azure.identity import DefaultAzureCredential, get_bearer_token_provider +from dotenv import load_dotenv +from langchain.agents import create_agent +from langchain_core.tools import tool +from langchain_openai import AzureChatOpenAI +from langgraph.checkpoint.memory import MemorySaver + +from azure.ai.agentserver.langgraph import from_langgraph +from azure.ai.agentserver.langgraph.tools import use_foundry_tools + +load_dotenv() + +token_provider = get_bearer_token_provider( + DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default" +) + +memory = MemorySaver() +deployment_name = os.getenv("AZURE_OPENAI_CHAT_DEPLOYMENT_NAME", "gpt-4o") +model = AzureChatOpenAI(model=deployment_name, azure_ad_token_provider=token_provider) + +foundry_tools = use_foundry_tools([ + { + "type": "code_interpreter" + }, + # { + # "type": "mcp", + # "project_connection_id": "github_connection_id" + # } +]) + + +agent_executor = create_agent(model, checkpointer=memory, middleware=[foundry_tools]) + +if __name__ == "__main__": + # host the langgraph agent + from_langgraph(agent_executor).run() diff --git a/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/use_tool_client_example.py b/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/use_tool_client_example.py index 7daa62d0ec9f..2e64e67f3b11 100644 --- a/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/use_tool_client_example.py +++ b/sdk/agentserver/azure-ai-agentserver-langgraph/samples/tool_client_example/use_tool_client_example.py @@ -14,7 +14,7 @@ from langchain_openai import AzureChatOpenAI from langgraph.checkpoint.memory import MemorySaver -from azure.ai.agentserver.core.client.tools.aio import AzureAIToolClient +from azure.ai.agentserver.core.tools import FoundryToolClient from azure.ai.agentserver.langgraph import ToolClient, from_langgraph from azure.identity.aio 
import DefaultAzureCredential @@ -59,7 +59,7 @@ async def quickstart(): ] # Create the AzureAIToolClient # This client supports both MCP tools and Azure AI Tools API - tool_client = AzureAIToolClient( + tool_client = FoundryToolClient( endpoint=project_endpoint, credential=credential, tools=tool_definitions