|
1 | 1 | import asyncio |
2 | 2 | import contextlib |
| 3 | +from dataclasses import asdict, dataclass |
3 | 4 | import os |
4 | 5 | import socket |
| 6 | +from typing import Annotated |
5 | 7 | from unittest.mock import patch |
6 | 8 |
|
| 9 | +from mcp.types import CallToolResult, TextContent |
7 | 10 | import pytest |
8 | 11 | from starlette.middleware import Middleware |
9 | 12 | import uvicorn |
@@ -448,6 +451,106 @@ async def lifespan(app: Starlette): |
448 | 451 | response = result.messages[-1].content |
449 | 452 | assert "31.5" in response, "Invalid LLM response" |
450 | 453 |
|
    @patch(
        "splunklib.ai.agent._testing_local_tools_path",
        os.path.join(
            os.path.dirname(__file__),
            "testdata",
            "non_existent.py",
        ),
        # NOTE(review): the local-tools path deliberately points at a file that
        # does not exist — presumably to disable local tool loading so only the
        # MCP server's tools are available. TODO confirm against agent internals.
    )
    @patch("splunklib.ai.agent._testing_app_id", "app_id")
    @pytest.mark.asyncio
    async def test_tool_call_text_content_with_structured_output(self) -> None:
        """Verify that a tool returning BOTH plain text content and a
        structuredContent payload has both surfaced in the ToolMessage content
        seen by the agent, and that the LLM's final answer reflects the
        structured value.

        The test spins up an in-process FastMCP server behind a Starlette app,
        connects a Splunk service client to it over HTTP, and drives a real
        model through the Agent.
        """
        # Skip cleanly when the optional langchain_openai extra is absent.
        pytest.importorskip("langchain_openai")

        mcp = FastMCP("MCP Server", streamable_http_path="/")

        @dataclass
        class Result:
            # Temperature rendered as a string with unit suffix, e.g. "31.5C".
            celsius_degrees: str

        @mcp.tool(description="Returns the current temperature in the city")
        def temperature(city: str) -> Annotated[CallToolResult, Result]:
            if city == "Krakow":
                temperature = "31.5C"
            else:
                temperature = "22.1C"

            # The Splunk MCP Server App returns a succeeded message in the content
            # and a proper output in the structured_content field.
            return CallToolResult(
                content=[
                    TextContent(
                        type="text",
                        text=f"Tool call succeeded, temperature in {city} found",
                    )
                ],
                structuredContent=asdict(Result(temperature)),
            )

        @contextlib.asynccontextmanager
        async def lifespan(app: Starlette):
            # Keep the MCP session manager running for the lifetime of the app.
            async with mcp.session_manager.run():
                yield

        async with run_http_server(
            Starlette(
                routes=[
                    Mount("/services/mcp", app=mcp.streamable_http_app()),
                    Route(
                        "/services/authorization/tokens",
                        tokens_handler,
                        methods=["POST"],
                    ),
                ],
                lifespan=lifespan,
            )
        ) as (host, port):
            # connect() is blocking; run it off the event loop thread.
            service = await asyncio.to_thread(
                lambda: connect(
                    scheme="http",
                    host=host,
                    port=port,
                    splunkToken=AUTH_TOKEN,
                    autologin=True,
                    username="admin",  # not required, but set to avoid mocking the authentication/current-context endpoint
                ),
            )

            async with Agent(
                model=(await self.model()),
                system_prompt="You must use the available tools to perform requested operations",
                service=service,
                use_mcp_tools=True,
            ) as agent:
                result = await agent.invoke(
                    [
                        HumanMessage(
                            # NOTE(review): the two sentence fragments concatenate
                            # without a space ("temperature.Return") — harmless to
                            # the LLM, but flagging in case it was unintended.
                            content=(
                                "What is the weather like today in Krakow? Use the provided tools to check the temperature."
                                "Return a short response, containing the tool response."
                            ),
                        )
                    ]
                )

            found_tool_message = False
            for msg in result.messages:
                if isinstance(msg, ToolMessage):
                    found_tool_message = True
                    # Both text content and structured_content should be in the
                    # content of a tool response.
                    assert (
                        "Tool call succeeded, temperature in Krakow found"
                        in msg.content
                    )
                    assert '"celsius_degrees": "31.5C"' in msg.content
            assert found_tool_message, "missing ToolMessage in agent response"

            # The final LLM answer should carry through the structured value.
            response = result.messages[-1].content
            assert "31.5" in response, "Invalid LLM response"
| 553 | + |
451 | 554 |
|
452 | 555 | @contextlib.asynccontextmanager |
453 | 556 | async def run_http_server(app: Starlette): |
|
0 commit comments