"""Example: call MCP tools through a LiteLLM Proxy using the OpenAI client."""
import asyncio
import os

from mcp import ClientSession
from mcp.client.sse import sse_client
from openai import AsyncOpenAI
from openai.types.chat import ChatCompletionUserMessageParam

from litellm.experimental_mcp_client.tools import (
    transform_mcp_tool_to_openai_tool,
    transform_openai_tool_call_request_to_mcp_tool_call_request,
)
async def main():
    """Run an end-to-end MCP tool-call demo against a LiteLLM Proxy.

    Flow:
      1. List the MCP tools exposed by the proxy.
      2. Convert them to the OpenAI tool format.
      3. Ask the model to choose a tool.
      4. Convert the model's tool call back to the MCP format.
      5. Execute the tool on the MCP server and print the result.
    """
    # NOTE(security): read credentials from the environment; the literal
    # fallback only preserves this example's original behavior — rotate and
    # remove it before sharing. Previously the key was duplicated in two
    # places (client kwarg and Authorization header); define it once.
    api_key = os.getenv("LITELLM_API_KEY", "sk-0jdcGHZJpX2oUJmyEs7zVA")
    base_url = os.getenv("LITELLM_BASE_URL", "https://litellm.seekkey.tech")

    # Point the OpenAI client at the LiteLLM Proxy.
    client = AsyncOpenAI(api_key=api_key, base_url=base_url)

    # Point the MCP client at the same proxy, authenticating with the same key.
    headers = {"Authorization": f"Bearer {api_key}"}
    async with sse_client(f"{base_url}/mcp/", headers=headers) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()

            # 1. List MCP tools available on the LiteLLM Proxy.
            mcp_tools = await session.list_tools()
            print("List of MCP tools for MCP server:", mcp_tools.tools)

            # Build the user message for the chat completion.
            messages = [
                ChatCompletionUserMessageParam(
                    content="Send an email about LiteLLM supporting MCP", role="user"
                )
            ]

            # 2. Convert MCP tools to the OpenAI tool format — the chat
            # completions API only accepts tools in the OpenAI schema.
            openai_tools = [
                transform_mcp_tool_to_openai_tool(tool) for tool in mcp_tools.tools
            ]

            # 3. Provide the MCP-derived tools to the configured model
            # (gemini-2.5-flash via the proxy; earlier comments incorrectly
            # said gpt-4o).
            response = await client.chat.completions.create(
                model="gemini/gemini-2.5-flash",
                messages=messages,
                tools=openai_tools,
                tool_choice="auto",
            )

            # 4. Handle a tool call from the model, if it made one.
            # Only the first tool call is executed; any extras are ignored.
            tool_calls = response.choices[0].message.tool_calls
            if tool_calls:
                tool_call = tool_calls[0]

                # 5. Convert the OpenAI tool call back into the MCP format
                # the MCP server expects.
                mcp_call = transform_openai_tool_call_request_to_mcp_tool_call_request(
                    openai_tool=tool_call.model_dump()
                )

                # 6. Execute the tool call on the MCP server.
                result = await session.call_tool(
                    name=mcp_call.name, arguments=mcp_call.arguments
                )

                print("Result:", result)
# Entry point — guarded so importing this module doesn't trigger network I/O.
if __name__ == "__main__":
    asyncio.run(main())