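"""Debug script for a local LiteLLM Proxy MCP endpoint: open an SSE
connection, list the MCP tools the proxy exposes, hand them to an LLM in
OpenAI tool format, and execute any tool call the model requests."""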
import asyncio

from openai import AsyncOpenAI
from openai.types.chat import ChatCompletionUserMessageParam

from mcp import ClientSession
from mcp.client.sse import sse_client

from litellm.experimental_mcp_client.tools import (
    transform_mcp_tool_to_openai_tool,
    transform_openai_tool_call_request_to_mcp_tool_call_request,
)
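
# The transform_* helpers above convert between MCP tool schemas and the
# OpenAI function-calling format, so the same tool list can be offered to
# chat.completions and then executed over the MCP session.
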
async def main():
    print("Starting local MCP connection test...")

    try:
        # Initialize clients
        print("Initializing OpenAI client...")
        client = AsyncOpenAI(api_key="sk-1234", base_url="http://localhost:4000")
        print("OpenAI client initialized")

        # Point the MCP client at the local LiteLLM Proxy
        print("Preparing to connect to the local MCP server...")

        # Wrap the connection in a timeout so a missing or misconfigured
        # server fails fast instead of hanging (asyncio.timeout needs Python 3.11+)
        try:
            async with asyncio.timeout(10):  # 10-second timeout
                print("Establishing SSE connection...")
                # sse_client yields a (read, write) stream pair for the session
                async with sse_client("http://localhost:4000/mcp/") as (read, write):
                    print("SSE connection established, initializing session...")
                    async with ClientSession(read, write) as session:
                        print("Initializing MCP session...")
                        await session.initialize()
                        print("MCP session initialized!")

                        # 1. List MCP tools on the LiteLLM Proxy
                        print("Fetching MCP tool list...")
                        mcp_tools = await session.list_tools()
                        print(f"Found {len(mcp_tools.tools)} MCP tools:")
                        for tool in mcp_tools.tools:
                            print(f"  - {tool.name}: {tool.description}")

                        if not mcp_tools.tools:
                            print("No MCP tools available")
                            return

                        # Create the user message
                        messages = [
                            ChatCompletionUserMessageParam(
                                content="List all available databases", role="user"
                            )
                        ]

                        # 2. Convert MCP tools to OpenAI tools
                        print("Converting MCP tools to OpenAI format...")
                        openai_tools = [
                            transform_mcp_tool_to_openai_tool(tool)
                            for tool in mcp_tools.tools
                        ]
                        print(f"Converted {len(openai_tools)} tools")

                        # 3. Call the LLM with the converted tools
                        print("Calling LLM...")
                        response = await client.chat.completions.create(
                            model="gemini/gemini-2.5-flash",
                            messages=messages,
                            tools=openai_tools,
                            tool_choice="auto",  # let the model decide whether to call a tool
                        )
                        print("LLM response received")

                        # 4. Handle a tool call if the LLM requested one
                        if response.choices[0].message.tool_calls:
                            print("LLM requested a tool call...")
                            tool_call = response.choices[0].message.tool_calls[0]
                            print(f"Tool call: {tool_call.function.name}")
                            print(f"Arguments: {tool_call.function.arguments}")

                            # 5. Convert back to MCP format and execute
                            mcp_call = transform_openai_tool_call_request_to_mcp_tool_call_request(
                                openai_tool=tool_call.model_dump()
                            )

                            print(f"Executing MCP tool call: {mcp_call.name}")
                            result = await session.call_tool(
                                name=mcp_call.name, arguments=mcp_call.arguments
                            )

                            print("Tool call result:")
                            print(result)
                        else:
                            print("LLM did not request a tool call")
                            print(f"LLM reply: {response.choices[0].message.content}")

        except asyncio.TimeoutError:
            print("Connection timed out! The local server may not be running, or MCP may not be configured")
            return

    except Exception as e:
        print(f"Error: {type(e).__name__}: {e}")
        import traceback

        traceback.print_exc()


if __name__ == "__main__":
    print("Starting local MCP debug test...")
    asyncio.run(main())
    print("Test complete")