"""Manual test script for a local LiteLLM proxy: runs one chat completion, then probes the /mcp/ SSE endpoint and lists the advertised MCP tools."""
import asyncio
import sys
import traceback

from openai import AsyncOpenAI
from openai.types.chat import ChatCompletionUserMessageParam

from mcp import ClientSession
from mcp.client.sse import sse_client
async def main() -> None:
    """Drive the manual test: basic chat completion first, then the MCP endpoint.

    Connects to a LiteLLM proxy assumed to be running on localhost:4000.
    Any unexpected failure is reported with a full traceback — this is a
    top-level boundary for a manual test script, not library code.
    """
    print("测试LiteLLM的MCP功能...")

    try:
        # Initialize OpenAI client pointed at the local LiteLLM proxy.
        print("初始化OpenAI客户端...")
        client = AsyncOpenAI(api_key="sk-1234", base_url="http://localhost:4000")
        print("OpenAI客户端初始化完成")

        await _test_chat_completion(client)
        await _test_mcp_endpoint()

    except Exception as e:
        # Boundary handler: report and dump the traceback so a failed manual
        # run is still diagnosable.
        print(f"发生错误: {type(e).__name__}: {e}")
        traceback.print_exc()


async def _test_chat_completion(client: AsyncOpenAI) -> None:
    """Sanity-check that plain chat completion works through the proxy."""
    print("测试基本的LiteLLM功能...")
    response = await client.chat.completions.create(
        model="gemini-flash",
        messages=[
            {"role": "user", "content": "Hello, this is a test message."}
        ]
    )
    print(f"LiteLLM响应: {response.choices[0].message.content}")


async def _test_mcp_endpoint(timeout_s: float = 10.0) -> None:
    """Connect to the proxy's /mcp/ SSE endpoint and list the available tools.

    Args:
        timeout_s: Overall deadline for connect + initialize + list_tools
            (defaults to the original hard-coded 10 seconds).
    """
    print("\n测试MCP端点...")

    try:
        # Deadline covers the whole handshake; asyncio.timeout requires
        # Python 3.11+ (already used by the original script).
        async with asyncio.timeout(timeout_s):
            print("正在建立SSE连接到 /mcp/ 端点...")
            async with sse_client("http://localhost:4000/mcp/") as (read, write):
                print("SSE连接建立成功,初始化会话...")
                async with ClientSession(read, write) as session:
                    print("正在初始化MCP会话...")
                    await session.initialize()
                    print("MCP会话初始化成功!")

                    # List available tools advertised by the MCP server.
                    print("获取可用工具列表...")
                    tools = await session.list_tools()
                    print(f"找到 {len(tools.tools)} 个工具:")
                    for tool in tools.tools:
                        print(f" - {tool.name}: {tool.description}")

    except asyncio.TimeoutError:
        # A hang here most likely means the endpoint is absent or unsupported.
        print("MCP连接超时!")
        print("这可能意味着:")
        print("1. LiteLLM版本不支持MCP功能")
        print("2. MCP功能需要额外配置")
        print("3. /mcp/ 端点不存在")
        return
if __name__ == "__main__":
    # Manual entry point: announce start, run the async driver to
    # completion, then announce that the test finished.
    print("启动LiteLLM MCP测试...")
    test_coro = main()
    asyncio.run(test_coro)
    print("测试完成")