liurenchaxin/litellm/config.yaml

# Models exposed by the proxy. "test-model" is the public name; requests to it
# are routed to openai/gpt-3.5-turbo using the stored api_key.
model_list:
  - model_name: test-model
    litellm_params:
      model: openai/gpt-3.5-turbo
      api_key: sk-test-key

# Proxy-wide settings. master_key is the admin key clients use to authenticate
# against the proxy and to issue virtual keys.
general_settings:
  master_key: sk-1234567890abcdef
  disable_spend_logs: false        # keep writing per-request spend logs
  disable_master_key_return: false
  enforce_user_param: false        # do not require a `user` field on requests

litellm_settings:
  set_verbose: true                # verbose debug logging
  drop_params: true                # drop params the target model does not support instead of erroring
  add_function_to_prompt: true     # inline function definitions into the prompt when the model lacks native function calling
  # Short aliases clients can use instead of the full MCP server names below.
  mcp_aliases:
    "deepwiki": "deepwiki_mcp_server"
    "test": "test_mcp_server"

# MCP servers the proxy can route tool calls to.
mcp_servers:
  deepwiki_mcp_server:
    url: "https://mcp.api-inference.modelscope.net/f9d3f201909c45/sse"
    transport: "http"
  test_mcp_server:
    url: "http://localhost:8080/mcp"
    transport: "http"
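
For context, a minimal sketch of calling the proxy this file configures, assuming (not stated in the config) that the proxy is started with litellm --config config.yaml and listens on its default address, http://localhost:4000. The proxy exposes an OpenAI-compatible API, so the standard OpenAI Python SDK works against it:

# Hypothetical client-side sketch; the port 4000 address and the litellm CLI
# invocation above are assumptions, not part of this config file.
from openai import OpenAI

client = OpenAI(
    api_key="sk-1234567890abcdef",     # master_key from general_settings
    base_url="http://localhost:4000",  # assumed default proxy address
)

# "test-model" is the public model_name from model_list; the proxy forwards
# the request to openai/gpt-3.5-turbo using the stored api_key.
response = client.chat.completions.create(
    model="test-model",
    messages=[{"role": "user", "content": "Hello from the LiteLLM proxy"}],
)
print(response.choices[0].message.content)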