diff --git a/backend/conf/model/template/model_template_ark.yaml b/backend/conf/model/template/model_template_ark.yaml
index 878edd7f..faf04cb8 100755
--- a/backend/conf/model/template/model_template_ark.yaml
+++ b/backend/conf/model/template/model_template_ark.yaml
@@ -1,4 +1,4 @@
-id: 2002
+id: 60000
 name: Doubao Model
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-1.5-lite.yaml b/backend/conf/model/template/model_template_ark_doubao-1.5-lite.yaml
index 520271d9..416c3e42 100755
--- a/backend/conf/model/template/model_template_ark_doubao-1.5-lite.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-1.5-lite.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60010
 name: Doubao-1.5-Lite
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-1.5-pro-256k.yaml b/backend/conf/model/template/model_template_ark_doubao-1.5-pro-256k.yaml
index 9f6755a6..5b6f6d6e 100755
--- a/backend/conf/model/template/model_template_ark_doubao-1.5-pro-256k.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-1.5-pro-256k.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60030
 name: Doubao-1.5-Pro-256k
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-1.5-pro-32k.yaml b/backend/conf/model/template/model_template_ark_doubao-1.5-pro-32k.yaml
index 23dd1d42..52535236 100755
--- a/backend/conf/model/template/model_template_ark_doubao-1.5-pro-32k.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-1.5-pro-32k.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60020
 name: Doubao-1.5-Pro-32k
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-1.5-thinking-pro.yaml b/backend/conf/model/template/model_template_ark_doubao-1.5-thinking-pro.yaml
index e4257a24..badc80a3 100755
--- a/backend/conf/model/template/model_template_ark_doubao-1.5-thinking-pro.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-1.5-thinking-pro.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60040
 name: Doubao-1.5-Thinking-Pro
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-1.5-thinking-vision-pro.yaml b/backend/conf/model/template/model_template_ark_doubao-1.5-thinking-vision-pro.yaml
index bc4a631e..60f5921f 100755
--- a/backend/conf/model/template/model_template_ark_doubao-1.5-thinking-vision-pro.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-1.5-thinking-vision-pro.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60050
 name: Doubao-1.5-Thinking-Vision-Pro
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-1.5-vision-lite.yaml b/backend/conf/model/template/model_template_ark_doubao-1.5-vision-lite.yaml
index 4a394074..03abf734 100755
--- a/backend/conf/model/template/model_template_ark_doubao-1.5-vision-lite.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-1.5-vision-lite.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60060
 name: Doubao-1.5-Vision-Lite
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-1.5-vision-pro.yaml b/backend/conf/model/template/model_template_ark_doubao-1.5-vision-pro.yaml
index ea09d6d4..ce264611 100755
--- a/backend/conf/model/template/model_template_ark_doubao-1.5-vision-pro.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-1.5-vision-pro.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60070
 name: Doubao-1.5-Vision-Pro
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-seed-1.6-flash.yaml b/backend/conf/model/template/model_template_ark_doubao-seed-1.6-flash.yaml
index 2f51bd82..a928bf27 100755
--- a/backend/conf/model/template/model_template_ark_doubao-seed-1.6-flash.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-seed-1.6-flash.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60080
 name: Doubao-Seed-1.6-Flash
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-seed-1.6-thinking.yaml b/backend/conf/model/template/model_template_ark_doubao-seed-1.6-thinking.yaml
index f716c917..bc6cd93c 100755
--- a/backend/conf/model/template/model_template_ark_doubao-seed-1.6-thinking.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-seed-1.6-thinking.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 60090
 name: Doubao-Seed-1.6-Thinking
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_doubao-seed-1.6.yaml b/backend/conf/model/template/model_template_ark_doubao-seed-1.6.yaml
index 9250e91b..5dc201fb 100755
--- a/backend/conf/model/template/model_template_ark_doubao-seed-1.6.yaml
+++ b/backend/conf/model/template/model_template_ark_doubao-seed-1.6.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 61010
 name: Doubao-Seed-1.6
 icon_uri: default_icon/doubao_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_volc_deepseek-r1.yaml b/backend/conf/model/template/model_template_ark_volc_deepseek-r1.yaml
index bcfe6d4b..4c2de1f2 100755
--- a/backend/conf/model/template/model_template_ark_volc_deepseek-r1.yaml
+++ b/backend/conf/model/template/model_template_ark_volc_deepseek-r1.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 61020
 name: Deepseek-R1-VolcEngine
 icon_uri: default_icon/deepseek_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ark_volc_deepseek-v3.yaml b/backend/conf/model/template/model_template_ark_volc_deepseek-v3.yaml
index d45b1e12..071f2946 100755
--- a/backend/conf/model/template/model_template_ark_volc_deepseek-v3.yaml
+++ b/backend/conf/model/template/model_template_ark_volc_deepseek-v3.yaml
@@ -1,4 +1,4 @@
-id: 65536
+id: 61030
 name: Deepseek-V3-VolcEngine
 icon_uri: default_icon/deepseek_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_basic.yaml b/backend/conf/model/template/model_template_basic.yaml
index e092e600..72cf45f0 100755
--- a/backend/conf/model/template/model_template_basic.yaml
+++ b/backend/conf/model/template/model_template_basic.yaml
@@ -1,4 +1,4 @@
-id: 100
+id: 63010
 name: test_model
 icon_uri: default_icon/test_icon_uri.png
 icon_url: test_icon_url
diff --git a/backend/conf/model/template/model_template_byteplus_seed-1.6-flash.yaml b/backend/conf/model/template/model_template_byteplus_seed-1.6-flash.yaml
new file mode 100755
index 00000000..ada7b668
--- /dev/null
+++ b/backend/conf/model/template/model_template_byteplus_seed-1.6-flash.yaml
@@ -0,0 +1,127 @@
+id: 64010
+name: Seed-1.6-Flash
+icon_uri: default_icon/doubao_v2.png
+icon_url: ""
+description:
+  zh: 有极致推理速度的多模态深度思考模型;同时支持文本和视觉理解。文本理解能力超过上一代 Lite 系列模型,视觉理解比肩友商 Pro 系列模型。
+  en: A multimodal deep thinking model with extreme reasoning speed; it supports both text and visual understanding. Its text comprehension surpasses the previous generation Lite series models, while its visual understanding rivals competitor Pro series models.
+default_parameters:
+  - name: temperature
+    label:
+      zh: 生成随机性
+      en: Temperature
+    desc:
+      zh: '- **temperature**: 调高温度会使得模型的输出更多样性和创新性,反之,降低温度会使输出内容更加遵循指令要求但减少多样性。建议不要与“Top p”同时调整。'
+      en: '**Temperature**:\n\n- When you increase this value, the model outputs more diverse and innovative content; when you decrease it, the model outputs less diverse content that strictly follows the given instructions.\n- It is recommended not to adjust this value with \"Top p\" at the same time.'
+    type: float
+    min: "0"
+    max: "1"
+    default_val:
+      balance: "0.8"
+      creative: "1"
+      default_val: "1.0"
+      precise: "0.3"
+    precision: 1
+    options: []
+    style:
+      widget: slider
+      label:
+        zh: 生成多样性
+        en: Generation diversity
+  - name: max_tokens
+    label:
+      zh: 最大回复长度
+      en: Response max length
+    desc:
+      zh: 控制模型输出的Tokens 长度上限。通常 100 Tokens 约等于 150 个中文汉字。
+      en: You can specify the maximum length of the tokens output through this value. Typically, 100 tokens are approximately equal to 150 Chinese characters.
+    type: int
+    min: "1"
+    max: "4096"
+    default_val:
+      default_val: "4096"
+    options: []
+    style:
+      widget: slider
+      label:
+        zh: 输入及输出设置
+        en: Input and output settings
+  - name: top_p
+    label:
+      zh: Top P
+      en: Top P
+    desc:
+      zh: '- **Top p 为累计概率**: 模型在生成输出时会从概率最高的词汇开始选择,直到这些词汇的总概率累积达到Top p 值。这样可以限制模型只选择这些高概率的词汇,从而控制输出内容的多样性。建议不要与“生成随机性”同时调整。'
+      en: '**Top P**:\n\n- An alternative to sampling with temperature, where only tokens within the top p probability mass are considered. For example, 0.1 means only the top 10% probability mass tokens are considered.\n- We recommend altering this or temperature, but not both.'
+    type: float
+    min: "0"
+    max: "1"
+    default_val:
+      default_val: "0.7"
+    precision: 2
+    options: []
+    style:
+      widget: slider
+      label:
+        zh: 生成多样性
+        en: Generation diversity
+  - name: response_format
+    label:
+      zh: 输出格式
+      en: Response format
+    desc:
+      zh: '- **JSON**: 将引导模型使用JSON格式输出'
+      en: '**Response Format**:\n\n- **JSON**: Uses JSON format for replies'
+    type: int
+    min: ""
+    max: ""
+    default_val:
+      default_val: "0"
+    options:
+      - label: Text
+        value: "0"
+      - label: JSON
+        value: "1"
+    style:
+      widget: radio_buttons
+      label:
+        zh: 输入及输出设置
+        en: Input and output settings
+meta:
+  protocol: ark
+  capability:
+    function_call: true
+    input_modal:
+      - text
+      - image
+      - video
+    input_tokens: 224000
+    json_mode: true
+    max_tokens: 256000
+    output_modal:
+      - text
+    output_tokens: 32000
+    prefix_caching: true
+    reasoning: true
+    prefill_response: false
+  conn_config:
+    # you could change the base_url to your own
+    base_url: "https://ark.ap-southeast.bytepluses.com/api/v3"
+    api_key: ""
+    timeout: 0s
+    model: "" # model_id / endpoint_id
+    temperature: 0.1
+    frequency_penalty: 0
+    presence_penalty: 0
+    max_tokens: 4096
+    top_p: 0.7
+    top_k: 0
+    stop: []
+    ark:
+      region: ""
+      access_key: ""
+      secret_key: ""
+      retry_times: null
+      custom_header: {}
+    custom: {}
+  status: 0
diff --git a/backend/conf/model/template/model_template_byteplus_seed-1.6.yaml b/backend/conf/model/template/model_template_byteplus_seed-1.6.yaml
new file mode 100755
index 00000000..1e2b0d32
--- /dev/null
+++ b/backend/conf/model/template/model_template_byteplus_seed-1.6.yaml
@@ -0,0 +1,127 @@
+id: 64020
+name: Doubao-Seed-1.6
+icon_uri: default_icon/doubao_v2.png
+icon_url: ""
+description:
+  zh: 全新多模态深度思考模型,同时支持 thinking、non-thinking、auto三种思考模式。其中 non-thinking 模型对比 doubao-1.5-pro-32k-250115 模型大幅提升。
+  en: 'A brand-new multimodal deep thinking model that supports three thinking modes: thinking, non-thinking, and auto. In non-thinking mode, it is significantly improved over the doubao-1.5-pro-32k-250115 model.'
+default_parameters:
+  - name: temperature
+    label:
+      zh: 生成随机性
+      en: Temperature
+    desc:
+      zh: '- **temperature**: 调高温度会使得模型的输出更多样性和创新性,反之,降低温度会使输出内容更加遵循指令要求但减少多样性。建议不要与“Top p”同时调整。'
+      en: '**Temperature**:\n\n- When you increase this value, the model outputs more diverse and innovative content; when you decrease it, the model outputs less diverse content that strictly follows the given instructions.\n- It is recommended not to adjust this value with \"Top p\" at the same time.'
+    type: float
+    min: "0"
+    max: "1"
+    default_val:
+      balance: "0.8"
+      creative: "1"
+      default_val: "1.0"
+      precise: "0.3"
+    precision: 1
+    options: []
+    style:
+      widget: slider
+      label:
+        zh: 生成多样性
+        en: Generation diversity
+  - name: max_tokens
+    label:
+      zh: 最大回复长度
+      en: Response max length
+    desc:
+      zh: 控制模型输出的Tokens 长度上限。通常 100 Tokens 约等于 150 个中文汉字。
+      en: You can specify the maximum length of the tokens output through this value. Typically, 100 tokens are approximately equal to 150 Chinese characters.
+    type: int
+    min: "1"
+    max: "4096"
+    default_val:
+      default_val: "4096"
+    options: []
+    style:
+      widget: slider
+      label:
+        zh: 输入及输出设置
+        en: Input and output settings
+  - name: top_p
+    label:
+      zh: Top P
+      en: Top P
+    desc:
+      zh: '- **Top p 为累计概率**: 模型在生成输出时会从概率最高的词汇开始选择,直到这些词汇的总概率累积达到Top p 值。这样可以限制模型只选择这些高概率的词汇,从而控制输出内容的多样性。建议不要与“生成随机性”同时调整。'
+      en: '**Top P**:\n\n- An alternative to sampling with temperature, where only tokens within the top p probability mass are considered. For example, 0.1 means only the top 10% probability mass tokens are considered.\n- We recommend altering this or temperature, but not both.'
+    type: float
+    min: "0"
+    max: "1"
+    default_val:
+      default_val: "0.7"
+    precision: 2
+    options: []
+    style:
+      widget: slider
+      label:
+        zh: 生成多样性
+        en: Generation diversity
+  - name: response_format
+    label:
+      zh: 输出格式
+      en: Response format
+    desc:
+      zh: '- **JSON**: 将引导模型使用JSON格式输出'
+      en: '**Response Format**:\n\n- **JSON**: Uses JSON format for replies'
+    type: int
+    min: ""
+    max: ""
+    default_val:
+      default_val: "0"
+    options:
+      - label: Text
+        value: "0"
+      - label: JSON
+        value: "1"
+    style:
+      widget: radio_buttons
+      label:
+        zh: 输入及输出设置
+        en: Input and output settings
+meta:
+  protocol: ark
+  capability:
+    function_call: true
+    input_modal:
+      - text
+      - image
+      - video
+    input_tokens: 224000
+    json_mode: true
+    max_tokens: 256000
+    output_modal:
+      - text
+    output_tokens: 32000
+    prefix_caching: true
+    reasoning: true
+    prefill_response: false
+  conn_config:
+    # you could change the base_url to your own
+    base_url: "https://ark.ap-southeast.bytepluses.com/api/v3"
+    api_key: ""
+    timeout: 0s
+    model: "" # model_id / endpoint_id
+    temperature: 0.1
+    frequency_penalty: 0
+    presence_penalty: 0
+    max_tokens: 4096
+    top_p: 0.7
+    top_k: 0
+    stop: []
+    ark:
+      region: ""
+      access_key: ""
+      secret_key: ""
+      retry_times: null
+      custom_header: {}
+    custom: {}
+  status: 0
diff --git a/backend/conf/model/template/model_template_claude.yaml b/backend/conf/model/template/model_template_claude.yaml
index 5b533a96..9ae9bd49 100755
--- a/backend/conf/model/template/model_template_claude.yaml
+++ b/backend/conf/model/template/model_template_claude.yaml
@@ -1,4 +1,4 @@
-id: 2006
+id: 65010
 name: Claude-3.5-Sonnet
 icon_uri: default_icon/claude_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_deepseek.yaml b/backend/conf/model/template/model_template_deepseek.yaml
index c5d78f50..db3db8d3 100755
--- a/backend/conf/model/template/model_template_deepseek.yaml
+++ b/backend/conf/model/template/model_template_deepseek.yaml
@@ -1,4 +1,4 @@
-id: 2004
+id: 66010
 name: DeepSeek-V3
 icon_uri: default_icon/deepseek_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_gemini.yaml b/backend/conf/model/template/model_template_gemini.yaml
index 638bf852..2ffddd3e 100755
--- a/backend/conf/model/template/model_template_gemini.yaml
+++ b/backend/conf/model/template/model_template_gemini.yaml
@@ -1,4 +1,4 @@
-id: 2007
+id: 67010
 name: Gemini-2.5-Flash
 icon_uri: default_icon/gemini_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_ollama.yaml b/backend/conf/model/template/model_template_ollama.yaml
index d6746749..66266dcb 100755
--- a/backend/conf/model/template/model_template_ollama.yaml
+++ b/backend/conf/model/template/model_template_ollama.yaml
@@ -1,4 +1,4 @@
-id: 2003
+id: 68010
 name: Gemma-3
 icon_uri: default_icon/ollama.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_openai.yaml b/backend/conf/model/template/model_template_openai.yaml
index 58ac5ad7..3859e9cd 100755
--- a/backend/conf/model/template/model_template_openai.yaml
+++ b/backend/conf/model/template/model_template_openai.yaml
@@ -1,4 +1,4 @@
-id: 2001
+id: 69010
 name: GPT-4o
 icon_uri: default_icon/openai_v2.png
 icon_url: ""
diff --git a/backend/conf/model/template/model_template_qwen.yaml b/backend/conf/model/template/model_template_qwen.yaml
index 8ee7067e..f45aa92b 100755
--- a/backend/conf/model/template/model_template_qwen.yaml
+++ b/backend/conf/model/template/model_template_qwen.yaml
@@ -1,4 +1,4 @@
-id: 2005
+id: 71010
 name: Qwen3-32B
 icon_uri: default_icon/qwen_v2.png
 icon_url: ""
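
The two new BytePlus templates ship with an empty conn_config (api_key and model are ""), so they presumably only become usable once that block is filled in for a concrete deployment. The snippet below is a minimal sketch of how the conn_config of model_template_byteplus_seed-1.6-flash.yaml might be completed; the API key and endpoint ID shown are illustrative placeholders, not values from this change.

# Hypothetical fill-in of the conn_config section; placeholder credentials only.
meta:
  conn_config:
    # keep the BytePlus Ark endpoint from the template, or point it at your own gateway
    base_url: "https://ark.ap-southeast.bytepluses.com/api/v3"
    api_key: "YOUR_BYTEPLUS_API_KEY"   # placeholder; use a key from your own account
    model: "ep-20250101000000-abcde"   # placeholder; a model_id or endpoint_id per the template comment
    temperature: 0.1
    max_tokens: 4096
    top_p: 0.7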