From 8b91a640b94719b15dd1fc66d8d96b8a89bfab69 Mon Sep 17 00:00:00 2001
From: junwen-lee
Date: Wed, 6 Aug 2025 15:52:22 +0800
Subject: [PATCH] fix(singleagent): agent model configuration (#610)

---
 .../internal/agentflow/agent_flow_builder.go |   5 +-
 .../internal/agentflow/node_chat_model.go    | 115 +++++++++++++++++-
 2 files changed, 116 insertions(+), 4 deletions(-)

diff --git a/backend/domain/agent/singleagent/internal/agentflow/agent_flow_builder.go b/backend/domain/agent/singleagent/internal/agentflow/agent_flow_builder.go
index 35a13571..6a78d3e2 100644
--- a/backend/domain/agent/singleagent/internal/agentflow/agent_flow_builder.go
+++ b/backend/domain/agent/singleagent/internal/agentflow/agent_flow_builder.go
@@ -94,8 +94,9 @@ func BuildAgent(ctx context.Context, conf *Config) (r *AgentRunner, err error) {
 	}
 
 	chatModel, err := newChatModel(ctx, &config{
-		modelFactory: conf.ModelFactory,
-		modelInfo:    modelInfo,
+		modelFactory:      conf.ModelFactory,
+		modelInfo:         modelInfo,
+		agentModelSetting: conf.Agent.ModelInfo,
 	})
 	if err != nil {
 		return nil, err
diff --git a/backend/domain/agent/singleagent/internal/agentflow/node_chat_model.go b/backend/domain/agent/singleagent/internal/agentflow/node_chat_model.go
index efd479e6..b4ad5d78 100644
--- a/backend/domain/agent/singleagent/internal/agentflow/node_chat_model.go
+++ b/backend/domain/agent/singleagent/internal/agentflow/node_chat_model.go
@@ -20,15 +20,23 @@ import (
 	"context"
 	"fmt"
 
+	"github.com/cloudwego/eino-ext/components/model/deepseek"
+	"github.com/cloudwego/eino-ext/libs/acl/openai"
+
+	"github.com/coze-dev/coze-studio/backend/api/model/ocean/cloud/bot_common"
 	"github.com/coze-dev/coze-studio/backend/infra/contract/chatmodel"
 	"github.com/coze-dev/coze-studio/backend/infra/contract/modelmgr"
 	"github.com/coze-dev/coze-studio/backend/pkg/errorx"
+	"github.com/coze-dev/coze-studio/backend/pkg/lang/conv"
+	"github.com/coze-dev/coze-studio/backend/pkg/lang/ptr"
+	"github.com/coze-dev/coze-studio/backend/pkg/logs"
 	"github.com/coze-dev/coze-studio/backend/types/errno"
 )
 
 type config struct {
-	modelFactory chatmodel.Factory
-	modelInfo    *modelmgr.Model
+	modelFactory      chatmodel.Factory
+	modelInfo         *modelmgr.Model
+	agentModelSetting *bot_common.ModelInfo
 }
 
 func newChatModel(ctx context.Context, conf *config) (chatmodel.ToolCallingChatModel, error) {
@@ -43,6 +51,23 @@ func newChatModel(ctx context.Context, conf *config) (chatmodel.ToolCallingChatM
 		return nil, errorx.New(errno.ErrAgentSupportedChatModelProtocol,
 			errorx.KV("protocol", string(modelMeta.Protocol)))
 	}
+	logs.CtxInfof(ctx, "chatModel-before: %v", conv.DebugJsonToStr(modelDetail.Meta.ConnConfig))
+	if conf.agentModelSetting != nil {
+		if conf.agentModelSetting.TopP != nil {
+			modelDetail.Meta.ConnConfig.TopP = ptr.Of(float32(*conf.agentModelSetting.TopP))
+		}
+		if conf.agentModelSetting.Temperature != nil {
+			modelDetail.Meta.ConnConfig.Temperature = ptr.Of(float32(*conf.agentModelSetting.Temperature))
+		}
+		if conf.agentModelSetting.MaxTokens != nil {
+			modelDetail.Meta.ConnConfig.MaxTokens = ptr.Of(int(*conf.agentModelSetting.MaxTokens))
+		}
+		if conf.agentModelSetting.ResponseFormat != nil {
+			modelDetail.Meta = parseResponseFormat(conf.agentModelSetting.ResponseFormat, modelMeta)
+		}
+
+	}
+	logs.CtxInfof(ctx, "chatModel-after: %v", conv.DebugJsonToStr(modelDetail.Meta.ConnConfig))
 
 	cm, err := conf.modelFactory.CreateChatModel(ctx, modelDetail.Meta.Protocol, conf.modelInfo.Meta.ConnConfig)
 	if err != nil {
@@ -52,6 +77,92 @@ func newChatModel(ctx context.Context, conf *config) (chatmodel.ToolCallingChatM
 	return cm, nil
 }
 
+// parseResponseFormat applies the agent-configured response format to the protocol-specific connection config.
+func parseResponseFormat(responseFormat *bot_common.ModelResponseFormat, modelMeta modelmgr.ModelMeta) modelmgr.ModelMeta {
+	if responseFormat == nil {
+		return modelMeta
+	}
+
+	switch modelMeta.Protocol {
+	case chatmodel.ProtocolOpenAI:
+		if modelMeta.ConnConfig.OpenAI == nil {
+			modelMeta.ConnConfig.OpenAI = &chatmodel.OpenAIConfig{
+				ResponseFormat: &openai.ChatCompletionResponseFormat{
+					Type: responseFormatToOpenai(responseFormat),
+				},
+			}
+		} else {
+			if modelMeta.ConnConfig.OpenAI.ResponseFormat == nil {
+				modelMeta.ConnConfig.OpenAI.ResponseFormat = &openai.ChatCompletionResponseFormat{
+					Type: responseFormatToOpenai(responseFormat),
+				}
+			} else {
+				modelMeta.ConnConfig.OpenAI.ResponseFormat.Type = responseFormatToOpenai(responseFormat)
+			}
+		}
+	case chatmodel.ProtocolDeepseek:
+		if modelMeta.ConnConfig.Deepseek == nil {
+			modelMeta.ConnConfig.Deepseek = &chatmodel.DeepseekConfig{
+				ResponseFormatType: responseFormatToDeepseek(responseFormat),
+			}
+		} else {
+			modelMeta.ConnConfig.Deepseek.ResponseFormatType = responseFormatToDeepseek(responseFormat)
+		}
+	case chatmodel.ProtocolQwen:
+		if modelMeta.ConnConfig.Qwen == nil {
+			modelMeta.ConnConfig.Qwen = &chatmodel.QwenConfig{
+				ResponseFormat: &openai.ChatCompletionResponseFormat{
+					Type: responseFormatToOpenai(responseFormat),
+				},
+			}
+		} else {
+			if modelMeta.ConnConfig.Qwen.ResponseFormat == nil {
+				modelMeta.ConnConfig.Qwen.ResponseFormat = &openai.ChatCompletionResponseFormat{
+					Type: responseFormatToOpenai(responseFormat),
+				}
+			} else {
+				modelMeta.ConnConfig.Qwen.ResponseFormat.Type = responseFormatToOpenai(responseFormat)
+			}
+		}
+
+	default:
+		return modelMeta
+	}
+	return modelMeta
+}
+
+// responseFormatToDeepseek maps a bot_common response format to the deepseek equivalent (defaults to text).
+func responseFormatToDeepseek(responseFormat *bot_common.ModelResponseFormat) deepseek.ResponseFormatType {
+	var deepseekResponseFormatType deepseek.ResponseFormatType = deepseek.ResponseFormatTypeText
+	if responseFormat == nil {
+		return deepseekResponseFormatType
+	}
+	switch *responseFormat {
+	case bot_common.ModelResponseFormat_Text:
+		deepseekResponseFormatType = deepseek.ResponseFormatTypeText
+	case bot_common.ModelResponseFormat_JSON:
+		deepseekResponseFormatType = deepseek.ResponseFormatTypeJSONObject
+	}
+	return deepseekResponseFormatType
+}
+
+// responseFormatToOpenai maps a bot_common response format to the openai equivalent (defaults to text).
+func responseFormatToOpenai(responseFormat *bot_common.ModelResponseFormat) openai.ChatCompletionResponseFormatType {
+
+	openaiResponseFormatType := openai.ChatCompletionResponseFormatTypeText
+	if responseFormat == nil {
+		return openaiResponseFormatType
+	}
+	switch *responseFormat {
+	case bot_common.ModelResponseFormat_Text:
+		openaiResponseFormatType = openai.ChatCompletionResponseFormatTypeText
+	case bot_common.ModelResponseFormat_JSON:
+		openaiResponseFormatType = openai.ChatCompletionResponseFormatTypeJSONObject
+	}
+
+	return openaiResponseFormatType
+}
+
 func loadModelInfo(ctx context.Context, manager modelmgr.Manager, modelID int64) (*modelmgr.Model, error) {
 	if modelID == 0 {
 		return nil, fmt.Errorf("modelID is required")