refactor: how to add a node type in workflow (#558)
backend/domain/workflow/internal/nodes/database/adapt.go (new file, 236 lines added)
@@ -0,0 +1,236 @@
/*
 * Copyright 2025 coze-dev Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package database

import (
    "fmt"

    einoCompose "github.com/cloudwego/eino/compose"

    "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/canvas/convert"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/schema"
)

func setDatabaseInputsForNodeSchema(n *vo.Node, ns *schema.NodeSchema) (err error) {
    selectParam := n.Data.Inputs.SelectParam
    if selectParam != nil {
        err = applyDBConditionToSchema(ns, selectParam.Condition, n.Parent())
        if err != nil {
            return err
        }
    }

    insertParam := n.Data.Inputs.InsertParam
    if insertParam != nil {
        err = applyInsetFieldInfoToSchema(ns, insertParam.FieldInfo, n.Parent())
        if err != nil {
            return err
        }
    }

    deleteParam := n.Data.Inputs.DeleteParam
    if deleteParam != nil {
        err = applyDBConditionToSchema(ns, &deleteParam.Condition, n.Parent())
        if err != nil {
            return err
        }
    }

    updateParam := n.Data.Inputs.UpdateParam
    if updateParam != nil {
        err = applyDBConditionToSchema(ns, &updateParam.Condition, n.Parent())
        if err != nil {
            return err
        }
        err = applyInsetFieldInfoToSchema(ns, updateParam.FieldInfo, n.Parent())
        if err != nil {
            return err
        }
    }

    return nil
}

func applyDBConditionToSchema(ns *schema.NodeSchema, condition *vo.DBCondition, parentNode *vo.Node) error {
    if condition.ConditionList == nil {
        return nil
    }

    for idx, params := range condition.ConditionList {
        var right *vo.Param
        for _, param := range params {
            if param == nil {
                continue
            }
            if param.Name == "right" {
                right = param
                break
            }
        }

        if right == nil {
            continue
        }
        name := fmt.Sprintf("__condition_right_%d", idx)
        tInfo, err := convert.CanvasBlockInputToTypeInfo(right.Input)
        if err != nil {
            return err
        }
        ns.SetInputType(name, tInfo)
        sources, err := convert.CanvasBlockInputToFieldInfo(right.Input, einoCompose.FieldPath{name}, parentNode)
        if err != nil {
            return err
        }
        ns.AddInputSource(sources...)
    }

    return nil
}

func applyInsetFieldInfoToSchema(ns *schema.NodeSchema, fieldInfo [][]*vo.Param, parentNode *vo.Node) error {
    if len(fieldInfo) == 0 {
        return nil
    }

    for _, params := range fieldInfo {
        // Each FieldInfo is a list of params containing two elements:
        // the first sets the name of the field and the second is the corresponding value.
        p0 := params[0]
        p1 := params[1]
        name := p0.Input.Value.Content.(string) // must be a string
        tInfo, err := convert.CanvasBlockInputToTypeInfo(p1.Input)
        if err != nil {
            return err
        }
        name = "__setting_field_" + name
        ns.SetInputType(name, tInfo)
        sources, err := convert.CanvasBlockInputToFieldInfo(p1.Input, einoCompose.FieldPath{name}, parentNode)
        if err != nil {
            return err
        }
        ns.AddInputSource(sources...)
    }

    return nil
}

func buildClauseGroupFromCondition(condition *vo.DBCondition) (*database.ClauseGroup, error) {
    clauseGroup := &database.ClauseGroup{}
    if len(condition.ConditionList) == 1 {
        params := condition.ConditionList[0]
        clause, err := buildClauseFromParams(params)
        if err != nil {
            return nil, err
        }
        clauseGroup.Single = clause
    } else {
        relation, err := convertLogicTypeToRelation(condition.Logic)
        if err != nil {
            return nil, err
        }
        clauseGroup.Multi = &database.MultiClause{
            Clauses:  make([]*database.Clause, 0, len(condition.ConditionList)),
            Relation: relation,
        }
        for i := range condition.ConditionList {
            params := condition.ConditionList[i]
            clause, err := buildClauseFromParams(params)
            if err != nil {
                return nil, err
            }
            clauseGroup.Multi.Clauses = append(clauseGroup.Multi.Clauses, clause)
        }
    }

    return clauseGroup, nil
}

func buildClauseFromParams(params []*vo.Param) (*database.Clause, error) {
    var left, operation *vo.Param
    for _, p := range params {
        if p == nil {
            continue
        }
        if p.Name == "left" {
            left = p
            continue
        }
        if p.Name == "operation" {
            operation = p
            continue
        }
    }
    if left == nil {
        return nil, fmt.Errorf("left clause is required")
    }
    if operation == nil {
        return nil, fmt.Errorf("operation clause is required")
    }
    operator, err := operationToOperator(operation.Input.Value.Content.(string))
    if err != nil {
        return nil, err
    }
    clause := &database.Clause{
        Left:     left.Input.Value.Content.(string),
        Operator: operator,
    }

    return clause, nil
}

func convertLogicTypeToRelation(logicType vo.DatabaseLogicType) (database.ClauseRelation, error) {
    switch logicType {
    case vo.DatabaseLogicAnd:
        return database.ClauseRelationAND, nil
    case vo.DatabaseLogicOr:
        return database.ClauseRelationOR, nil
    default:
        return "", fmt.Errorf("logic type %v is invalid", logicType)
    }
}

func operationToOperator(s string) (database.Operator, error) {
    switch s {
    case "EQUAL":
        return database.OperatorEqual, nil
    case "NOT_EQUAL":
        return database.OperatorNotEqual, nil
    case "GREATER_THAN":
        return database.OperatorGreater, nil
    case "LESS_THAN":
        return database.OperatorLesser, nil
    case "GREATER_EQUAL":
        return database.OperatorGreaterOrEqual, nil
    case "LESS_EQUAL":
        return database.OperatorLesserOrEqual, nil
    case "IN":
        return database.OperatorIn, nil
    case "NOT_IN":
        return database.OperatorNotIn, nil
    case "IS_NULL":
        return database.OperatorIsNull, nil
    case "IS_NOT_NULL":
        return database.OperatorIsNotNull, nil
    case "LIKE":
        return database.OperatorLike, nil
    case "NOT_LIKE":
        return database.OperatorNotLike, nil
    }
    return "", fmt.Errorf("not a valid Operation string")
}
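The helpers above are shared by all of the database node types. For orientation, the sketch below is written for this write-up and is not part of the commit; it shows the shape a node type takes after this refactor, assuming the same package imports: a config struct whose Adapt turns the canvas node (*vo.Node) into a *schema.NodeSchema, whose Build validates the config and returns the runnable node, and a runnable struct whose Invoke does the work. RowCountConfig and rowCounter are hypothetical names used only for illustration; the real node types (CustomSQL, Delete, Insert, Query, Update) follow in the hunks below.

// Illustrative sketch only (not in this commit); assumes the imports of this package.
type RowCountConfig struct {
    DatabaseInfoID int64
}

func (r *RowCountConfig) Adapt(_ context.Context, n *vo.Node, _ ...nodes.AdaptOption) (*schema.NodeSchema, error) {
    ns := &schema.NodeSchema{
        Key:     vo.NodeKey(n.ID),
        Type:    entity.NodeTypeDatabaseQuery, // a real new node type would add its own entity.NodeType* constant
        Name:    n.Data.Meta.Title,
        Configs: r,
    }
    // Parse whatever the canvas node carries into the config, then declare the
    // node's inputs and output types on the schema.
    if err := convert.SetInputsForNodeSchema(n, ns); err != nil {
        return nil, err
    }
    if err := convert.SetOutputTypesForNodeSchema(n, ns); err != nil {
        return nil, err
    }
    return ns, nil
}

func (r *RowCountConfig) Build(_ context.Context, ns *schema.NodeSchema, _ ...schema.BuildOption) (any, error) {
    if r.DatabaseInfoID == 0 {
        return nil, errors.New("database info id is required and greater than 0")
    }
    return &rowCounter{
        databaseInfoID: r.DatabaseInfoID,
        outputTypes:    ns.OutputTypes,
        op:             database.GetDatabaseOperator(),
    }, nil
}

type rowCounter struct {
    databaseInfoID int64
    outputTypes    map[string]*vo.TypeInfo
    op             database.DatabaseOperator
}

func (r *rowCounter) Invoke(ctx context.Context, in map[string]any) (map[string]any, error) {
    // A real node would issue a request through r.op here and format the
    // response against r.outputTypes, as the database nodes below do.
    return map[string]any{"rowNum": int64(0)}, nil
}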
@@ -342,7 +342,7 @@ func responseFormatted(configOutput map[string]*vo.TypeInfo, response *database.
    return ret, nil
}

-func convertClauseGroupToConditionGroup(ctx context.Context, clauseGroup *database.ClauseGroup, input map[string]any) (*database.ConditionGroup, error) {
+func convertClauseGroupToConditionGroup(_ context.Context, clauseGroup *database.ClauseGroup, input map[string]any) (*database.ConditionGroup, error) {
    var (
        rightValue any
        ok         bool
@@ -394,13 +394,13 @@ func convertClauseGroupToConditionGroup(ctx context.Context, clauseGroup *databa
    return conditionGroup, nil
}

-func convertClauseGroupToUpdateInventory(ctx context.Context, clauseGroup *database.ClauseGroup, input map[string]any) (*UpdateInventory, error) {
+func convertClauseGroupToUpdateInventory(ctx context.Context, clauseGroup *database.ClauseGroup, input map[string]any) (*updateInventory, error) {
    conditionGroup, err := convertClauseGroupToConditionGroup(ctx, clauseGroup, input)
    if err != nil {
        return nil, err
    }
    fields := parseToInput(input)
-   inventory := &UpdateInventory{
+   inventory := &updateInventory{
        ConditionGroup: conditionGroup,
        Fields:         fields,
    }
@@ -19,48 +19,89 @@ package database
import (
    "context"
    "errors"
    "fmt"
    "reflect"
    "strconv"
    "strings"

    "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/entity"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/canvas/convert"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/schema"
    "github.com/coze-dev/coze-studio/backend/pkg/sonic"
)

type CustomSQLConfig struct {
-   DatabaseInfoID    int64
-   SQLTemplate       string
-   OutputConfig      map[string]*vo.TypeInfo
-   CustomSQLExecutor database.DatabaseOperator
+   DatabaseInfoID int64
+   SQLTemplate    string
}

-func NewCustomSQL(_ context.Context, cfg *CustomSQLConfig) (*CustomSQL, error) {
-   if cfg == nil {
-       return nil, errors.New("config is required")
+func (c *CustomSQLConfig) Adapt(_ context.Context, n *vo.Node, _ ...nodes.AdaptOption) (*schema.NodeSchema, error) {
+   ns := &schema.NodeSchema{
+       Key:     vo.NodeKey(n.ID),
+       Type:    entity.NodeTypeDatabaseCustomSQL,
+       Name:    n.Data.Meta.Title,
+       Configs: c,
    }
-   if cfg.DatabaseInfoID == 0 {

+   dsList := n.Data.Inputs.DatabaseInfoList
+   if len(dsList) == 0 {
+       return nil, fmt.Errorf("database info is required")
+   }
+   databaseInfo := dsList[0]

+   dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64)
+   if err != nil {
+       return nil, err
+   }
+   c.DatabaseInfoID = dsID

+   sql := n.Data.Inputs.SQL
+   if len(sql) == 0 {
+       return nil, fmt.Errorf("sql is required")
+   }

+   c.SQLTemplate = sql

+   if err = convert.SetInputsForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   if err = convert.SetOutputTypesForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   return ns, nil
+}

+func (c *CustomSQLConfig) Build(_ context.Context, ns *schema.NodeSchema, _ ...schema.BuildOption) (any, error) {
+   if c.DatabaseInfoID == 0 {
        return nil, errors.New("database info id is required and greater than 0")
    }
-   if cfg.SQLTemplate == "" {
+   if c.SQLTemplate == "" {
        return nil, errors.New("sql template is required")
    }
-   if cfg.CustomSQLExecutor == nil {
-       return nil, errors.New("custom sqler is required")
-   }

    return &CustomSQL{
-       config: cfg,
+       databaseInfoID:    c.DatabaseInfoID,
+       sqlTemplate:       c.SQLTemplate,
+       outputTypes:       ns.OutputTypes,
+       customSQLExecutor: database.GetDatabaseOperator(),
    }, nil
}

type CustomSQL struct {
-   config *CustomSQLConfig
+   databaseInfoID    int64
+   sqlTemplate       string
+   outputTypes       map[string]*vo.TypeInfo
+   customSQLExecutor database.DatabaseOperator
}

-func (c *CustomSQL) Execute(ctx context.Context, input map[string]any) (map[string]any, error) {
+func (c *CustomSQL) Invoke(ctx context.Context, input map[string]any) (map[string]any, error) {
    req := &database.CustomSQLRequest{
-       DatabaseInfoID: c.config.DatabaseInfoID,
+       DatabaseInfoID: c.databaseInfoID,
        IsDebugRun:     isDebugExecute(ctx),
        UserID:         getExecUserID(ctx),
    }
@@ -71,7 +112,7 @@ func (c *CustomSQL) Execute(ctx context.Context, input map[stri
    }

    templateSQL := ""
-   templateParts := nodes.ParseTemplate(c.config.SQLTemplate)
+   templateParts := nodes.ParseTemplate(c.sqlTemplate)
    sqlParams := make([]database.SQLParam, 0, len(templateParts))
    var nilError = errors.New("field is nil")
    for _, templatePart := range templateParts {
@@ -113,12 +154,12 @@ func (c *CustomSQL) Execute(ctx context.Context, input map[stri
    templateSQL = strings.Replace(templateSQL, "`?`", "?", -1)
    req.SQL = templateSQL
    req.Params = sqlParams
-   response, err := c.config.CustomSQLExecutor.Execute(ctx, req)
+   response, err := c.customSQLExecutor.Execute(ctx, req)
    if err != nil {
        return nil, err
    }

-   ret, err := responseFormatted(c.config.OutputConfig, response)
+   ret, err := responseFormatted(c.outputTypes, response)
    if err != nil {
        return nil, err
    }
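Condensed from TestCustomSQL_Execute below, this is roughly how the refactored node is driven outside the engine (a sketch; it assumes database.GetDatabaseOperator is mocked, as the test does with mockey): build the node from its config plus the schema's output types, then call Invoke with the template inputs. Each {{name}} placeholder in SQLTemplate becomes a "?" and the matching value is appended to the request params, which is what the test's validate callback asserts.

// Sketch condensed from the test body below; error handling and assertions elided.
cfg := &CustomSQLConfig{
    DatabaseInfoID: 111,
    SQLTemplate:    "select * from v1 where v1 = {{v1}} and v2 = '{{v2}}' and v3 = `{{v3}}`",
}

node, err := cfg.Build(context.Background(), &schema.NodeSchema{
    OutputTypes: map[string]*vo.TypeInfo{"rowNum": {Type: vo.DataTypeInteger}},
})
if err != nil {
    // handle error
}

// Expected request, per the test: SQL "select * from v1 where v1 = ? and v2 = ? and v3 = ?"
// with params v1_value, v2_value, v3_value in order.
out, err := node.(*CustomSQL).Invoke(context.Background(), map[string]any{
    "v1": "v1_value", "v2": "v2_value", "v3": "v3_value",
})
_ = out
_ = err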
@@ -28,6 +28,7 @@ import (
    "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database/databasemock"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/execute"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/schema"
)

type mockCustomSQLer struct {
@@ -39,7 +40,7 @@ func (m mockCustomSQLer) Execute() func(ctx context.Context, request *database.C
    m.validate(request)
    r := &database.Response{
        Objects: []database.Object{
-           database.Object{
+           {
                "v1": "v1_ret",
                "v2": "v2_ret",
            },
@@ -58,9 +59,9 @@ func TestCustomSQL_Execute(t *testing.T) {
    validate: func(req *database.CustomSQLRequest) {
        assert.Equal(t, int64(111), req.DatabaseInfoID)
        ps := []database.SQLParam{
-           database.SQLParam{Value: "v1_value"},
-           database.SQLParam{Value: "v2_value"},
-           database.SQLParam{Value: "v3_value"},
+           {Value: "v1_value"},
+           {Value: "v2_value"},
+           {Value: "v3_value"},
        }
        assert.Equal(t, ps, req.Params)
        assert.Equal(t, "select * from v1 where v1 = ? and v2 = ? and v3 = ?", req.SQL)
@@ -80,23 +81,25 @@ func TestCustomSQL_Execute(t *testing.T) {
    mockDatabaseOperator := databasemock.NewMockDatabaseOperator(ctrl)
    mockDatabaseOperator.EXPECT().Execute(gomock.Any(), gomock.Any()).DoAndReturn(mockSQLer.Execute()).AnyTimes()

+   defer mockey.Mock(database.GetDatabaseOperator).Return(mockDatabaseOperator).Build().UnPatch()

    cfg := &CustomSQLConfig{
-       DatabaseInfoID:    111,
-       SQLTemplate:       "select * from v1 where v1 = {{v1}} and v2 = '{{v2}}' and v3 = `{{v3}}`",
-       CustomSQLExecutor: mockDatabaseOperator,
-       OutputConfig: map[string]*vo.TypeInfo{
+       DatabaseInfoID: 111,
+       SQLTemplate:    "select * from v1 where v1 = {{v1}} and v2 = '{{v2}}' and v3 = `{{v3}}`",
+   }

+   c1, err := cfg.Build(context.Background(), &schema.NodeSchema{
+       OutputTypes: map[string]*vo.TypeInfo{
            "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeObject, Properties: map[string]*vo.TypeInfo{
                "v1": {Type: vo.DataTypeString},
                "v2": {Type: vo.DataTypeString},
            }}},
            "rowNum": {Type: vo.DataTypeInteger},
        },
-   }
-   cl := &CustomSQL{
-       config: cfg,
-   }
+   })
+   assert.NoError(t, err)

-   ret, err := cl.Execute(t.Context(), map[string]any{
+   ret, err := c1.(*CustomSQL).Invoke(t.Context(), map[string]any{
        "v1": "v1_value",
        "v2": "v2_value",
        "v3": "v3_value",
@@ -20,61 +20,102 @@ import (
    "context"
    "errors"
    "fmt"
    "strconv"

    "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/entity"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/canvas/convert"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/schema"
)

type DeleteConfig struct {
    DatabaseInfoID int64
    ClauseGroup    *database.ClauseGroup
-   OutputConfig   map[string]*vo.TypeInfo
-
-   Deleter database.DatabaseOperator
}
-type Delete struct {
-   config *DeleteConfig
-}

-func NewDelete(_ context.Context, cfg *DeleteConfig) (*Delete, error) {
-   if cfg == nil {
-       return nil, errors.New("config is required")
+func (d *DeleteConfig) Adapt(_ context.Context, n *vo.Node, _ ...nodes.AdaptOption) (*schema.NodeSchema, error) {
+   ns := &schema.NodeSchema{
+       Key:     vo.NodeKey(n.ID),
+       Type:    entity.NodeTypeDatabaseDelete,
+       Name:    n.Data.Meta.Title,
+       Configs: d,
    }
-   if cfg.DatabaseInfoID == 0 {

+   dsList := n.Data.Inputs.DatabaseInfoList
+   if len(dsList) == 0 {
+       return nil, fmt.Errorf("database info is required")
+   }
+   databaseInfo := dsList[0]

+   dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64)
+   if err != nil {
+       return nil, err
+   }
+   d.DatabaseInfoID = dsID

+   deleteParam := n.Data.Inputs.DeleteParam

+   clauseGroup, err := buildClauseGroupFromCondition(&deleteParam.Condition)
+   if err != nil {
+       return nil, err
+   }
+   d.ClauseGroup = clauseGroup

+   if err = setDatabaseInputsForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   if err = convert.SetOutputTypesForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   return ns, nil
+}

+func (d *DeleteConfig) Build(_ context.Context, ns *schema.NodeSchema, _ ...schema.BuildOption) (any, error) {
+   if d.DatabaseInfoID == 0 {
        return nil, errors.New("database info id is required and greater than 0")
    }

-   if cfg.ClauseGroup == nil {
+   if d.ClauseGroup == nil {
        return nil, errors.New("clauseGroup is required")
    }
-   if cfg.Deleter == nil {
-       return nil, errors.New("deleter is required")
-   }

    return &Delete{
-       config: cfg,
+       databaseInfoID: d.DatabaseInfoID,
+       clauseGroup:    d.ClauseGroup,
+       outputTypes:    ns.OutputTypes,
+       deleter:        database.GetDatabaseOperator(),
    }, nil
}

-func (d *Delete) Delete(ctx context.Context, in map[string]any) (map[string]any, error) {
-   conditionGroup, err := convertClauseGroupToConditionGroup(ctx, d.config.ClauseGroup, in)
+type Delete struct {
+   databaseInfoID int64
+   clauseGroup    *database.ClauseGroup
+   outputTypes    map[string]*vo.TypeInfo
+   deleter        database.DatabaseOperator
+}

+func (d *Delete) Invoke(ctx context.Context, in map[string]any) (map[string]any, error) {
+   conditionGroup, err := convertClauseGroupToConditionGroup(ctx, d.clauseGroup, in)
    if err != nil {
        return nil, err
    }
    request := &database.DeleteRequest{
-       DatabaseInfoID: d.config.DatabaseInfoID,
+       DatabaseInfoID: d.databaseInfoID,
        ConditionGroup: conditionGroup,
        IsDebugRun:     isDebugExecute(ctx),
        UserID:         getExecUserID(ctx),
    }

-   response, err := d.config.Deleter.Delete(ctx, request)
+   response, err := d.deleter.Delete(ctx, request)
    if err != nil {
        return nil, err
    }

-   ret, err := responseFormatted(d.config.OutputConfig, response)
+   ret, err := responseFormatted(d.outputTypes, response)
    if err != nil {
        return nil, err
    }
@@ -82,7 +123,7 @@ func (d *Delete) Delete(ctx context.Context, in map[string]any,
}

func (d *Delete) ToCallbackInput(_ context.Context, in map[string]any) (map[string]any, error) {
-   conditionGroup, err := convertClauseGroupToConditionGroup(context.Background(), d.config.ClauseGroup, in)
+   conditionGroup, err := convertClauseGroupToConditionGroup(context.Background(), d.clauseGroup, in)
    if err != nil {
        return nil, err
    }
@@ -90,7 +131,7 @@ func (d *Delete) ToCallbackInput(_ context.Context, in map[stri
}

func (d *Delete) toDatabaseDeleteCallbackInput(conditionGroup *database.ConditionGroup) (map[string]any, error) {
-   databaseID := d.config.DatabaseInfoID
+   databaseID := d.databaseInfoID
    result := make(map[string]any)

    result["databaseInfoList"] = []string{fmt.Sprintf("%d", databaseID)}
@@ -20,54 +20,84 @@ import (
    "context"
    "errors"
    "fmt"
    "strconv"

    "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/entity"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/canvas/convert"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/schema"
)

type InsertConfig struct {
    DatabaseInfoID int64
-   OutputConfig   map[string]*vo.TypeInfo
-   Inserter       database.DatabaseOperator
}

-type Insert struct {
-   config *InsertConfig
-}
-
-func NewInsert(_ context.Context, cfg *InsertConfig) (*Insert, error) {
-   if cfg == nil {
-       return nil, errors.New("config is required")
+func (i *InsertConfig) Adapt(_ context.Context, n *vo.Node, _ ...nodes.AdaptOption) (*schema.NodeSchema, error) {
+   ns := &schema.NodeSchema{
+       Key:     vo.NodeKey(n.ID),
+       Type:    entity.NodeTypeDatabaseInsert,
+       Name:    n.Data.Meta.Title,
+       Configs: i,
    }
-   if cfg.DatabaseInfoID == 0 {

+   dsList := n.Data.Inputs.DatabaseInfoList
+   if len(dsList) == 0 {
+       return nil, fmt.Errorf("database info is required")
+   }
+   databaseInfo := dsList[0]

+   dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64)
+   if err != nil {
+       return nil, err
+   }
+   i.DatabaseInfoID = dsID

+   if err = setDatabaseInputsForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   if err = convert.SetOutputTypesForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   return ns, nil
+}

+func (i *InsertConfig) Build(_ context.Context, ns *schema.NodeSchema, _ ...schema.BuildOption) (any, error) {
+   if i.DatabaseInfoID == 0 {
        return nil, errors.New("database info id is required and greater than 0")
    }

-   if cfg.Inserter == nil {
-       return nil, errors.New("inserter is required")
-   }
    return &Insert{
-       config: cfg,
+       databaseInfoID: i.DatabaseInfoID,
+       outputTypes:    ns.OutputTypes,
+       inserter:       database.GetDatabaseOperator(),
    }, nil
}

-func (is *Insert) Insert(ctx context.Context, input map[string]any) (map[string]any, error) {
+type Insert struct {
+   databaseInfoID int64
+   outputTypes    map[string]*vo.TypeInfo
+   inserter       database.DatabaseOperator
+}

+func (is *Insert) Invoke(ctx context.Context, input map[string]any) (map[string]any, error) {
    fields := parseToInput(input)
    req := &database.InsertRequest{
-       DatabaseInfoID: is.config.DatabaseInfoID,
+       DatabaseInfoID: is.databaseInfoID,
        Fields:         fields,
        IsDebugRun:     isDebugExecute(ctx),
        UserID:         getExecUserID(ctx),
    }

-   response, err := is.config.Inserter.Insert(ctx, req)
+   response, err := is.inserter.Insert(ctx, req)
    if err != nil {
        return nil, err
    }

-   ret, err := responseFormatted(is.config.OutputConfig, response)
+   ret, err := responseFormatted(is.outputTypes, response)
    if err != nil {
        return nil, err
    }
@@ -76,7 +106,7 @@ func (is *Insert) Insert(ctx context.Context, input map[string]
}

func (is *Insert) ToCallbackInput(_ context.Context, input map[string]any) (map[string]any, error) {
-   databaseID := is.config.DatabaseInfoID
+   databaseID := is.databaseInfoID
    fs := parseToInput(input)
    result := make(map[string]any)
    result["databaseInfoList"] = []string{fmt.Sprintf("%d", databaseID)}
@@ -20,68 +20,137 @@ import (
    "context"
    "errors"
    "fmt"
    "strconv"

    "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/entity"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/canvas/convert"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/schema"
)

type QueryConfig struct {
    DatabaseInfoID int64
    QueryFields    []string
    OrderClauses   []*database.OrderClause
-   OutputConfig   map[string]*vo.TypeInfo
    ClauseGroup    *database.ClauseGroup
    Limit          int64
-   Op             database.DatabaseOperator
}

-type Query struct {
-   config *QueryConfig
-}
-
-func NewQuery(_ context.Context, cfg *QueryConfig) (*Query, error) {
-   if cfg == nil {
-       return nil, errors.New("config is required")
+func (q *QueryConfig) Adapt(_ context.Context, n *vo.Node, _ ...nodes.AdaptOption) (*schema.NodeSchema, error) {
+   ns := &schema.NodeSchema{
+       Key:     vo.NodeKey(n.ID),
+       Type:    entity.NodeTypeDatabaseQuery,
+       Name:    n.Data.Meta.Title,
+       Configs: q,
    }
-   if cfg.DatabaseInfoID == 0 {

+   dsList := n.Data.Inputs.DatabaseInfoList
+   if len(dsList) == 0 {
+       return nil, fmt.Errorf("database info is required")
+   }
+   databaseInfo := dsList[0]

+   dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64)
+   if err != nil {
+       return nil, err
+   }
+   q.DatabaseInfoID = dsID

+   selectParam := n.Data.Inputs.SelectParam
+   q.Limit = selectParam.Limit

+   queryFields := make([]string, 0)
+   for _, v := range selectParam.FieldList {
+       queryFields = append(queryFields, strconv.FormatInt(v.FieldID, 10))
+   }
+   q.QueryFields = queryFields

+   orderClauses := make([]*database.OrderClause, 0, len(selectParam.OrderByList))
+   for _, o := range selectParam.OrderByList {
+       orderClauses = append(orderClauses, &database.OrderClause{
+           FieldID: strconv.FormatInt(o.FieldID, 10),
+           IsAsc:   o.IsAsc,
+       })
+   }
+   q.OrderClauses = orderClauses

+   clauseGroup := &database.ClauseGroup{}

+   if selectParam.Condition != nil {
+       clauseGroup, err = buildClauseGroupFromCondition(selectParam.Condition)
+       if err != nil {
+           return nil, err
+       }
+   }

+   q.ClauseGroup = clauseGroup

+   if err = setDatabaseInputsForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   if err = convert.SetOutputTypesForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   return ns, nil
+}

+func (q *QueryConfig) Build(_ context.Context, ns *schema.NodeSchema, _ ...schema.BuildOption) (any, error) {
+   if q.DatabaseInfoID == 0 {
        return nil, errors.New("database info id is required and greater than 0")
    }

-   if cfg.Limit == 0 {
+   if q.Limit == 0 {
        return nil, errors.New("limit is required and greater than 0")
    }

-   if cfg.Op == nil {
-       return nil, errors.New("op is required")
-   }
-
-   return &Query{config: cfg}, nil

+   return &Query{
+       databaseInfoID: q.DatabaseInfoID,
+       queryFields:    q.QueryFields,
+       orderClauses:   q.OrderClauses,
+       outputTypes:    ns.OutputTypes,
+       clauseGroup:    q.ClauseGroup,
+       limit:          q.Limit,
+       op:             database.GetDatabaseOperator(),
+   }, nil
}

-func (ds *Query) Query(ctx context.Context, in map[string]any) (map[string]any, error) {
-   conditionGroup, err := convertClauseGroupToConditionGroup(ctx, ds.config.ClauseGroup, in)
+type Query struct {
+   databaseInfoID int64
+   queryFields    []string
+   orderClauses   []*database.OrderClause
+   outputTypes    map[string]*vo.TypeInfo
+   clauseGroup    *database.ClauseGroup
+   limit          int64
+   op             database.DatabaseOperator
+}

+func (ds *Query) Invoke(ctx context.Context, in map[string]any) (map[string]any, error) {
+   conditionGroup, err := convertClauseGroupToConditionGroup(ctx, ds.clauseGroup, in)
    if err != nil {
        return nil, err
    }

    req := &database.QueryRequest{
-       DatabaseInfoID: ds.config.DatabaseInfoID,
-       OrderClauses:   ds.config.OrderClauses,
-       SelectFields:   ds.config.QueryFields,
-       Limit:          ds.config.Limit,
+       DatabaseInfoID: ds.databaseInfoID,
+       OrderClauses:   ds.orderClauses,
+       SelectFields:   ds.queryFields,
+       Limit:          ds.limit,
        IsDebugRun:     isDebugExecute(ctx),
        UserID:         getExecUserID(ctx),
    }

    req.ConditionGroup = conditionGroup

-   response, err := ds.config.Op.Query(ctx, req)
+   response, err := ds.op.Query(ctx, req)
    if err != nil {
        return nil, err
    }

-   ret, err := responseFormatted(ds.config.OutputConfig, response)
+   ret, err := responseFormatted(ds.outputTypes, response)
    if err != nil {
        return nil, err
    }
@@ -93,18 +162,18 @@ func notNeedTakeMapValue(op database.Operator) bool {
}

func (ds *Query) ToCallbackInput(ctx context.Context, in map[string]any) (map[string]any, error) {
-   conditionGroup, err := convertClauseGroupToConditionGroup(ctx, ds.config.ClauseGroup, in)
+   conditionGroup, err := convertClauseGroupToConditionGroup(ctx, ds.clauseGroup, in)
    if err != nil {
        return nil, err
    }

-   return toDatabaseQueryCallbackInput(ds.config, conditionGroup)
+   return ds.toDatabaseQueryCallbackInput(conditionGroup)
}

-func toDatabaseQueryCallbackInput(config *QueryConfig, conditionGroup *database.ConditionGroup) (map[string]any, error) {
+func (ds *Query) toDatabaseQueryCallbackInput(conditionGroup *database.ConditionGroup) (map[string]any, error) {
    result := make(map[string]any)

-   databaseID := config.DatabaseInfoID
+   databaseID := ds.databaseInfoID
    result["databaseInfoList"] = []string{fmt.Sprintf("%d", databaseID)}
    result["selectParam"] = map[string]any{}

@@ -116,8 +185,8 @@ func toDatabaseQueryCallbackInput(config *QueryConfig, conditionGroup *database.
        FieldID    string `json:"fieldId"`
        IsDistinct bool   `json:"isDistinct"`
    }
-   fieldList := make([]Field, 0, len(config.QueryFields))
-   for _, f := range config.QueryFields {
+   fieldList := make([]Field, 0, len(ds.queryFields))
+   for _, f := range ds.queryFields {
        fieldList = append(fieldList, Field{FieldID: f})
    }
    type Order struct {
@@ -126,7 +195,7 @@ func toDatabaseQueryCallbackInput(config *QueryConfig, conditionGroup *database.
    }

    OrderList := make([]Order, 0)
-   for _, c := range config.OrderClauses {
+   for _, c := range ds.orderClauses {
        OrderList = append(OrderList, Order{
            FieldID: c.FieldID,
            IsAsc:   c.IsAsc,
@@ -135,12 +204,11 @@ func toDatabaseQueryCallbackInput(config *QueryConfig, conditionGroup *database.
    result["selectParam"] = map[string]any{
        "condition":   condition,
        "fieldList":   fieldList,
-       "limit":       config.Limit,
+       "limit":       ds.limit,
        "orderByList": OrderList,
    }

    return result, nil
-
}

type ConditionItem struct {
@@ -216,6 +284,5 @@ func convertToLogic(rel database.ClauseRelation) (string, error) {
        return "AND", nil
    default:
        return "", fmt.Errorf("unknown clause relation %v", rel)
-
    }
}
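Note how the __condition_right_%d input keys declared by applyDBConditionToSchema in adapt.go are the same keys the built Query node consumes at Invoke time; the tests below feed it input maps such as the following fragment, reproduced from the test:

// The right-hand side of the idx-th condition arrives under "__condition_right_<idx>"
// and is substituted into the condition group before the query request is issued.
in := map[string]any{
    "__condition_right_0": 1,
    "__condition_right_1": 2,
}
_ = in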
@@ -30,6 +30,7 @@ import (
    "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database/databasemock"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/execute"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/schema"
)

type mockDsSelect struct {
@@ -82,16 +83,7 @@ func TestDataset_Query(t *testing.T) {
        },
        OrderClauses: []*database.OrderClause{{FieldID: "v1", IsAsc: false}},
        QueryFields:  []string{"v1", "v2"},
-       OutputConfig: map[string]*vo.TypeInfo{
-           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{
-               Type: vo.DataTypeObject,
-               Properties: map[string]*vo.TypeInfo{
-                   "v1": {Type: vo.DataTypeString},
-                   "v2": {Type: vo.DataTypeString},
-               },
-           }},
-           "rowNum": {Type: vo.DataTypeInteger},
-       },
        Limit: 10,
    }

    mockQuery := &mockDsSelect{objects: objects, t: t, validate: func(request *database.QueryRequest) {
@@ -106,17 +98,27 @@ func TestDataset_Query(t *testing.T) {
    mockDatabaseOperator := databasemock.NewMockDatabaseOperator(ctrl)
    mockDatabaseOperator.EXPECT().Query(gomock.Any(), gomock.Any()).DoAndReturn(mockQuery.Query())

-   cfg.Op = mockDatabaseOperator
+   defer mockey.Mock(database.GetDatabaseOperator).Return(mockDatabaseOperator).Build().UnPatch()

-   ds := Query{
-       config: cfg,
-   }
+   ds, err := cfg.Build(context.Background(), &schema.NodeSchema{
+       OutputTypes: map[string]*vo.TypeInfo{
+           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{
+               Type: vo.DataTypeObject,
+               Properties: map[string]*vo.TypeInfo{
+                   "v1": {Type: vo.DataTypeString},
+                   "v2": {Type: vo.DataTypeString},
+               },
+           }},
+           "rowNum": {Type: vo.DataTypeInteger},
+       },
+   })
+   assert.NoError(t, err)

    in := map[string]interface{}{
        "__condition_right_0": 1,
    }

-   result, err := ds.Query(t.Context(), in)
+   result, err := ds.(*Query).Invoke(t.Context(), in)
    assert.NoError(t, err)
    assert.Equal(t, "1", result["outputList"].([]any)[0].(database.Object)["v1"])
    assert.Equal(t, "2", result["outputList"].([]any)[0].(database.Object)["v2"])
@@ -137,17 +139,7 @@ func TestDataset_Query(t *testing.T) {

        OrderClauses: []*database.OrderClause{{FieldID: "v1", IsAsc: false}},
        QueryFields:  []string{"v1", "v2"},

-       OutputConfig: map[string]*vo.TypeInfo{
-           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{
-               Type: vo.DataTypeObject,
-               Properties: map[string]*vo.TypeInfo{
-                   "v1": {Type: vo.DataTypeString},
-                   "v2": {Type: vo.DataTypeString},
-               },
-           }},
-           "rowNum": {Type: vo.DataTypeInteger},
-       },
        Limit: 10,
    }

    objects := make([]database.Object, 0)
@@ -170,18 +162,28 @@ func TestDataset_Query(t *testing.T) {
    mockDatabaseOperator := databasemock.NewMockDatabaseOperator(ctrl)
    mockDatabaseOperator.EXPECT().Query(gomock.Any(), gomock.Any()).DoAndReturn(mockQuery.Query()).AnyTimes()

-   cfg.Op = mockDatabaseOperator
+   defer mockey.Mock(database.GetDatabaseOperator).Return(mockDatabaseOperator).Build().UnPatch()

-   ds := Query{
-       config: cfg,
-   }
+   ds, err := cfg.Build(context.Background(), &schema.NodeSchema{
+       OutputTypes: map[string]*vo.TypeInfo{
+           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{
+               Type: vo.DataTypeObject,
+               Properties: map[string]*vo.TypeInfo{
+                   "v1": {Type: vo.DataTypeString},
+                   "v2": {Type: vo.DataTypeString},
+               },
+           }},
+           "rowNum": {Type: vo.DataTypeInteger},
+       },
+   })
+   assert.NoError(t, err)

    in := map[string]any{
        "__condition_right_0": 1,
        "__condition_right_1": 2,
    }

-   result, err := ds.Query(t.Context(), in)
+   result, err := ds.(*Query).Invoke(t.Context(), in)
    assert.NoError(t, err)
    assert.NoError(t, err)
    assert.Equal(t, "1", result["outputList"].([]any)[0].(database.Object)["v1"])
@@ -199,17 +201,7 @@ func TestDataset_Query(t *testing.T) {
        },
        OrderClauses: []*database.OrderClause{{FieldID: "v1", IsAsc: false}},
        QueryFields:  []string{"v1", "v2"},

-       OutputConfig: map[string]*vo.TypeInfo{
-           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{
-               Type: vo.DataTypeObject,
-               Properties: map[string]*vo.TypeInfo{
-                   "v1": {Type: vo.DataTypeInteger},
-                   "v2": {Type: vo.DataTypeInteger},
-               },
-           }},
-           "rowNum": {Type: vo.DataTypeInteger},
-       },
        Limit: 10,
    }
    objects := make([]database.Object, 0)
    objects = append(objects, database.Object{
@@ -230,17 +222,27 @@ func TestDataset_Query(t *testing.T) {
    mockDatabaseOperator := databasemock.NewMockDatabaseOperator(ctrl)
    mockDatabaseOperator.EXPECT().Query(gomock.Any(), gomock.Any()).DoAndReturn(mockQuery.Query()).AnyTimes()

-   cfg.Op = mockDatabaseOperator
+   defer mockey.Mock(database.GetDatabaseOperator).Return(mockDatabaseOperator).Build().UnPatch()

-   ds := Query{
-       config: cfg,
-   }
+   ds, err := cfg.Build(context.Background(), &schema.NodeSchema{
+       OutputTypes: map[string]*vo.TypeInfo{
+           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{
+               Type: vo.DataTypeObject,
+               Properties: map[string]*vo.TypeInfo{
+                   "v1": {Type: vo.DataTypeInteger},
+                   "v2": {Type: vo.DataTypeInteger},
+               },
+           }},
+           "rowNum": {Type: vo.DataTypeInteger},
+       },
+   })
+   assert.NoError(t, err)

    in := map[string]any{
        "__condition_right_0": 1,
    }

-   result, err := ds.Query(t.Context(), in)
+   result, err := ds.(*Query).Invoke(t.Context(), in)
    assert.NoError(t, err)
    fmt.Println(result)
    assert.Equal(t, map[string]any{
@@ -261,18 +263,7 @@ func TestDataset_Query(t *testing.T) {
        },
        OrderClauses: []*database.OrderClause{{FieldID: "v1", IsAsc: false}},
        QueryFields:  []string{"v1", "v2"},

-       OutputConfig: map[string]*vo.TypeInfo{
-           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{
-               Type: vo.DataTypeObject,
-               Properties: map[string]*vo.TypeInfo{
-                   "v1": {Type: vo.DataTypeInteger},
-                   "v2": {Type: vo.DataTypeInteger},
-                   "v3": {Type: vo.DataTypeInteger},
-               },
-           }},
-           "rowNum": {Type: vo.DataTypeInteger},
-       },
        Limit: 10,
    }
    objects := make([]database.Object, 0)
    objects = append(objects, database.Object{
@@ -290,15 +281,26 @@ func TestDataset_Query(t *testing.T) {
    mockDatabaseOperator := databasemock.NewMockDatabaseOperator(ctrl)
    mockDatabaseOperator.EXPECT().Query(gomock.Any(), gomock.Any()).DoAndReturn(mockQuery.Query()).AnyTimes()

-   cfg.Op = mockDatabaseOperator
+   defer mockey.Mock(database.GetDatabaseOperator).Return(mockDatabaseOperator).Build().UnPatch()

-   ds := Query{
-       config: cfg,
-   }
+   ds, err := cfg.Build(context.Background(), &schema.NodeSchema{
+       OutputTypes: map[string]*vo.TypeInfo{
+           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{
+               Type: vo.DataTypeObject,
+               Properties: map[string]*vo.TypeInfo{
+                   "v1": {Type: vo.DataTypeInteger},
+                   "v2": {Type: vo.DataTypeInteger},
+                   "v3": {Type: vo.DataTypeInteger},
+               },
+           }},
+           "rowNum": {Type: vo.DataTypeInteger},
+       },
+   })
+   assert.NoError(t, err)

    in := map[string]any{"__condition_right_0": 1}

-   result, err := ds.Query(t.Context(), in)
+   result, err := ds.(*Query).Invoke(t.Context(), in)
    assert.NoError(t, err)
    fmt.Println(result)
    assert.Equal(t, int64(1), result["outputList"].([]any)[0].(database.Object)["v1"])
@@ -321,22 +323,7 @@ func TestDataset_Query(t *testing.T) {
        },
        OrderClauses: []*database.OrderClause{{FieldID: "v1", IsAsc: false}},
        QueryFields:  []string{"v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8"},

-       OutputConfig: map[string]*vo.TypeInfo{
-           "outputList": {Type: vo.DataTypeArray,
-               ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeObject, Properties: map[string]*vo.TypeInfo{
-                   "v1": {Type: vo.DataTypeInteger},
-                   "v2": {Type: vo.DataTypeNumber},
-                   "v3": {Type: vo.DataTypeBoolean},
-                   "v4": {Type: vo.DataTypeBoolean},
-                   "v5": {Type: vo.DataTypeTime},
-                   "v6": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeInteger}},
-                   "v7": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeBoolean}},
-                   "v8": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeNumber}},
-               },
-           }},
-           "rowNum": {Type: vo.DataTypeInteger},
-       },
        Limit: 10,
    }

    objects := make([]database.Object, 0)
@@ -363,17 +350,32 @@ func TestDataset_Query(t *testing.T) {
    mockDatabaseOperator := databasemock.NewMockDatabaseOperator(ctrl)
    mockDatabaseOperator.EXPECT().Query(gomock.Any(), gomock.Any()).DoAndReturn(mockQuery.Query()).AnyTimes()

-   cfg.Op = mockDatabaseOperator
+   defer mockey.Mock(database.GetDatabaseOperator).Return(mockDatabaseOperator).Build().UnPatch()

-   ds := Query{
-       config: cfg,
-   }
+   ds, err := cfg.Build(context.Background(), &schema.NodeSchema{
+       OutputTypes: map[string]*vo.TypeInfo{
+           "outputList": {Type: vo.DataTypeArray,
+               ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeObject, Properties: map[string]*vo.TypeInfo{
+                   "v1": {Type: vo.DataTypeInteger},
+                   "v2": {Type: vo.DataTypeNumber},
+                   "v3": {Type: vo.DataTypeBoolean},
+                   "v4": {Type: vo.DataTypeBoolean},
+                   "v5": {Type: vo.DataTypeTime},
+                   "v6": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeInteger}},
+                   "v7": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeBoolean}},
+                   "v8": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeNumber}},
+               },
+           }},
+           "rowNum": {Type: vo.DataTypeInteger},
+       },
+   })
+   assert.NoError(t, err)

    in := map[string]any{
        "__condition_right_0": 1,
    }

-   result, err := ds.Query(t.Context(), in)
+   result, err := ds.(*Query).Invoke(t.Context(), in)
    assert.NoError(t, err)
    object := result["outputList"].([]any)[0].(database.Object)

@@ -400,10 +402,7 @@ func TestDataset_Query(t *testing.T) {
        },
        OrderClauses: []*database.OrderClause{{FieldID: "v1", IsAsc: false}},
        QueryFields:  []string{"v1", "v2", "v3", "v4", "v5", "v6", "v7", "v8"},
-       OutputConfig: map[string]*vo.TypeInfo{
-           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeObject, Properties: map[string]*vo.TypeInfo{}}},
-           "rowNum": {Type: vo.DataTypeInteger},
-       },
        Limit: 10,
    }

    objects := make([]database.Object, 0)
@@ -429,16 +428,21 @@ func TestDataset_Query(t *testing.T) {
    mockDatabaseOperator := databasemock.NewMockDatabaseOperator(ctrl)
    mockDatabaseOperator.EXPECT().Query(gomock.Any(), gomock.Any()).DoAndReturn(mockQuery.Query()).AnyTimes()

-   cfg.Op = mockDatabaseOperator
-   ds := Query{
-       config: cfg,
-   }
+   defer mockey.Mock(database.GetDatabaseOperator).Return(mockDatabaseOperator).Build().UnPatch()

+   ds, err := cfg.Build(context.Background(), &schema.NodeSchema{
+       OutputTypes: map[string]*vo.TypeInfo{
+           "outputList": {Type: vo.DataTypeArray, ElemTypeInfo: &vo.TypeInfo{Type: vo.DataTypeObject, Properties: map[string]*vo.TypeInfo{}}},
+           "rowNum": {Type: vo.DataTypeInteger},
+       },
+   })
+   assert.NoError(t, err)

    in := map[string]any{
        "__condition_right_0": 1,
    }

-   result, err := ds.Query(t.Context(), in)
+   result, err := ds.(*Query).Invoke(t.Context(), in)
    assert.NoError(t, err)
    assert.Equal(t, result["outputList"].([]any)[0].(database.Object), database.Object{
        "v1": "1",
@@ -20,47 +20,93 @@ import (
    "context"
    "errors"
    "fmt"
    "strconv"

    "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/entity"
    "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/canvas/convert"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes"
+   "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/schema"
)

type UpdateConfig struct {
    DatabaseInfoID int64
    ClauseGroup    *database.ClauseGroup
-   OutputConfig   map[string]*vo.TypeInfo
-   Updater        database.DatabaseOperator
}

+func (u *UpdateConfig) Adapt(_ context.Context, n *vo.Node, _ ...nodes.AdaptOption) (*schema.NodeSchema, error) {
+   ns := &schema.NodeSchema{
+       Key:     vo.NodeKey(n.ID),
+       Type:    entity.NodeTypeDatabaseUpdate,
+       Name:    n.Data.Meta.Title,
+       Configs: u,
+   }

+   dsList := n.Data.Inputs.DatabaseInfoList
+   if len(dsList) == 0 {
+       return nil, fmt.Errorf("database info is required")
+   }
+   databaseInfo := dsList[0]

+   dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64)
+   if err != nil {
+       return nil, err
+   }
+   u.DatabaseInfoID = dsID

+   updateParam := n.Data.Inputs.UpdateParam
+   if updateParam == nil {
+       return nil, fmt.Errorf("update param is required")
+   }
+   clauseGroup, err := buildClauseGroupFromCondition(&updateParam.Condition)
+   if err != nil {
+       return nil, err
+   }
+   u.ClauseGroup = clauseGroup

+   if err = setDatabaseInputsForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   if err = convert.SetOutputTypesForNodeSchema(n, ns); err != nil {
+       return nil, err
+   }

+   return ns, nil
+}

+func (u *UpdateConfig) Build(_ context.Context, ns *schema.NodeSchema, _ ...schema.BuildOption) (any, error) {
+   if u.DatabaseInfoID == 0 {
+       return nil, errors.New("database info id is required and greater than 0")
+   }

+   if u.ClauseGroup == nil {
+       return nil, errors.New("clause group is required")
+   }

+   return &Update{
+       databaseInfoID: u.DatabaseInfoID,
+       clauseGroup:    u.ClauseGroup,
+       outputTypes:    ns.OutputTypes,
+       updater:        database.GetDatabaseOperator(),
+   }, nil
+}

type Update struct {
-   config *UpdateConfig
+   databaseInfoID int64
+   clauseGroup    *database.ClauseGroup
+   outputTypes    map[string]*vo.TypeInfo
+   updater        database.DatabaseOperator
}
-type UpdateInventory struct {

+type updateInventory struct {
    ConditionGroup *database.ConditionGroup
    Fields         map[string]any
}

-func NewUpdate(_ context.Context, cfg *UpdateConfig) (*Update, error) {
-   if cfg == nil {
-       return nil, errors.New("config is required")
-   }
-   if cfg.DatabaseInfoID == 0 {
-       return nil, errors.New("database info id is required and greater than 0")
-   }
-
-   if cfg.ClauseGroup == nil {
-       return nil, errors.New("clause group is required and greater than 0")
-   }
-
-   if cfg.Updater == nil {
-       return nil, errors.New("updater is required")
-   }
-
-   return &Update{config: cfg}, nil
-}

-func (u *Update) Update(ctx context.Context, in map[string]any) (map[string]any, error) {
-   inventory, err := convertClauseGroupToUpdateInventory(ctx, u.config.ClauseGroup, in)
+func (u *Update) Invoke(ctx context.Context, in map[string]any) (map[string]any, error) {
+   inventory, err := convertClauseGroupToUpdateInventory(ctx, u.clauseGroup, in)
    if err != nil {
        return nil, err
    }
@@ -72,20 +118,20 @@ func (u *Update) Update(ctx context.Context, in map[string]any,
    }

    req := &database.UpdateRequest{
-       DatabaseInfoID: u.config.DatabaseInfoID,
+       DatabaseInfoID: u.databaseInfoID,
        ConditionGroup: inventory.ConditionGroup,
        Fields:         fields,
        IsDebugRun:     isDebugExecute(ctx),
        UserID:         getExecUserID(ctx),
    }

-   response, err := u.config.Updater.Update(ctx, req)
+   response, err := u.updater.Update(ctx, req)

    if err != nil {
        return nil, err
    }

-   ret, err := responseFormatted(u.config.OutputConfig, response)
+   ret, err := responseFormatted(u.outputTypes, response)
    if err != nil {
        return nil, err
    }
@@ -94,15 +140,15 @@ func (u *Update) Update(ctx context.Context, in map[string]any,
}

func (u *Update) ToCallbackInput(_ context.Context, in map[string]any) (map[string]any, error) {
-   inventory, err := convertClauseGroupToUpdateInventory(context.Background(), u.config.ClauseGroup, in)
+   inventory, err := convertClauseGroupToUpdateInventory(context.Background(), u.clauseGroup, in)
    if err != nil {
        return nil, err
    }
    return u.toDatabaseUpdateCallbackInput(inventory)
}

-func (u *Update) toDatabaseUpdateCallbackInput(inventory *UpdateInventory) (map[string]any, error) {
-   databaseID := u.config.DatabaseInfoID
+func (u *Update) toDatabaseUpdateCallbackInput(inventory *updateInventory) (map[string]any, error) {
+   databaseID := u.databaseInfoID
    result := make(map[string]any)
    result["databaseInfoList"] = []string{fmt.Sprintf("%d", databaseID)}
    result["updateParam"] = map[string]any{}
@@ -128,6 +174,6 @@ func (u *Update) toDatabaseUpdateCallbackInput(inventory *UpdateInventory) (map[
        "condition": condition,
        "fieldInfo": fieldInfo,
    }
-   return result, nil

+   return result, nil
}