feat: manually mirror opencoze's code from bytedance
Change-Id: I09a73aadda978ad9511264a756b2ce51f5761adf
This commit is contained in:
780
backend/application/knowledge/convertor.go
Normal file
780
backend/application/knowledge/convertor.go
Normal file
@@ -0,0 +1,780 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package knowledge
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
modelCommon "github.com/coze-dev/coze-studio/backend/api/model/common"
|
||||
knowledgeModel "github.com/coze-dev/coze-studio/backend/api/model/crossdomain/knowledge"
|
||||
model "github.com/coze-dev/coze-studio/backend/api/model/crossdomain/knowledge"
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/flow/dataengine/dataset"
|
||||
"github.com/coze-dev/coze-studio/backend/application/upload"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/knowledge/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/knowledge/service"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/document"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/document/parser"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/ptr"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/slices"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/logs"
|
||||
)
|
||||
|
||||
// assertValAs parses the raw string val into a typed *document.ColumnData
// matching typ. An empty val yields a ColumnData carrying only the type,
// with every value field left nil. Returns an error when val cannot be
// parsed as typ, or when typ is not a supported column type.
func assertValAs(typ document.TableColumnType, val string) (*document.ColumnData, error) {
	cd := &document.ColumnData{
		Type: typ,
	}
	if val == "" {
		return cd, nil
	}
	switch typ {
	case document.TableColumnTypeString:
		return &document.ColumnData{
			Type:      document.TableColumnTypeString,
			ValString: &val,
		}, nil

	case document.TableColumnTypeInteger:
		i, err := strconv.ParseInt(val, 10, 64)
		if err != nil {
			return nil, err
		}
		return &document.ColumnData{
			Type:       document.TableColumnTypeInteger,
			ValInteger: &i,
		}, nil

	case document.TableColumnTypeTime:
		// Accepts both unix-second timestamps and "2006-01-02 15:04:05" time strings.
		i, err := strconv.ParseInt(val, 10, 64)
		if err == nil {
			t := time.Unix(i, 0)
			return &document.ColumnData{
				Type:    document.TableColumnTypeTime,
				ValTime: &t,
			}, nil

		}
		t, err := time.Parse(time.DateTime, val)
		if err != nil {
			return nil, err
		}
		return &document.ColumnData{
			Type:    document.TableColumnTypeTime,
			ValTime: &t,
		}, nil

	case document.TableColumnTypeNumber:
		f, err := strconv.ParseFloat(val, 64)
		if err != nil {
			return nil, err
		}

		return &document.ColumnData{
			Type:      document.TableColumnTypeNumber,
			ValNumber: &f,
		}, nil

	case document.TableColumnTypeBoolean:
		t, err := strconv.ParseBool(val)
		if err != nil {
			return nil, err
		}
		return &document.ColumnData{
			Type:       document.TableColumnTypeBoolean,
			ValBoolean: &t,
		}, nil
	case document.TableColumnTypeImage:
		// Image cells carry the raw string (URI/identifier) unmodified.
		return &document.ColumnData{
			Type:     document.TableColumnTypeImage,
			ValImage: &val,
		}, nil
	default:
		return nil, fmt.Errorf("[assertValAs] type not support, type=%d, val=%s", typ, val)
	}
}
|
||||
|
||||
func convertTableDataType2Entity(t dataset.TableDataType) service.TableDataType {
|
||||
switch t {
|
||||
case dataset.TableDataType_AllData:
|
||||
return service.AllData
|
||||
case dataset.TableDataType_OnlySchema:
|
||||
return service.OnlySchema
|
||||
case dataset.TableDataType_OnlyPreview:
|
||||
return service.OnlyPreview
|
||||
default:
|
||||
return service.AllData
|
||||
}
|
||||
}
|
||||
|
||||
func convertTableSheet2Entity(sheet *dataset.TableSheet) *entity.TableSheet {
|
||||
if sheet == nil {
|
||||
return nil
|
||||
}
|
||||
return &entity.TableSheet{
|
||||
SheetId: sheet.GetSheetID(),
|
||||
StartLineIdx: sheet.GetStartLineIdx(),
|
||||
HeaderLineIdx: sheet.GetHeaderLineIdx(),
|
||||
}
|
||||
}
|
||||
|
||||
func convertDocTableSheet2Model(sheet entity.TableSheet) *dataset.DocTableSheet {
|
||||
return &dataset.DocTableSheet{
|
||||
ID: sheet.SheetId,
|
||||
SheetName: sheet.SheetName,
|
||||
TotalRow: sheet.TotalRows,
|
||||
}
|
||||
}
|
||||
|
||||
func convertTableMeta(t []*entity.TableColumn) []*modelCommon.DocTableColumn {
|
||||
if len(t) == 0 {
|
||||
return nil
|
||||
}
|
||||
resp := make([]*modelCommon.DocTableColumn, 0)
|
||||
for i := range t {
|
||||
if t[i] == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
resp = append(resp, &modelCommon.DocTableColumn{
|
||||
ID: t[i].ID,
|
||||
ColumnName: t[i].Name,
|
||||
IsSemantic: t[i].Indexing,
|
||||
Desc: &t[i].Description,
|
||||
Sequence: t[i].Sequence,
|
||||
ColumnType: convertColumnType(t[i].Type),
|
||||
})
|
||||
}
|
||||
return resp
|
||||
}
|
||||
|
||||
func convertColumnType(t document.TableColumnType) *modelCommon.ColumnType {
|
||||
switch t {
|
||||
case document.TableColumnTypeString:
|
||||
return modelCommon.ColumnTypePtr(modelCommon.ColumnType_Text)
|
||||
case document.TableColumnTypeBoolean:
|
||||
return modelCommon.ColumnTypePtr(modelCommon.ColumnType_Boolean)
|
||||
case document.TableColumnTypeNumber:
|
||||
return modelCommon.ColumnTypePtr(modelCommon.ColumnType_Float)
|
||||
case document.TableColumnTypeTime:
|
||||
return modelCommon.ColumnTypePtr(modelCommon.ColumnType_Date)
|
||||
case document.TableColumnTypeInteger:
|
||||
return modelCommon.ColumnTypePtr(modelCommon.ColumnType_Number)
|
||||
case document.TableColumnTypeImage:
|
||||
return modelCommon.ColumnTypePtr(modelCommon.ColumnType_Image)
|
||||
default:
|
||||
return modelCommon.ColumnTypePtr(modelCommon.ColumnType_Text)
|
||||
}
|
||||
}
|
||||
|
||||
func convertDocTableSheet(t *entity.TableSheet) *modelCommon.DocTableSheet {
|
||||
if t == nil {
|
||||
return nil
|
||||
}
|
||||
return &modelCommon.DocTableSheet{
|
||||
ID: t.SheetId,
|
||||
SheetName: t.SheetName,
|
||||
TotalRow: t.TotalRows,
|
||||
}
|
||||
}
|
||||
|
||||
func convertSlice2Model(sliceEntity *entity.Slice) *dataset.SliceInfo {
|
||||
if sliceEntity == nil {
|
||||
return nil
|
||||
}
|
||||
return &dataset.SliceInfo{
|
||||
SliceID: sliceEntity.ID,
|
||||
Content: convertSliceContent(sliceEntity),
|
||||
Status: convertSliceStatus2Model(sliceEntity.SliceStatus),
|
||||
HitCount: sliceEntity.Hit,
|
||||
CharCount: sliceEntity.CharCount,
|
||||
Sequence: sliceEntity.Sequence,
|
||||
DocumentID: sliceEntity.DocumentID,
|
||||
ChunkInfo: "",
|
||||
}
|
||||
}
|
||||
|
||||
func convertSliceContent(s *entity.Slice) string {
|
||||
if len(s.RawContent) == 0 {
|
||||
return ""
|
||||
}
|
||||
if s.RawContent[0].Type == knowledgeModel.SliceContentTypeTable {
|
||||
tableData := make([]sliceContentData, 0, len(s.RawContent[0].Table.Columns))
|
||||
for _, col := range s.RawContent[0].Table.Columns {
|
||||
tableData = append(tableData, sliceContentData{
|
||||
ColumnID: strconv.FormatInt(col.ColumnID, 10),
|
||||
ColumnName: col.ColumnName,
|
||||
Value: col.GetNullableStringValue(),
|
||||
Desc: "",
|
||||
})
|
||||
}
|
||||
b, _ := json.Marshal(tableData)
|
||||
return string(b)
|
||||
}
|
||||
return s.GetSliceContent()
|
||||
}
|
||||
|
||||
// sliceContentData is the JSON wire format used by convertSliceContent to
// serialize one table cell of a slice.
type sliceContentData struct {
	ColumnID   string `json:"column_id"`   // column identifier rendered as a decimal string
	ColumnName string `json:"column_name"` // display name of the column
	Value      string `json:"value"`       // cell value rendered as text
	Desc       string `json:"desc"`        // always empty in the current conversion
}
|
||||
|
||||
func convertSliceStatus2Model(status knowledgeModel.SliceStatus) dataset.SliceStatus {
|
||||
switch status {
|
||||
case knowledgeModel.SliceStatusInit:
|
||||
return dataset.SliceStatus_PendingVectoring
|
||||
case knowledgeModel.SliceStatusFinishStore:
|
||||
return dataset.SliceStatus_FinishVectoring
|
||||
case knowledgeModel.SliceStatusFailed:
|
||||
return dataset.SliceStatus_Deactive
|
||||
default:
|
||||
return dataset.SliceStatus_PendingVectoring
|
||||
}
|
||||
}
|
||||
func convertFilterStrategy2Model(strategy *entity.ParsingStrategy) *dataset.FilterStrategy {
|
||||
if strategy == nil {
|
||||
return nil
|
||||
}
|
||||
if len(strategy.FilterPages) != 0 {
|
||||
return &dataset.FilterStrategy{
|
||||
FilterPage: slices.Transform(strategy.FilterPages, func(page int) int32 {
|
||||
return int32(page)
|
||||
}),
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
// convertDocument2Model assembles the dataset API DocumentInfo view of a
// document entity, including its chunking/parsing/filter strategies.
// A nil entity yields nil.
func convertDocument2Model(documentEntity *entity.Document) *dataset.DocumentInfo {
	if documentEntity == nil {
		return nil
	}
	chunkStrategy := convertChunkingStrategy2Model(documentEntity.ChunkingStrategy)
	filterStrategy := convertFilterStrategy2Model(documentEntity.ParsingStrategy)
	// The derived table sheet is intentionally discarded; only the parsing
	// strategy itself is surfaced on DocumentInfo.
	parseStrategy, _ := convertParsingStrategy2Model(documentEntity.ParsingStrategy)
	docInfo := &dataset.DocumentInfo{
		Name:       documentEntity.Name,
		DocumentID: documentEntity.ID,
		TosURI:     &documentEntity.URI,
		// Millisecond timestamps exposed as 32-bit unix seconds.
		CreateTime:            int32(documentEntity.CreatedAtMs / 1000),
		UpdateTime:            int32(documentEntity.UpdatedAtMs / 1000),
		CreatorID:             ptr.Of(documentEntity.CreatorID),
		SliceCount:            int32(documentEntity.SliceCount),
		Type:                  string(documentEntity.FileExtension),
		Size:                  int32(documentEntity.Size),
		CharCount:             int32(documentEntity.CharCount),
		Status:                convertDocumentStatus2Model(documentEntity.Status),
		HitCount:              int32(documentEntity.Hits),
		SourceType:            convertDocumentSource2Model(documentEntity.Source),
		FormatType:            convertDocumentTypeEntity2Dataset(documentEntity.Type),
		WebURL:                &documentEntity.URL,
		TableMeta:             convertTableColumns2Model(documentEntity.TableInfo.Columns),
		StatusDescript:        &documentEntity.StatusMsg,
		SpaceID:               ptr.Of(documentEntity.SpaceID),
		EditableAppendContent: nil,
		FilterStrategy:        filterStrategy,
		// NOTE(review): PreviewTosURL reuses the document URL rather than a
		// dedicated preview link — confirm this is intended.
		PreviewTosURL:   &documentEntity.URL,
		ChunkStrategy:   chunkStrategy,
		ParsingStrategy: parseStrategy,
	}
	return docInfo
}
|
||||
|
||||
func convertDocumentSource2Entity(sourceType dataset.DocumentSource) entity.DocumentSource {
|
||||
switch sourceType {
|
||||
case dataset.DocumentSource_Custom:
|
||||
return entity.DocumentSourceCustom
|
||||
case dataset.DocumentSource_Document:
|
||||
return entity.DocumentSourceLocal
|
||||
default:
|
||||
return entity.DocumentSourceLocal
|
||||
}
|
||||
}
|
||||
|
||||
func convertDocumentSource2Model(sourceType entity.DocumentSource) dataset.DocumentSource {
|
||||
switch sourceType {
|
||||
case entity.DocumentSourceCustom:
|
||||
return dataset.DocumentSource_Custom
|
||||
case entity.DocumentSourceLocal:
|
||||
return dataset.DocumentSource_Document
|
||||
default:
|
||||
return dataset.DocumentSource_Document
|
||||
}
|
||||
}
|
||||
|
||||
func convertDocumentStatus2Model(status entity.DocumentStatus) dataset.DocumentStatus {
|
||||
switch status {
|
||||
case entity.DocumentStatusDeleted:
|
||||
return dataset.DocumentStatus_Deleted
|
||||
case entity.DocumentStatusEnable, entity.DocumentStatusInit:
|
||||
return dataset.DocumentStatus_Enable
|
||||
case entity.DocumentStatusFailed:
|
||||
return dataset.DocumentStatus_Failed
|
||||
default:
|
||||
return dataset.DocumentStatus_Processing
|
||||
}
|
||||
}
|
||||
|
||||
func convertTableColumns2Entity(columns []*dataset.TableColumn) []*entity.TableColumn {
|
||||
if len(columns) == 0 {
|
||||
return nil
|
||||
}
|
||||
columnEntities := make([]*entity.TableColumn, 0, len(columns))
|
||||
for i := range columns {
|
||||
columnEntities = append(columnEntities, &entity.TableColumn{
|
||||
ID: columns[i].GetID(),
|
||||
Name: columns[i].GetColumnName(),
|
||||
Type: convertColumnType2Entity(columns[i].GetColumnType()),
|
||||
Description: columns[i].GetDesc(),
|
||||
Indexing: columns[i].GetIsSemantic(),
|
||||
Sequence: columns[i].GetSequence(),
|
||||
})
|
||||
}
|
||||
return columnEntities
|
||||
}
|
||||
|
||||
func convertTableColumns2Model(columns []*entity.TableColumn) []*dataset.TableColumn {
|
||||
if len(columns) == 0 {
|
||||
return nil
|
||||
}
|
||||
columnModels := make([]*dataset.TableColumn, 0, len(columns))
|
||||
for i := range columns {
|
||||
columnType := convertColumnType2Model(columns[i].Type)
|
||||
columnModels = append(columnModels, &dataset.TableColumn{
|
||||
ID: columns[i].ID,
|
||||
ColumnName: columns[i].Name,
|
||||
ColumnType: &columnType,
|
||||
Desc: &columns[i].Description,
|
||||
IsSemantic: columns[i].Indexing,
|
||||
Sequence: columns[i].Sequence,
|
||||
})
|
||||
}
|
||||
return columnModels
|
||||
}
|
||||
|
||||
func convertTableColumnDataSlice(cols []*entity.TableColumn, data []*document.ColumnData) (map[string]string, error) {
|
||||
if len(cols) != len(data) {
|
||||
return nil, fmt.Errorf("[convertTableColumnDataSlice] invalid cols and vals, len(cols)=%d, len(vals)=%d", len(cols), len(data))
|
||||
}
|
||||
|
||||
resp := make(map[string]string, len(data))
|
||||
for i := range data {
|
||||
col := cols[i]
|
||||
val := data[i]
|
||||
content := ""
|
||||
if val != nil {
|
||||
content = val.GetStringValue()
|
||||
}
|
||||
resp[strconv.FormatInt(col.Sequence, 10)] = content
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func convertColumnType2Model(columnType document.TableColumnType) dataset.ColumnType {
|
||||
switch columnType {
|
||||
case document.TableColumnTypeString:
|
||||
return dataset.ColumnType_Text
|
||||
case document.TableColumnTypeInteger:
|
||||
return dataset.ColumnType_Number
|
||||
case document.TableColumnTypeImage:
|
||||
return dataset.ColumnType_Image
|
||||
case document.TableColumnTypeBoolean:
|
||||
return dataset.ColumnType_Boolean
|
||||
case document.TableColumnTypeTime:
|
||||
return dataset.ColumnType_Date
|
||||
case document.TableColumnTypeNumber:
|
||||
return dataset.ColumnType_Float
|
||||
default:
|
||||
return dataset.ColumnType_Text
|
||||
}
|
||||
}
|
||||
|
||||
func convertColumnType2Entity(columnType dataset.ColumnType) document.TableColumnType {
|
||||
switch columnType {
|
||||
case dataset.ColumnType_Text:
|
||||
return document.TableColumnTypeString
|
||||
case dataset.ColumnType_Number:
|
||||
return document.TableColumnTypeInteger
|
||||
case dataset.ColumnType_Image:
|
||||
return document.TableColumnTypeImage
|
||||
case dataset.ColumnType_Boolean:
|
||||
return document.TableColumnTypeBoolean
|
||||
case dataset.ColumnType_Date:
|
||||
return document.TableColumnTypeTime
|
||||
case dataset.ColumnType_Float:
|
||||
return document.TableColumnTypeNumber
|
||||
default:
|
||||
return document.TableColumnTypeString
|
||||
}
|
||||
}
|
||||
|
||||
func convertParsingStrategy2Entity(strategy *dataset.ParsingStrategy, sheet *dataset.TableSheet, captionType *dataset.CaptionType, filterStrategy *dataset.FilterStrategy) *entity.ParsingStrategy {
|
||||
if strategy == nil && sheet == nil && captionType == nil {
|
||||
return nil
|
||||
}
|
||||
res := &entity.ParsingStrategy{}
|
||||
if strategy != nil {
|
||||
res.ExtractImage = strategy.GetImageExtraction()
|
||||
res.ExtractTable = strategy.GetTableExtraction()
|
||||
res.ImageOCR = strategy.GetImageOcr()
|
||||
res.ParsingType = convertParsingType2Entity(strategy.GetParsingType())
|
||||
if strategy.GetParsingType() == dataset.ParsingType_FastParsing {
|
||||
res.ExtractImage = false
|
||||
res.ExtractTable = false
|
||||
res.ImageOCR = false
|
||||
}
|
||||
}
|
||||
if sheet != nil {
|
||||
res.SheetID = sheet.GetSheetID()
|
||||
res.HeaderLine = int(sheet.GetHeaderLineIdx())
|
||||
res.DataStartLine = int(sheet.GetStartLineIdx())
|
||||
}
|
||||
if filterStrategy != nil {
|
||||
res.FilterPages = slices.Transform(filterStrategy.GetFilterPage(), func(page int32) int { return int(page) })
|
||||
}
|
||||
res.CaptionType = convertCaptionType2Entity(captionType)
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func convertParsingType2Entity(pt dataset.ParsingType) entity.ParsingType {
|
||||
switch pt {
|
||||
case dataset.ParsingType_AccurateParsing:
|
||||
return entity.ParsingType_AccurateParsing
|
||||
case dataset.ParsingType_FastParsing:
|
||||
return entity.ParsingType_FastParsing
|
||||
default:
|
||||
return entity.ParsingType_FastParsing
|
||||
}
|
||||
}
|
||||
|
||||
func convertParsingStrategy2Model(strategy *entity.ParsingStrategy) (s *dataset.ParsingStrategy, sheet *dataset.TableSheet) {
|
||||
if strategy == nil {
|
||||
return nil, nil
|
||||
}
|
||||
sheet = &dataset.TableSheet{
|
||||
SheetID: strategy.SheetID,
|
||||
HeaderLineIdx: int64(strategy.HeaderLine),
|
||||
StartLineIdx: int64(strategy.DataStartLine),
|
||||
}
|
||||
return &dataset.ParsingStrategy{
|
||||
ParsingType: ptr.Of(convertParsingType2Model(strategy.ParsingType)),
|
||||
ImageExtraction: &strategy.ExtractImage,
|
||||
TableExtraction: &strategy.ExtractTable,
|
||||
ImageOcr: &strategy.ImageOCR,
|
||||
}, sheet
|
||||
}
|
||||
func convertParsingType2Model(pt entity.ParsingType) dataset.ParsingType {
|
||||
switch pt {
|
||||
case entity.ParsingType_AccurateParsing:
|
||||
return dataset.ParsingType_AccurateParsing
|
||||
case entity.ParsingType_FastParsing:
|
||||
return dataset.ParsingType_FastParsing
|
||||
default:
|
||||
return dataset.ParsingType_FastParsing
|
||||
}
|
||||
}
|
||||
func convertChunkingStrategy2Entity(strategy *dataset.ChunkStrategy) *entity.ChunkingStrategy {
|
||||
if strategy == nil {
|
||||
return nil
|
||||
}
|
||||
if strategy.ChunkType == dataset.ChunkType_DefaultChunk {
|
||||
return &entity.ChunkingStrategy{
|
||||
ChunkType: convertChunkType2Entity(dataset.ChunkType_DefaultChunk),
|
||||
}
|
||||
}
|
||||
return &entity.ChunkingStrategy{
|
||||
ChunkType: convertChunkType2Entity(strategy.ChunkType),
|
||||
ChunkSize: strategy.GetMaxTokens(),
|
||||
Separator: strategy.GetSeparator(),
|
||||
Overlap: strategy.GetOverlap(),
|
||||
TrimSpace: strategy.GetRemoveExtraSpaces(),
|
||||
TrimURLAndEmail: strategy.GetRemoveUrlsEmails(),
|
||||
MaxDepth: strategy.GetMaxLevel(),
|
||||
SaveTitle: strategy.GetSaveTitle(),
|
||||
}
|
||||
}
|
||||
|
||||
// GetExtension returns the file extension of uri without the leading dot,
// or "" when uri is empty or its final path element has no extension.
//
// path.Ext already operates on the final slash-separated element of the
// path, so the previous path.Base indirection was redundant; behavior is
// unchanged, including the "trailing dot yields empty string" case.
func GetExtension(uri string) string {
	return strings.TrimPrefix(path.Ext(uri), ".")
}
|
||||
func convertCaptionType2Entity(ct *dataset.CaptionType) *parser.ImageAnnotationType {
|
||||
if ct == nil {
|
||||
return nil
|
||||
}
|
||||
switch ptr.From(ct) {
|
||||
case dataset.CaptionType_Auto:
|
||||
return ptr.Of(parser.ImageAnnotationTypeModel)
|
||||
case dataset.CaptionType_Manual:
|
||||
return ptr.Of(parser.ImageAnnotationTypeManual)
|
||||
default:
|
||||
return ptr.Of(parser.ImageAnnotationTypeModel)
|
||||
}
|
||||
}
|
||||
func convertDatasetStatus2Entity(status dataset.DatasetStatus) model.KnowledgeStatus {
|
||||
switch status {
|
||||
case dataset.DatasetStatus_DatasetReady:
|
||||
return model.KnowledgeStatusEnable
|
||||
case dataset.DatasetStatus_DatasetForbid, dataset.DatasetStatus_DatasetDeleted:
|
||||
return model.KnowledgeStatusDisable
|
||||
default:
|
||||
return model.KnowledgeStatusEnable
|
||||
}
|
||||
}
|
||||
|
||||
func convertChunkType2model(chunkType parser.ChunkType) dataset.ChunkType {
|
||||
switch chunkType {
|
||||
case parser.ChunkTypeCustom:
|
||||
return dataset.ChunkType_CustomChunk
|
||||
case parser.ChunkTypeDefault:
|
||||
return dataset.ChunkType_DefaultChunk
|
||||
case parser.ChunkTypeLeveled:
|
||||
return dataset.ChunkType_LevelChunk
|
||||
default:
|
||||
return dataset.ChunkType_CustomChunk
|
||||
}
|
||||
}
|
||||
|
||||
func convertChunkType2Entity(chunkType dataset.ChunkType) parser.ChunkType {
|
||||
switch chunkType {
|
||||
case dataset.ChunkType_CustomChunk:
|
||||
return parser.ChunkTypeCustom
|
||||
case dataset.ChunkType_DefaultChunk:
|
||||
return parser.ChunkTypeDefault
|
||||
case dataset.ChunkType_LevelChunk:
|
||||
return parser.ChunkTypeLeveled
|
||||
default:
|
||||
return parser.ChunkTypeDefault
|
||||
}
|
||||
}
|
||||
|
||||
func convertChunkingStrategy2Model(chunkingStrategy *entity.ChunkingStrategy) *dataset.ChunkStrategy {
|
||||
if chunkingStrategy == nil {
|
||||
return nil
|
||||
}
|
||||
return &dataset.ChunkStrategy{
|
||||
Separator: chunkingStrategy.Separator,
|
||||
MaxTokens: chunkingStrategy.ChunkSize,
|
||||
RemoveExtraSpaces: chunkingStrategy.TrimSpace,
|
||||
RemoveUrlsEmails: chunkingStrategy.TrimURLAndEmail,
|
||||
ChunkType: convertChunkType2model(chunkingStrategy.ChunkType),
|
||||
Overlap: &chunkingStrategy.Overlap,
|
||||
MaxLevel: &chunkingStrategy.MaxDepth,
|
||||
SaveTitle: &chunkingStrategy.SaveTitle,
|
||||
}
|
||||
}
|
||||
|
||||
func convertDocumentTypeEntity2Dataset(formatType model.DocumentType) dataset.FormatType {
|
||||
switch formatType {
|
||||
case model.DocumentTypeText:
|
||||
return dataset.FormatType_Text
|
||||
case model.DocumentTypeTable:
|
||||
return dataset.FormatType_Table
|
||||
case model.DocumentTypeImage:
|
||||
return dataset.FormatType_Image
|
||||
default:
|
||||
return dataset.FormatType_Text
|
||||
}
|
||||
}
|
||||
|
||||
func convertDocumentTypeDataset2Entity(formatType dataset.FormatType) model.DocumentType {
|
||||
switch formatType {
|
||||
case dataset.FormatType_Text:
|
||||
return model.DocumentTypeText
|
||||
case dataset.FormatType_Table:
|
||||
return model.DocumentTypeTable
|
||||
case dataset.FormatType_Image:
|
||||
return model.DocumentTypeImage
|
||||
default:
|
||||
return model.DocumentTypeUnknown
|
||||
}
|
||||
}
|
||||
|
||||
// batchConvertKnowledgeEntity2Model builds the dataset API view for each
// knowledge entity, keyed by knowledge ID. For every knowledge base it loads
// the full document list to derive aggregate fields (total size, slice count,
// in-flight files, and the displayed chunking rule).
//
// NOTE(review): this performs one ListDocument call per knowledge entity
// (N+1 pattern) — fine for small batches, worth batching if callers ever
// pass many entities.
func batchConvertKnowledgeEntity2Model(ctx context.Context, knowledgeEntity []*model.Knowledge) (map[int64]*dataset.Dataset, error) {
	knowledgeMap := map[int64]*dataset.Dataset{}
	for _, k := range knowledgeEntity {
		documentEntity, err := KnowledgeSVC.DomainSVC.ListDocument(ctx, &service.ListDocumentRequest{
			KnowledgeID: k.ID,
			SelectAll:   true,
		})
		if err != nil {
			logs.CtxErrorf(ctx, "list document failed, err: %v", err)
			return nil, err
		}
		// Only the disabled flag demotes the dataset; all other statuses show as ready.
		datasetStatus := dataset.DatasetStatus_DatasetReady
		if k.Status == model.KnowledgeStatusDisable {
			datasetStatus = dataset.DatasetStatus_DatasetForbid
		}

		var (
			rule                 *entity.ChunkingStrategy // displayed rule: taken from the first document
			totalSize            int64                    // sum of document sizes
			sliceCount           int32
			processingFileList   []string // names of documents still chunking/uploading
			processingFileIDList []string // decimal-string IDs matching processingFileList
			fileList             []string
		)
		for i := range documentEntity.Documents {
			doc := documentEntity.Documents[i]
			totalSize += doc.Size
			sliceCount += int32(doc.SliceCount)
			if doc.Status == entity.DocumentStatusChunking || doc.Status == entity.DocumentStatusUploading {
				processingFileList = append(processingFileList, doc.Name)
				processingFileIDList = append(processingFileIDList, strconv.FormatInt(doc.ID, 10))
			}
			if i == 0 {
				rule = doc.ChunkingStrategy
			}
			fileList = append(fileList, doc.Name)
		}
		knowledgeMap[k.ID] = &dataset.Dataset{
			DatasetID:          k.ID,
			Name:               k.Name,
			FileList:           fileList,
			AllFileSize:        totalSize,
			BotUsedCount:       0,
			Status:             datasetStatus,
			ProcessingFileList: processingFileList,
			// Millisecond timestamps exposed as 32-bit unix seconds.
			UpdateTime:           int32(k.UpdatedAtMs / 1000),
			IconURI:              k.IconURI,
			IconURL:              k.IconURL,
			Description:          k.Description,
			CanEdit:              true,
			CreateTime:           int32(k.CreatedAtMs / 1000),
			CreatorID:            k.CreatorID,
			SpaceID:              k.SpaceID,
			FailedFileList:       nil,
			FormatType:           convertDocumentTypeEntity2Dataset(k.Type),
			SliceCount:           sliceCount,
			DocCount:             int32(len(documentEntity.Documents)),
			HitCount:             int32(k.SliceHit),
			ChunkStrategy:        convertChunkingStrategy2Model(rule),
			ProcessingFileIDList: processingFileIDList,
			ProjectID:            strconv.FormatInt(k.AppID, 10),
		}
	}
	return knowledgeMap, nil
}
|
||||
|
||||
func convertSourceInfo(sourceInfo *dataset.SourceInfo) (*service.TableSourceInfo, error) {
|
||||
if sourceInfo == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
fType := sourceInfo.FileType
|
||||
if fType == nil && sourceInfo.TosURI != nil {
|
||||
split := strings.Split(sourceInfo.GetTosURI(), ".")
|
||||
fType = &split[len(split)-1]
|
||||
}
|
||||
|
||||
var customContent []map[string]string
|
||||
if sourceInfo.CustomContent != nil {
|
||||
if err := json.Unmarshal([]byte(sourceInfo.GetCustomContent()), &customContent); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return &service.TableSourceInfo{
|
||||
FileType: fType,
|
||||
Uri: sourceInfo.TosURI,
|
||||
FileBase64: sourceInfo.FileBase64,
|
||||
CustomContent: customContent,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func convertCreateDocReviewReq(req *dataset.CreateDocumentReviewRequest) *service.CreateDocumentReviewRequest {
|
||||
if req == nil {
|
||||
return nil
|
||||
}
|
||||
var captionType *dataset.CaptionType
|
||||
if req.GetChunkStrategy() != nil {
|
||||
captionType = req.GetChunkStrategy().CaptionType
|
||||
}
|
||||
resp := &service.CreateDocumentReviewRequest{
|
||||
ChunkStrategy: convertChunkingStrategy2Entity(req.ChunkStrategy),
|
||||
ParsingStrategy: convertParsingStrategy2Entity(req.ParsingStrategy, nil, captionType, nil),
|
||||
}
|
||||
resp.KnowledgeID = req.GetDatasetID()
|
||||
resp.Reviews = slices.Transform(req.GetReviews(), func(r *dataset.ReviewInput) *service.ReviewInput {
|
||||
return &service.ReviewInput{
|
||||
DocumentName: r.GetDocumentName(),
|
||||
DocumentType: r.GetDocumentType(),
|
||||
TosUri: r.GetTosURI(),
|
||||
DocumentID: ptr.Of(r.GetDocumentID()),
|
||||
}
|
||||
})
|
||||
return resp
|
||||
}
|
||||
|
||||
func convertReviewStatus2Model(status *entity.ReviewStatus) *dataset.ReviewStatus {
|
||||
if status == nil {
|
||||
return nil
|
||||
}
|
||||
switch *status {
|
||||
case entity.ReviewStatus_Enable:
|
||||
return dataset.ReviewStatusPtr(dataset.ReviewStatus_Enable)
|
||||
case entity.ReviewStatus_Processing:
|
||||
return dataset.ReviewStatusPtr(dataset.ReviewStatus_Processing)
|
||||
case entity.ReviewStatus_Failed:
|
||||
return dataset.ReviewStatusPtr(dataset.ReviewStatus_Failed)
|
||||
case entity.ReviewStatus_ForceStop:
|
||||
return dataset.ReviewStatusPtr(dataset.ReviewStatus_ForceStop)
|
||||
default:
|
||||
return dataset.ReviewStatusPtr(dataset.ReviewStatus_Processing)
|
||||
}
|
||||
}
|
||||
|
||||
func getIconURI(tp dataset.FormatType) string {
|
||||
switch tp {
|
||||
case dataset.FormatType_Text:
|
||||
return upload.TextKnowledgeDefaultIcon
|
||||
case dataset.FormatType_Table:
|
||||
return upload.TableKnowledgeDefaultIcon
|
||||
case dataset.FormatType_Image:
|
||||
return upload.ImageKnowledgeDefaultIcon
|
||||
default:
|
||||
return upload.TextKnowledgeDefaultIcon
|
||||
}
|
||||
}
|
||||
|
||||
func convertFormatType2Entity(tp dataset.FormatType) model.DocumentType {
|
||||
switch tp {
|
||||
case dataset.FormatType_Text:
|
||||
return model.DocumentTypeText
|
||||
case dataset.FormatType_Table:
|
||||
return model.DocumentTypeTable
|
||||
case dataset.FormatType_Image:
|
||||
return model.DocumentTypeImage
|
||||
default:
|
||||
return model.DocumentTypeUnknown
|
||||
}
|
||||
}
|
||||
445
backend/application/knowledge/init.go
Normal file
445
backend/application/knowledge/init.go
Normal file
@@ -0,0 +1,445 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package knowledge
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/cloudwego/eino-ext/components/embedding/ark"
|
||||
"github.com/cloudwego/eino-ext/components/embedding/openai"
|
||||
ao "github.com/cloudwego/eino-ext/components/model/ark"
|
||||
"github.com/cloudwego/eino-ext/components/model/deepseek"
|
||||
"github.com/cloudwego/eino-ext/components/model/gemini"
|
||||
"github.com/cloudwego/eino-ext/components/model/ollama"
|
||||
mo "github.com/cloudwego/eino-ext/components/model/openai"
|
||||
"github.com/cloudwego/eino-ext/components/model/qwen"
|
||||
"github.com/cloudwego/eino/components/prompt"
|
||||
"github.com/cloudwego/eino/schema"
|
||||
"github.com/milvus-io/milvus/client/v2/milvusclient"
|
||||
"github.com/volcengine/volc-sdk-golang/service/vikingdb"
|
||||
"github.com/volcengine/volc-sdk-golang/service/visual"
|
||||
"google.golang.org/genai"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/application/search"
|
||||
knowledgeImpl "github.com/coze-dev/coze-studio/backend/domain/knowledge/service"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/cache"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/chatmodel"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/document/nl2sql"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/document/ocr"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/document/searchstore"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/embedding"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/es"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/idgen"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/imagex"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/messages2query"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/rdb"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/storage"
|
||||
chatmodelImpl "github.com/coze-dev/coze-studio/backend/infra/impl/chatmodel"
|
||||
builtinNL2SQL "github.com/coze-dev/coze-studio/backend/infra/impl/document/nl2sql/builtin"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/impl/document/ocr/veocr"
|
||||
builtinParser "github.com/coze-dev/coze-studio/backend/infra/impl/document/parser/builtin"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/impl/document/rerank/rrf"
|
||||
sses "github.com/coze-dev/coze-studio/backend/infra/impl/document/searchstore/elasticsearch"
|
||||
ssmilvus "github.com/coze-dev/coze-studio/backend/infra/impl/document/searchstore/milvus"
|
||||
ssvikingdb "github.com/coze-dev/coze-studio/backend/infra/impl/document/searchstore/vikingdb"
|
||||
arkemb "github.com/coze-dev/coze-studio/backend/infra/impl/embedding/ark"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/impl/embedding/wrap"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/impl/eventbus"
|
||||
builtinM2Q "github.com/coze-dev/coze-studio/backend/infra/impl/messages2query/builtin"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/conv"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/ptr"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/logs"
|
||||
"github.com/coze-dev/coze-studio/backend/types/consts"
|
||||
)
|
||||
|
||||
// ServiceComponents bundles the infrastructure dependencies that InitService
// wires into the knowledge application service.
type ServiceComponents struct {
	DB       *gorm.DB                // GORM database handle, passed to the knowledge domain service
	IDGenSVC idgen.IDGenerator       // ID generator used by the domain service
	Storage  storage.Storage         // object storage; also used by the builtin document parser
	RDB      rdb.RDB                 // RDB service handed to the domain service
	ImageX   imagex.ImageX           // image service client (not consumed in InitService itself)
	ES       es.Client               // Elasticsearch client backing the full-text search store
	EventBus search.ResourceEventBus // resource event bus stored on the application service
	CacheCli cache.Cmdable           // cache client handed to the domain service
}
|
||||
|
||||
func InitService(c *ServiceComponents) (*KnowledgeApplicationService, error) {
|
||||
ctx := context.Background()
|
||||
|
||||
nameServer := os.Getenv(consts.MQServer)
|
||||
|
||||
knowledgeProducer, err := eventbus.NewProducer(nameServer, consts.RMQTopicKnowledge, consts.RMQConsumeGroupKnowledge, 2)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init knowledge producer failed, err=%w", err)
|
||||
}
|
||||
|
||||
var sManagers []searchstore.Manager
|
||||
|
||||
// es full text search
|
||||
sManagers = append(sManagers, sses.NewManager(&sses.ManagerConfig{Client: c.ES}))
|
||||
|
||||
// vector search
|
||||
mgr, err := getVectorStore(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init vector store failed, err=%w", err)
|
||||
}
|
||||
sManagers = append(sManagers, mgr)
|
||||
|
||||
var ocrImpl ocr.OCR
|
||||
switch os.Getenv("OCR_TYPE") {
|
||||
case "ve":
|
||||
ocrAK := os.Getenv("VE_OCR_AK")
|
||||
ocrSK := os.Getenv("VE_OCR_SK")
|
||||
inst := visual.NewInstance()
|
||||
inst.Client.SetAccessKey(ocrAK)
|
||||
inst.Client.SetSecretKey(ocrSK)
|
||||
ocrImpl = veocr.NewOCR(&veocr.Config{Client: inst})
|
||||
default:
|
||||
// accept ocr not configured
|
||||
}
|
||||
|
||||
root, err := os.Getwd()
|
||||
if err != nil {
|
||||
logs.Warnf("[InitConfig] Failed to get current working directory: %v", err)
|
||||
root = os.Getenv("PWD")
|
||||
}
|
||||
|
||||
var rewriter messages2query.MessagesToQuery
|
||||
if rewriterChatModel, _, err := getBuiltinChatModel(ctx, "M2Q_"); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
filePath := filepath.Join(root, "resources/conf/prompt/messages_to_query_template_jinja2.json")
|
||||
rewriterTemplate, err := readJinja2PromptTemplate(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
rewriter, err = builtinM2Q.NewMessagesToQuery(ctx, rewriterChatModel, rewriterTemplate)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
var n2s nl2sql.NL2SQL
|
||||
if n2sChatModel, _, err := getBuiltinChatModel(ctx, "NL2SQL_"); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
filePath := filepath.Join(root, "resources/conf/prompt/nl2sql_template_jinja2.json")
|
||||
n2sTemplate, err := readJinja2PromptTemplate(filePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
n2s, err = builtinNL2SQL.NewNL2SQL(ctx, n2sChatModel, n2sTemplate)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
imageAnnoChatModel, configured, err := getBuiltinChatModel(ctx, "IA_")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
knowledgeDomainSVC, knowledgeEventHandler := knowledgeImpl.NewKnowledgeSVC(&knowledgeImpl.KnowledgeSVCConfig{
|
||||
DB: c.DB,
|
||||
IDGen: c.IDGenSVC,
|
||||
RDB: c.RDB,
|
||||
Producer: knowledgeProducer,
|
||||
SearchStoreManagers: sManagers,
|
||||
ParseManager: builtinParser.NewManager(c.Storage, ocrImpl, imageAnnoChatModel), // default builtin
|
||||
Storage: c.Storage,
|
||||
Rewriter: rewriter,
|
||||
Reranker: rrf.NewRRFReranker(0), // default rrf
|
||||
NL2Sql: n2s,
|
||||
OCR: ocrImpl,
|
||||
CacheCli: c.CacheCli,
|
||||
IsAutoAnnotationSupported: configured,
|
||||
ModelFactory: chatmodelImpl.NewDefaultFactory(),
|
||||
})
|
||||
|
||||
if err = eventbus.RegisterConsumer(nameServer, consts.RMQTopicKnowledge, consts.RMQConsumeGroupKnowledge, knowledgeEventHandler); err != nil {
|
||||
return nil, fmt.Errorf("register knowledge consumer failed, err=%w", err)
|
||||
}
|
||||
|
||||
KnowledgeSVC.DomainSVC = knowledgeDomainSVC
|
||||
KnowledgeSVC.eventBus = c.EventBus
|
||||
KnowledgeSVC.storage = c.Storage
|
||||
return KnowledgeSVC, nil
|
||||
}
|
||||
|
||||
func getVectorStore(ctx context.Context) (searchstore.Manager, error) {
|
||||
vsType := os.Getenv("VECTOR_STORE_TYPE")
|
||||
|
||||
switch vsType {
|
||||
case "milvus":
|
||||
cctx, cancel := context.WithTimeout(ctx, time.Second*5)
|
||||
defer cancel()
|
||||
|
||||
milvusAddr := os.Getenv("MILVUS_ADDR")
|
||||
mc, err := milvusclient.New(cctx, &milvusclient.ClientConfig{Address: milvusAddr})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init milvus client failed, err=%w", err)
|
||||
}
|
||||
|
||||
emb, err := getEmbedding(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init milvus embedding failed, err=%w", err)
|
||||
}
|
||||
|
||||
mgr, err := ssmilvus.NewManager(&ssmilvus.ManagerConfig{
|
||||
Client: mc,
|
||||
Embedding: emb,
|
||||
EnableHybrid: ptr.Of(true),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init milvus vector store failed, err=%w", err)
|
||||
}
|
||||
|
||||
return mgr, nil
|
||||
case "vikingdb":
|
||||
var (
|
||||
host = os.Getenv("VIKING_DB_HOST")
|
||||
region = os.Getenv("VIKING_DB_REGION")
|
||||
ak = os.Getenv("VIKING_DB_AK")
|
||||
sk = os.Getenv("VIKING_DB_SK")
|
||||
scheme = os.Getenv("VIKING_DB_SCHEME")
|
||||
modelName = os.Getenv("VIKING_DB_MODEL_NAME")
|
||||
)
|
||||
if ak == "" || sk == "" {
|
||||
return nil, fmt.Errorf("invalid vikingdb ak / sk")
|
||||
}
|
||||
if host == "" {
|
||||
host = "api-vikingdb.volces.com"
|
||||
}
|
||||
if region == "" {
|
||||
region = "cn-beijing"
|
||||
}
|
||||
if scheme == "" {
|
||||
scheme = "https"
|
||||
}
|
||||
|
||||
var embConfig *ssvikingdb.VikingEmbeddingConfig
|
||||
if modelName != "" {
|
||||
embName := ssvikingdb.VikingEmbeddingModelName(modelName)
|
||||
if embName.Dimensions() == 0 {
|
||||
return nil, fmt.Errorf("embedding model not support, model_name=%s", modelName)
|
||||
}
|
||||
embConfig = &ssvikingdb.VikingEmbeddingConfig{
|
||||
UseVikingEmbedding: true,
|
||||
EnableHybrid: embName.SupportStatus() == embedding.SupportDenseAndSparse,
|
||||
ModelName: embName,
|
||||
ModelVersion: embName.ModelVersion(),
|
||||
DenseWeight: ptr.Of(0.2),
|
||||
BuiltinEmbedding: nil,
|
||||
}
|
||||
} else {
|
||||
builtinEmbedding, err := getEmbedding(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("builtint embedding init failed, err=%w", err)
|
||||
}
|
||||
|
||||
embConfig = &ssvikingdb.VikingEmbeddingConfig{
|
||||
UseVikingEmbedding: false,
|
||||
EnableHybrid: false,
|
||||
BuiltinEmbedding: builtinEmbedding,
|
||||
}
|
||||
}
|
||||
svc := vikingdb.NewVikingDBService(host, region, ak, sk, scheme)
|
||||
mgr, err := ssvikingdb.NewManager(&ssvikingdb.ManagerConfig{
|
||||
Service: svc,
|
||||
IndexingConfig: nil, // use default config
|
||||
EmbeddingConfig: embConfig,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init vikingdb manager failed, err=%w", err)
|
||||
}
|
||||
|
||||
return mgr, nil
|
||||
|
||||
default:
|
||||
return nil, fmt.Errorf("unexpected vector store type, type=%s", vsType)
|
||||
}
|
||||
}
|
||||
|
||||
func getEmbedding(ctx context.Context) (embedding.Embedder, error) {
|
||||
var emb embedding.Embedder
|
||||
|
||||
switch os.Getenv("EMBEDDING_TYPE") {
|
||||
case "openai":
|
||||
var (
|
||||
openAIEmbeddingBaseURL = os.Getenv("OPENAI_EMBEDDING_BASE_URL")
|
||||
openAIEmbeddingModel = os.Getenv("OPENAI_EMBEDDING_MODEL")
|
||||
openAIEmbeddingApiKey = os.Getenv("OPENAI_EMBEDDING_API_KEY")
|
||||
openAIEmbeddingByAzure = os.Getenv("OPENAI_EMBEDDING_BY_AZURE")
|
||||
openAIEmbeddingApiVersion = os.Getenv("OPENAI_EMBEDDING_API_VERSION")
|
||||
openAIEmbeddingDims = os.Getenv("OPENAI_EMBEDDING_DIMS")
|
||||
openAIRequestEmbeddingDims = os.Getenv("OPENAI_EMBEDDING_REQUEST_DIMS")
|
||||
)
|
||||
|
||||
byAzure, err := strconv.ParseBool(openAIEmbeddingByAzure)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init openai embedding by_azure failed, err=%w", err)
|
||||
}
|
||||
|
||||
dims, err := strconv.ParseInt(openAIEmbeddingDims, 10, 64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init openai embedding dims failed, err=%w", err)
|
||||
}
|
||||
|
||||
openAICfg := &openai.EmbeddingConfig{
|
||||
APIKey: openAIEmbeddingApiKey,
|
||||
ByAzure: byAzure,
|
||||
BaseURL: openAIEmbeddingBaseURL,
|
||||
APIVersion: openAIEmbeddingApiVersion,
|
||||
Model: openAIEmbeddingModel,
|
||||
// Dimensions: ptr.Of(int(dims)),
|
||||
}
|
||||
reqDims := conv.StrToInt64D(openAIRequestEmbeddingDims, 0)
|
||||
if reqDims > 0 {
|
||||
// some openai model not support request dims
|
||||
openAICfg.Dimensions = ptr.Of(int(reqDims))
|
||||
}
|
||||
|
||||
emb, err = wrap.NewOpenAIEmbedder(ctx, openAICfg, dims)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init openai embedding failed, err=%w", err)
|
||||
}
|
||||
|
||||
case "ark":
|
||||
var (
|
||||
arkEmbeddingBaseURL = os.Getenv("ARK_EMBEDDING_BASE_URL")
|
||||
arkEmbeddingModel = os.Getenv("ARK_EMBEDDING_MODEL")
|
||||
arkEmbeddingAK = os.Getenv("ARK_EMBEDDING_AK")
|
||||
arkEmbeddingDims = os.Getenv("ARK_EMBEDDING_DIMS")
|
||||
)
|
||||
|
||||
dims, err := strconv.ParseInt(arkEmbeddingDims, 10, 64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init ark embedding dims failed, err=%w", err)
|
||||
}
|
||||
|
||||
emb, err = arkemb.NewArkEmbedder(ctx, &ark.EmbeddingConfig{
|
||||
APIKey: arkEmbeddingAK,
|
||||
Model: arkEmbeddingModel,
|
||||
BaseURL: arkEmbeddingBaseURL,
|
||||
}, dims)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init ark embedding client failed, err=%w", err)
|
||||
}
|
||||
default:
|
||||
return nil, fmt.Errorf("init knowledge embedding failed, type not configured")
|
||||
}
|
||||
|
||||
return emb, nil
|
||||
}
|
||||
|
||||
func getBuiltinChatModel(ctx context.Context, envPrefix string) (bcm chatmodel.BaseChatModel, configured bool, err error) {
|
||||
getEnv := func(key string) string {
|
||||
if val := os.Getenv(envPrefix + key); val != "" {
|
||||
return val
|
||||
}
|
||||
return os.Getenv(key)
|
||||
}
|
||||
|
||||
switch getEnv("BUILTIN_CM_TYPE") {
|
||||
case "openai":
|
||||
byAzure, _ := strconv.ParseBool(getEnv("BUILTIN_CM_OPENAI_BY_AZURE"))
|
||||
bcm, err = mo.NewChatModel(ctx, &mo.ChatModelConfig{
|
||||
APIKey: getEnv("BUILTIN_CM_OPENAI_API_KEY"),
|
||||
ByAzure: byAzure,
|
||||
BaseURL: getEnv("BUILTIN_CM_OPENAI_BASE_URL"),
|
||||
Model: getEnv("BUILTIN_CM_OPENAI_MODEL"),
|
||||
})
|
||||
case "ark":
|
||||
bcm, err = ao.NewChatModel(ctx, &ao.ChatModelConfig{
|
||||
APIKey: getEnv("BUILTIN_CM_ARK_API_KEY"),
|
||||
Model: getEnv("BUILTIN_CM_ARK_MODEL"),
|
||||
BaseURL: getEnv("BUILTIN_CM_ARK_BASE_URL"),
|
||||
})
|
||||
case "deepseek":
|
||||
bcm, err = deepseek.NewChatModel(ctx, &deepseek.ChatModelConfig{
|
||||
APIKey: getEnv("BUILTIN_CM_DEEPSEEK_API_KEY"),
|
||||
BaseURL: getEnv("BUILTIN_CM_DEEPSEEK_BASE_URL"),
|
||||
Model: getEnv("BUILTIN_CM_DEEPSEEK_MODEL"),
|
||||
})
|
||||
case "ollama":
|
||||
bcm, err = ollama.NewChatModel(ctx, &ollama.ChatModelConfig{
|
||||
BaseURL: getEnv("BUILTIN_CM_OLLAMA_BASE_URL"),
|
||||
Model: getEnv("BUILTIN_CM_OLLAMA_MODEL"),
|
||||
})
|
||||
case "qwen":
|
||||
bcm, err = qwen.NewChatModel(ctx, &qwen.ChatModelConfig{
|
||||
APIKey: getEnv("BUILTIN_CM_QWEN_API_KEY"),
|
||||
BaseURL: getEnv("BUILTIN_CM_QWEN_BASE_URL"),
|
||||
Model: getEnv("BUILTIN_CM_QWEN_MODEL"),
|
||||
})
|
||||
case "gemini":
|
||||
backend, convErr := strconv.ParseInt(getEnv("BUILTIN_CM_GEMINI_BACKEND"), 10, 64)
|
||||
if convErr != nil {
|
||||
return nil, false, convErr
|
||||
}
|
||||
c, clientErr := genai.NewClient(ctx, &genai.ClientConfig{
|
||||
APIKey: getEnv("BUILTIN_CM_GEMINI_API_KEY"),
|
||||
Backend: genai.Backend(backend),
|
||||
Project: getEnv("BUILTIN_CM_GEMINI_PROJECT"),
|
||||
Location: getEnv("BUILTIN_CM_GEMINI_LOCATION"),
|
||||
HTTPOptions: genai.HTTPOptions{
|
||||
BaseURL: getEnv("BUILTIN_CM_GEMINI_BASE_URL"),
|
||||
},
|
||||
})
|
||||
if clientErr != nil {
|
||||
return nil, false, clientErr
|
||||
}
|
||||
bcm, err = gemini.NewChatModel(ctx, &gemini.Config{
|
||||
Client: c,
|
||||
Model: getEnv("BUILTIN_CM_GEMINI_MODEL"),
|
||||
})
|
||||
default:
|
||||
// accept builtin chat model not configured
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, false, fmt.Errorf("knowledge init openai chat mode failed, %w", err)
|
||||
}
|
||||
if bcm != nil {
|
||||
configured = true
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func readJinja2PromptTemplate(jsonFilePath string) (prompt.ChatTemplate, error) {
|
||||
b, err := os.ReadFile(jsonFilePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var m2qMessages []*schema.Message
|
||||
if err = json.Unmarshal(b, &m2qMessages); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tpl := make([]schema.MessagesTemplate, len(m2qMessages))
|
||||
for i := range m2qMessages {
|
||||
tpl[i] = m2qMessages[i]
|
||||
}
|
||||
return prompt.FromMessages(schema.Jinja2, tpl...), nil
|
||||
}
|
||||
1123
backend/application/knowledge/knowledge.go
Normal file
1123
backend/application/knowledge/knowledge.go
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user