diff --git a/backend/application/workflow/chatflow.go b/backend/application/workflow/chatflow.go index 9f4a46705..bf69c3d32 100644 --- a/backend/application/workflow/chatflow.go +++ b/backend/application/workflow/chatflow.go @@ -28,8 +28,6 @@ import ( "github.com/coze-dev/coze-studio/backend/api/model/crossdomain/message" "github.com/coze-dev/coze-studio/backend/api/model/workflow" "github.com/coze-dev/coze-studio/backend/application/base/ctxutil" - "github.com/coze-dev/coze-studio/backend/crossdomain/contract/crossconversation" - "github.com/coze-dev/coze-studio/backend/crossdomain/contract/crossmessage" "github.com/coze-dev/coze-studio/backend/domain/workflow/entity" "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo" "github.com/coze-dev/coze-studio/backend/pkg/errorx" @@ -234,6 +232,10 @@ func defaultCard() *inputCard { return card } + + + + func (w *ApplicationService) CreateApplicationConversationDef(ctx context.Context, req *workflow.CreateProjectConversationDefRequest) (resp *workflow.CreateProjectConversationDefResponse, err error) { defer func() { if panicErr := recover(); panicErr != nil { @@ -1370,3 +1372,5 @@ func renderSelectOptionCardDSL(c string) (string, error) { return rCardString, nil } +======= +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) diff --git a/backend/application/workflow/init.go b/backend/application/workflow/init.go index 8f475fd3b..52bac2118 100644 --- a/backend/application/workflow/init.go +++ b/backend/application/workflow/init.go @@ -28,6 +28,7 @@ import ( wfconversation "github.com/coze-dev/coze-studio/backend/crossdomain/workflow/conversation" "github.com/coze-dev/coze-studio/backend/crossdomain/impl/code" + wfconversation "github.com/coze-dev/coze-studio/backend/crossdomain/workflow/conversation" wfplugin "github.com/coze-dev/coze-studio/backend/crossdomain/workflow/plugin" wfsearch "github.com/coze-dev/coze-studio/backend/crossdomain/workflow/search" 
"github.com/coze-dev/coze-studio/backend/crossdomain/workflow/variable" @@ -37,7 +38,10 @@ import ( plugin "github.com/coze-dev/coze-studio/backend/domain/plugin/service" search "github.com/coze-dev/coze-studio/backend/domain/search/service" "github.com/coze-dev/coze-studio/backend/domain/workflow" +<<<<<<< HEAD crossconversation "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/conversation" +======= +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) crossplugin "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/plugin" crosssearch "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/search" crossvariable "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/variable" @@ -49,6 +53,8 @@ import ( "github.com/coze-dev/coze-studio/backend/infra/contract/imagex" "github.com/coze-dev/coze-studio/backend/infra/contract/storage" "github.com/coze-dev/coze-studio/backend/pkg/logs" + + crossconversation "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/conversation" ) type ServiceComponents struct { diff --git a/backend/application/workflow/workflow.go b/backend/application/workflow/workflow.go index 0db163fd0..f09bec379 100644 --- a/backend/application/workflow/workflow.go +++ b/backend/application/workflow/workflow.go @@ -195,7 +195,11 @@ func (w *ApplicationService) CreateWorkflow(ctx context.Context, req *workflow.C if !req.IsSetProjectID() || mustParseInt64(req.GetProjectID()) == 0 || !createConversation { conversationName = "Default" } +<<<<<<< HEAD wf.InitCanvasSchema = vo.GetDefaultInitCanvasJsonSchemaChat(i18n.GetLocale(ctx), conversationName) +======= + wf.InitCanvasSchema = entity.GetDefaultInitCanvasJsonSchemaChat(i18n.GetLocale(ctx), conversationName) +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) } id, err := GetWorkflowDomainSVC().Create(ctx, wf) @@ -3917,11 
+3921,15 @@ func (w *ApplicationService) GetChatFlowRole(ctx context.Context, req *workflow. var version string if wf.Meta.AppID != nil { +<<<<<<< HEAD if vl, err := GetWorkflowDomainSVC().GetWorkflowVersionsByConnector(ctx, mustParseInt64(req.GetConnectorID()), wf.ID, 1); err != nil { return nil, err } else if len(vl) > 0 { version = vl[0] } +======= + version = "" // TODO : search version from DB using AppID +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) } role, err := GetWorkflowDomainSVC().GetChatFlowRole(ctx, mustParseInt64(req.WorkflowID), version) @@ -4029,6 +4037,7 @@ func (w *ApplicationService) convertChatFlowRole(ctx context.Context, role *enti return res, nil } +<<<<<<< HEAD func (w *ApplicationService) OpenAPIGetWorkflowInfo(ctx context.Context, req *workflow.OpenAPIGetWorkflowInfoRequest) ( _ *workflow.OpenAPIGetWorkflowInfoResponse, err error) { @@ -4092,3 +4101,5 @@ func (w *ApplicationService) OpenAPIGetWorkflowInfo(ctx context.Context, req *wo }, }, nil } +======= +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) diff --git a/backend/crossdomain/impl/workflow/workflow.go b/backend/crossdomain/impl/workflow/workflow.go index 78f472da2..d3a4fbbc4 100644 --- a/backend/crossdomain/impl/workflow/workflow.go +++ b/backend/crossdomain/impl/workflow/workflow.go @@ -99,3 +99,7 @@ func (i *impl) GetWorkflowIDsByAppID(ctx context.Context, appID int64) ([]int64, return a.ID }), err } + +func (i *impl) InitApplicationDefaultConversationTemplate(ctx context.Context, spaceID int64, appID int64, userID int64) error { + return i.DomainSVC.InitApplicationDefaultConversationTemplate(ctx, spaceID, appID, userID) +} diff --git a/backend/crossdomain/workflow/conversation/conversation.go b/backend/crossdomain/workflow/conversation/conversation.go index e429ffab9..33bd6db73 100644 --- a/backend/crossdomain/workflow/conversation/conversation.go +++ 
b/backend/crossdomain/workflow/conversation/conversation.go @@ -1,3 +1,4 @@ +<<<<<<< HEAD /* * Copyright 2025 coze-dev Authors * @@ -171,7 +172,11 @@ func convertMessage(msgs []*msgentity.Message) ([]*conversation.Message, error) for _, m := range msgs { msg := &conversation.Message{ ID: m.ID, +<<<<<<< HEAD Role: m.Role, +======= + Role: string(m.Role), +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: string(m.ContentType)} if m.MultiContent != nil { @@ -180,13 +185,22 @@ func convertMessage(msgs []*msgentity.Message) ([]*conversation.Message, error) if c.FileData != nil { for _, fd := range c.FileData { mcs = append(mcs, &conversation.Content{ +<<<<<<< HEAD Type: c.Type, Uri: ptr.Of(fd.URI), +======= + Type: string(c.Type), + Uri: ptr.Of(fd.Url), +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) }) } } else { mcs = append(mcs, &conversation.Content{ +<<<<<<< HEAD Type: c.Type, +======= + Type: string(c.Type), +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) Text: ptr.Of(c.Text), }) } diff --git a/backend/crossdomain/workflow/conversation/conversation_test.go b/backend/crossdomain/workflow/conversation/conversation_test.go index 4b215dbcb..9d0ed05d2 100644 --- a/backend/crossdomain/workflow/conversation/conversation_test.go +++ b/backend/crossdomain/workflow/conversation/conversation_test.go @@ -1,3 +1,4 @@ +<<<<<<< HEAD /* * Copyright 2025 coze-dev Authors * @@ -14,12 +15,14 @@ * limitations under the License. 
*/ + package conversation import ( "testing" "github.com/cloudwego/eino/schema" + apimessage "github.com/coze-dev/coze-studio/backend/api/model/crossdomain/message" "github.com/coze-dev/coze-studio/backend/domain/conversation/message/entity" "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/conversation" @@ -44,7 +47,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*entity.Message{ { ID: 1, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "text", MultiContent: []*apimessage.InputMetaData{ { @@ -60,7 +67,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*conversation.Message{ { ID: 1, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "text", MultiContent: []*conversation.Content{ {Type: "text", Text: ptr.Of("hello")}, @@ -76,14 +87,22 @@ func Test_convertMessage(t *testing.T) { Messages: []*entity.Message{ { ID: 2, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "file", MultiContent: []*apimessage.InputMetaData{ { Type: "file", FileData: []*apimessage.FileData{ { +<<<<<<< HEAD URI: "f_uri_1", +======= + Url: "f_uri_1", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) }, }, }, @@ -100,7 +119,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*conversation.Message{ { ID: 2, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "file", MultiContent: []*conversation.Content{ {Type: "file", Uri: ptr.Of("f_uri_1")}, @@ -117,7 +140,11 @@ func Test_convertMessage(t *testing.T) { 
Messages: []*entity.Message{ { ID: 3, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "text_file", MultiContent: []*apimessage.InputMetaData{ { @@ -128,7 +155,11 @@ func Test_convertMessage(t *testing.T) { Type: "file", FileData: []*apimessage.FileData{ { +<<<<<<< HEAD URI: "f_uri_2", +======= + Url: "f_uri_2", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) }, }, }, @@ -141,7 +172,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*conversation.Message{ { ID: 3, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "text_file", MultiContent: []*conversation.Content{ {Type: "text", Text: ptr.Of("hello")}, @@ -158,17 +193,28 @@ func Test_convertMessage(t *testing.T) { Messages: []*entity.Message{ { ID: 4, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "file", MultiContent: []*apimessage.InputMetaData{ { Type: "file", FileData: []*apimessage.FileData{ { +<<<<<<< HEAD URI: "f_uri_3", }, { URI: "f_uri_4", +======= + Url: "f_uri_3", + }, + { + Url: "f_uri_4", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) }, }, }, @@ -185,7 +231,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*conversation.Message{ { ID: 4, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "file", MultiContent: []*conversation.Content{ {Type: "file", Uri: ptr.Of("f_uri_3")}, @@ -203,7 +253,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*entity.Message{ { ID: 5, 
+<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "text", MultiContent: []*apimessage.InputMetaData{ { @@ -219,7 +273,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*conversation.Message{ { ID: 5, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "text", MultiContent: []*conversation.Content{ {Type: "text", Text: ptr.Of("")}, @@ -235,14 +293,22 @@ func Test_convertMessage(t *testing.T) { Messages: []*entity.Message{ { ID: 6, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "image", MultiContent: []*apimessage.InputMetaData{ { Type: "image", FileData: []*apimessage.FileData{ { +<<<<<<< HEAD URI: "image_uri_5", +======= + Url: "image_uri_5", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) }, }, }, @@ -259,7 +325,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*conversation.Message{ { ID: 6, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "image", MultiContent: []*conversation.Content{ {Type: "image", Uri: ptr.Of("image_uri_5")}, @@ -276,17 +346,28 @@ func Test_convertMessage(t *testing.T) { Messages: []*entity.Message{ { ID: 7, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "image", MultiContent: []*apimessage.InputMetaData{ { Type: "image", FileData: []*apimessage.FileData{ { +<<<<<<< HEAD URI: "file_id_6", }, { URI: "file_id_7", +======= + Url: "file_id_6", + }, 
+ { + Url: "file_id_7", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) }, }, }, @@ -303,7 +384,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*conversation.Message{ { ID: 7, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "image", MultiContent: []*conversation.Content{ {Type: "image", Uri: ptr.Of("file_id_6")}, @@ -321,7 +406,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*entity.Message{ { ID: 8, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "mix", MultiContent: []*apimessage.InputMetaData{ { @@ -332,7 +421,11 @@ func Test_convertMessage(t *testing.T) { Type: "image", FileData: []*apimessage.FileData{ { +<<<<<<< HEAD URI: "file_id_8", +======= + Url: "file_id_8", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) }, }, }, @@ -340,7 +433,11 @@ func Test_convertMessage(t *testing.T) { Type: "file", FileData: []*apimessage.FileData{ { +<<<<<<< HEAD URI: "file_id_9", +======= + Url: "file_id_9", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) }, }, }, @@ -353,7 +450,11 @@ func Test_convertMessage(t *testing.T) { Messages: []*conversation.Message{ { ID: 8, +<<<<<<< HEAD Role: schema.User, +======= + Role: "user", +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) ContentType: "mix", MultiContent: []*conversation.Content{ {Type: "text", Text: ptr.Of("hello")}, diff --git a/backend/domain/app/service/service_impl.go b/backend/domain/app/service/service_impl.go index 7fd55d872..33e425867 100644 --- a/backend/domain/app/service/service_impl.go +++ 
b/backend/domain/app/service/service_impl.go @@ -25,6 +25,7 @@ import ( connectorModel "github.com/coze-dev/coze-studio/backend/api/model/crossdomain/connector" databaseModel "github.com/coze-dev/coze-studio/backend/api/model/crossdomain/database" knowledgeModel "github.com/coze-dev/coze-studio/backend/api/model/crossdomain/knowledge" + crossconnector "github.com/coze-dev/coze-studio/backend/crossdomain/contract/connector" crossdatabase "github.com/coze-dev/coze-studio/backend/crossdomain/contract/database" crossknowledge "github.com/coze-dev/coze-studio/backend/crossdomain/contract/knowledge" diff --git a/backend/domain/workflow/component_interface.go b/backend/domain/workflow/component_interface.go index 6b858aacd..4b37d9462 100644 --- a/backend/domain/workflow/component_interface.go +++ b/backend/domain/workflow/component_interface.go @@ -69,6 +69,7 @@ type ConversationService interface { InitApplicationDefaultConversationTemplate(ctx context.Context, spaceID int64, appID int64, userID int64) error GetOrCreateConversation(ctx context.Context, env vo.Env, appID, connectorID, userID int64, conversationName string) (int64, error) UpdateConversation(ctx context.Context, env vo.Env, appID, connectorID, userID int64, conversationName string) (int64, error) + } type InterruptEventStore interface { @@ -129,6 +130,9 @@ type ConversationRepository interface { ListDynamicConversation(ctx context.Context, env vo.Env, policy *vo.ListConversationPolicy) ([]*entity.DynamicConversation, error) BatchCreateOnlineConversationTemplate(ctx context.Context, templates []*entity.ConversationTemplate, version string) error UpdateDynamicConversationNameByID(ctx context.Context, env vo.Env, templateID int64, name string) error +<<<<<<< HEAD UpdateStaticConversation(ctx context.Context, env vo.Env, templateID int64, connectorID int64, userID int64, newConversationID int64) error UpdateDynamicConversation(ctx context.Context, env vo.Env, conversationID, newConversationID int64) error 
+======= +>>>>>>> a86ea8d1 (feat(backend):workflow support conversation manager & add conversation/message nodes) } diff --git a/backend/domain/workflow/entity/node_type_literal.go b/backend/domain/workflow/entity/node_type_literal.go new file mode 100644 index 000000000..ad599ce1e --- /dev/null +++ b/backend/domain/workflow/entity/node_type_literal.go @@ -0,0 +1,1119 @@ +/* + * Copyright 2025 coze-dev Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package entity + +import ( + "fmt" + + "github.com/coze-dev/coze-studio/backend/pkg/i18n" + "github.com/coze-dev/coze-studio/backend/pkg/lang/ternary" +) + +var Categories = []Category{ + { + Key: "", // this is the default category. 
some of the most important nodes belong here, such as LLM, plugin, sub-workflow + Name: "", + EnUSName: "", + }, + { + Key: "logic", + Name: "业务逻辑", + EnUSName: "Logic", + }, + { + Key: "input&output", + Name: "输入&输出", + EnUSName: "Input&Output", + }, + { + Key: "database", + Name: "数据库", + EnUSName: "Database", + }, + { + Key: "data", + Name: "知识库&数据", + EnUSName: "Data", + }, + { + Key: "image", + Name: "图像处理", + EnUSName: "Image", + }, + { + Key: "audio&video", + Name: "音视频处理", + EnUSName: "Audio&Video", + }, + { + Key: "utilities", + Name: "组件", + EnUSName: "Utilities", + }, + { + Key: "conversation_management", + Name: "会话管理", + EnUSName: "Conversation management", + }, + { + Key: "conversation_history", + Name: "会话历史", + EnUSName: "Conversation history", + }, + { + Key: "message", + Name: "消息", + EnUSName: "Message", + }, +} + +// NodeTypeMetas holds the metadata for all available node types. +// It is initialized with built-in types and potentially extended by loading from external sources. 
+var NodeTypeMetas = []*NodeTypeMeta{ + { + ID: 1, + Name: "开始", + Type: NodeTypeEntry, + Category: "input&output", // Mapped from cate_list + Desc: "工作流的起始节点,用于设定启动工作流需要的信息", + Color: "#5C62FF", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Start-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Start", + EnUSDescription: "The starting node of the workflow, used to set the information needed to initiate the workflow.", + }, + { + ID: 2, + Name: "结束", + Type: NodeTypeExit, + Category: "input&output", // Mapped from cate_list + Desc: "工作流的最终节点,用于返回工作流运行后的结果信息", + Color: "#5C62FF", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-End-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + CallbackEnabled: true, + InputSourceAware: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true, Transform: true}, + StreamSourceEOFAware: true, + IncrementalOutput: true, + }, + EnUSName: "End", + EnUSDescription: "The final node of the workflow, used to return the result information after the workflow runs.", + }, + { + ID: 3, + Name: "大模型", + Type: NodeTypeLLM, + Category: "", // Mapped from cate_list + Desc: "调用大语言模型,使用变量和提示词生成回复", + Color: "#5C62FF", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-LLM-v2.jpg", + SupportBatch: true, // supportBatch: 2 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 3 * 60 * 1000, // 3 minutes + PreFillZero: true, + PostFillNil: true, + CallbackEnabled: true, + InputSourceAware: true, + MayUseChatModel: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true, Stream: true}, + }, + EnUSName: "LLM", + EnUSDescription: "Invoke the large language model, 
generate responses using variables and prompt words.", + }, + + { + ID: 4, + Name: "插件", + Type: NodeTypePlugin, + Category: "", // Mapped from cate_list + Desc: "通过添加工具访问实时数据和执行外部操作", + Color: "#CA61FF", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Plugin-v2.jpg", + SupportBatch: true, // supportBatch: 2 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 3 * 60 * 1000, // 3 minutes + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Plugin", + EnUSDescription: "Used to access external real-time data and perform operations", + }, + { + ID: 5, + Name: "代码", + Type: NodeTypeCodeRunner, + Category: "logic", // Mapped from cate_list + Desc: "编写代码,处理输入变量来生成返回值", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Code-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Code", + EnUSDescription: "Write code to process input variables to generate return values.", + }, + { + ID: 6, + Name: "知识库检索", + Type: NodeTypeKnowledgeRetriever, + Category: "data", // Mapped from cate_list + Desc: "在选定的知识中,根据输入变量召回最匹配的信息,并以列表形式返回", + Color: "#FF811A", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-KnowledgeQuery-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Knowledge retrieval", + EnUSDescription: "In the selected knowledge, the best matching information is recalled based on the input variable and 
returned as an Array.", + }, + { + ID: 8, + Name: "选择器", + Type: NodeTypeSelector, + Category: "logic", // Mapped from cate_list + Desc: "连接多个下游分支,若设定的条件成立则仅运行对应的分支,若均不成立则只运行“否则”分支", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Condition-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Condition", + EnUSDescription: "Connect multiple downstream branches. Only the corresponding branch will be executed if the set conditions are met. If none are met, only the 'else' branch will be executed.", + }, + { + ID: 9, + Name: "工作流", + Type: NodeTypeSubWorkflow, + Category: "", // Mapped from cate_list + Desc: "集成已发布工作流,可以执行嵌套子任务", + Color: "#00B83E", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Workflow-v2.jpg", + SupportBatch: true, // supportBatch: 2 + ExecutableMeta: ExecutableMeta{ + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Workflow", + EnUSDescription: "Add published workflows to execute subtasks", + }, + { + ID: 12, + Name: "SQL自定义", + Type: NodeTypeDatabaseCustomSQL, + Category: "database", // Mapped from cate_list + Desc: "基于用户自定义的 SQL 完成对数据库的增删改查操作", + Color: "#FF811A", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Database-v2.jpg", + SupportBatch: false, // supportBatch: 2 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "SQL Customization", + EnUSDescription: "Complete the operations of adding, deleting, modifying and querying the database based on user-defined SQL", + }, + { + ID: 13, + Name: "输出", + Type: 
NodeTypeOutputEmitter, + Category: "input&output", // Mapped from cate_list + Desc: "节点从“消息”更名为“输出”,支持中间过程的消息输出,支持流式和非流式两种方式", + Color: "#5C62FF", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Output-v2.jpg", + SupportBatch: false, + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + CallbackEnabled: true, + InputSourceAware: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true, Stream: true}, + StreamSourceEOFAware: true, + IncrementalOutput: true, + }, + EnUSName: "Output", + EnUSDescription: "The node is renamed from \"message\" to \"output\", Supports message output in the intermediate process and streaming and non-streaming methods", + }, + { + ID: 15, + Name: "文本处理", + Type: NodeTypeTextProcessor, + Category: "utilities", // Mapped from cate_list + Desc: "用于处理多个字符串类型变量的格式", + Color: "#3071F2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-StrConcat-v2.jpg", + SupportBatch: false, // supportBatch: 2 + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + CallbackEnabled: true, + InputSourceAware: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Text Processing", + EnUSDescription: "The format used for handling multiple string-type variables.", + }, + { + ID: 18, + Name: "问答", + Type: NodeTypeQuestionAnswer, + Category: "utilities", // Mapped from cate_list + Desc: "支持中间向用户提问问题,支持预置选项提问和开放式问题提问两种方式", + Color: "#3071F2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Direct-Question-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + CallbackEnabled: true, + MayUseChatModel: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Question", + EnUSDescription: "Support asking 
questions to the user in the middle of the conversation, with both preset options and open-ended questions", + }, + { + ID: 19, + Name: "终止循环", + Type: NodeTypeBreak, + Category: "logic", // Mapped from cate_list + Desc: "用于立即终止当前所在的循环,跳出循环体", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Break-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Break", + EnUSDescription: "Used to immediately terminate the current loop and jump out of the loop", + }, + { + ID: 20, + Name: "设置变量", + Type: NodeTypeVariableAssignerWithinLoop, + Category: "logic", // Mapped from cate_list + Desc: "用于重置循环变量的值,使其下次循环使用重置后的值", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-LoopSetVariable-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Set Variable", + EnUSDescription: "Used to reset the value of the loop variable so that it uses the reset value in the next iteration", + }, + { + ID: 21, + Name: "循环", + Type: NodeTypeLoop, + Category: "logic", // Mapped from cate_list + Desc: "用于通过设定循环次数和逻辑,重复执行一系列任务", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Loop-v2.jpg", + SupportBatch: false, + ExecutableMeta: ExecutableMeta{ + IsComposite: true, + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Loop", + EnUSDescription: "Used to repeatedly execute a series of tasks by setting the number of iterations and logic", + }, + { + ID: 22, + Name: "意图识别", + Type: NodeTypeIntentDetector, + Category: 
"logic", // Mapped from cate_list + Desc: "用于用户输入的意图识别,并将其与预设意图选项进行匹配。", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Intent-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + CallbackEnabled: true, + MayUseChatModel: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Intent recognition", + EnUSDescription: "Used for recognizing the intent in user input and matching it with preset intent options.", + }, + { + ID: 27, + Name: "知识库写入", + Type: NodeTypeKnowledgeIndexer, + Category: "data", // Mapped from cate_list + Desc: "写入节点可以添加 文本类型 的知识库,仅可以添加一个知识库", + Color: "#FF811A", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-KnowledgeWriting-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Knowledge writing", + EnUSDescription: "The write node can add a knowledge base of type text. 
Only one knowledge base can be added.", + }, + { + ID: 28, + Name: "批处理", + Type: NodeTypeBatch, + Category: "logic", // Mapped from cate_list + Desc: "通过设定批量运行次数和逻辑,运行批处理体内的任务", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Batch-v2.jpg", + SupportBatch: false, // supportBatch: 1 (Corrected from previous assumption) + ExecutableMeta: ExecutableMeta{ + IsComposite: true, + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Batch", + EnUSDescription: "By setting the number of batch runs and logic, run the tasks in the batch body.", + }, + { + ID: 29, + Name: "继续循环", + Type: NodeTypeContinue, + Category: "logic", // Mapped from cate_list + Desc: "用于终止当前循环,执行下次循环", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Continue-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Continue", + EnUSDescription: "Used to immediately terminate the current loop and execute next loop", + }, + { + ID: 30, + Name: "输入", + Type: NodeTypeInputReceiver, + Category: "input&output", // Mapped from cate_list + Desc: "支持中间过程的信息输入", + Color: "#5C62FF", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Input-v2.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PostFillNil: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Input", + EnUSDescription: "Support intermediate information input", + }, + { + ID: 31, + Name: "注释", + Type: "", + Category: "", // Not found in cate_list + Desc: "comment_desc", // Placeholder from JSON + Color: "", + 
IconURL: "comment_icon", // Placeholder from JSON + SupportBatch: false, // supportBatch: 1 + EnUSName: "Comment", + }, + { + ID: 32, + Name: "变量聚合", + Type: NodeTypeVariableAggregator, + Category: "logic", // Mapped from cate_list + Desc: "对多个分支的输出进行聚合处理", + Color: "#00B2B2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/VariableMerge-icon.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PostFillNil: true, + CallbackEnabled: true, + InputSourceAware: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true, Transform: true}, + }, + EnUSName: "Variable Merge", + EnUSDescription: "Aggregate the outputs of multiple branches.", + }, + { + ID: 37, + Name: "查询消息列表", + Type: NodeTypeMessageList, + Category: "message", // Mapped from cate_list + Desc: "用于查询消息列表", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Conversation-List.jpeg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Query message list", + EnUSDescription: "Used to query the message list", + }, + { + ID: 38, + Name: "清空会话历史", + Type: NodeTypeClearConversationHistory, + Category: "conversation_history", // Mapped from cate_list + Desc: "用于清空会话历史,清空后LLM看到的会话历史为空", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Conversation-Delete.jpeg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Clear conversation history", + EnUSDescription: "Used to clear conversation history. 
After clearing, the conversation history visible to the LLM node will be empty.", + }, + + { + ID: 54, + Name: "查询会话历史", + Type: NodeTypeConversationHistory, + Category: "conversation_history", // Mapped from cate_list + Desc: "用于查询会话历史,返回LLM可见的会话消息", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-查询会话历史.jpg", + SupportBatch: false, + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Query Conversation History", + EnUSDescription: "Used to query conversation history, returns conversation messages visible to the LLM", + }, + + { + ID: 39, + Name: "创建会话", + Type: NodeTypeCreateConversation, + Category: "conversation_management", // Mapped from cate_list + Desc: "用于创建会话", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Conversation-Create.jpeg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Create Conversation", + EnUSDescription: "This node is used to create a conversation.", + }, + + { + ID: 51, + Name: "修改会话", + Type: NodeTypeConversationUpdate, + Category: "conversation_management", // Mapped from cate_list + Desc: "用于修改会话的名字", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-编辑会话.jpg", + SupportBatch: false, + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Edit Conversation", + EnUSDescription: "Used to modify the name of a conversation.", + }, + + { + ID: 52, + Name: "删除会话", + Type: NodeTypeConversationDelete, + Category: "conversation_management", // Mapped from 
cate_list + Desc: "用于删除会话", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-删除会话.jpg", + SupportBatch: false, + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Delete Conversation", + EnUSDescription: "Used to delete a conversation.", + }, + + { + ID: 40, + Name: "变量赋值", + Type: NodeTypeVariableAssigner, + Category: "data", // Mapped from cate_list + Desc: "用于给支持写入的变量赋值,包括应用变量、用户变量", + Color: "#FF811A", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/Variable.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Variable assign", + EnUSDescription: "Assigns values to variables that support the write operation, including app and user variables.", + }, + { + ID: 42, + Name: "更新数据", + Type: NodeTypeDatabaseUpdate, + Category: "database", // Mapped from cate_list + Desc: "修改表中已存在的数据记录,用户指定更新条件和内容来更新数据", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-database-update.jpg", // Corrected Icon URL from JSON + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Update Data", + EnUSDescription: "Modify the existing data records in the table, and the user specifies the update conditions and contents to update the data", + }, + { + ID: 43, + Name: "查询数据", // Corrected Name from JSON (was "插入数据") + Type: NodeTypeDatabaseQuery, + Category: "database", // Mapped from cate_list + Desc: "从表获取数据,用户可定义查询条件、选择列等,输出符合条件的数据", // Corrected Desc from JSON + Color: "#F2B600", + 
IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icaon-database-select.jpg", // Corrected Icon URL from JSON + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Query Data", + EnUSDescription: "Query data from the table, and the user can define query conditions, select columns, etc., and output the data that meets the conditions", + }, + { + ID: 44, + Name: "删除数据", + Type: NodeTypeDatabaseDelete, + Category: "database", // Mapped from cate_list + Desc: "从表中删除数据记录,用户指定删除条件来删除符合条件的记录", // Corrected Desc from JSON + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-database-delete.jpg", // Corrected Icon URL from JSON + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Delete Data", + EnUSDescription: "Delete data records from the table, and the user specifies the deletion conditions to delete the records that meet the conditions", + }, + { + ID: 45, + Name: "HTTP 请求", + Type: NodeTypeHTTPRequester, + Category: "utilities", // Mapped from cate_list + Desc: "用于发送API请求,从接口返回数据", // Corrected Desc from JSON + Color: "#3071F2", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-HTTP.png", // Corrected Icon URL from JSON + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "HTTP request", + 
EnUSDescription: "It is used to send API requests and return data from the interface.", + }, + { + ID: 46, + Name: "新增数据", // Corrected Name from JSON (was "查询数据") + Type: NodeTypeDatabaseInsert, + Category: "database", // Mapped from cate_list + Desc: "向表添加新数据记录,用户输入数据内容后插入数据库", // Corrected Desc from JSON + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-database-insert.jpg", // Corrected Icon URL from JSON + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Add Data", + EnUSDescription: "Add new data records to the table, and insert them into the database after the user enters the data content", + }, + { + ID: 53, + Name: "查询会话列表", + Type: NodeTypeConversationList, + Category: "conversation_management", + Desc: "用于查询所有会话,包含静态会话、动态会话", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-查询会话.jpg", + SupportBatch: false, + ExecutableMeta: ExecutableMeta{ + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Query Conversation List", + EnUSDescription: "Used to query all conversations, including static conversations and dynamic conversations", + }, + { + ID: 55, + Name: "创建消息", + Type: NodeTypeCreateMessage, + Category: "message", + Desc: "用于创建消息", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-创建消息.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Create message", + EnUSDescription: "Used to create messages", + }, + + { + ID: 56, + Name: "修改消息", + 
Type: NodeTypeEditMessage, + Category: "message", + Desc: "用于修改消息", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-修改消息.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Edit message", + EnUSDescription: "Used to edit messages", + }, + + { + ID: 57, + Name: "删除消息", + Type: NodeTypeDeleteMessage, + Category: "message", + Desc: "用于删除消息", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-删除消息.jpg", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Delete message", + EnUSDescription: "Used to delete messages", + }, + + { + ID: 58, + Name: "JSON 序列化", + Type: NodeTypeJsonSerialization, + Category: "utilities", + Desc: "用于把变量转化为JSON字符串", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-to_json.png", + SupportBatch: false, + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + CallbackEnabled: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "JSON serialization", + EnUSDescription: "Convert variable to JSON string", + }, + { + ID: 59, + Name: "JSON 反序列化", + Type: NodeTypeJsonDeserialization, + Category: "utilities", + Desc: "用于将JSON字符串解析为变量", + Color: "#F2B600", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-from_json.png", + SupportBatch: false, + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + CallbackEnabled: true, + 
StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "JSON deserialization", + EnUSDescription: "Parse JSON string to variable", + }, + { + ID: 60, + Name: "知识库删除", + Type: NodeTypeKnowledgeDeleter, + Category: "data", // Mapped from cate_list + Desc: "用于删除知识库中的文档", + Color: "#FF811A", + IconURL: "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icons-dataset-delete.png", + SupportBatch: false, // supportBatch: 1 + ExecutableMeta: ExecutableMeta{ + DefaultTimeoutMS: 60 * 1000, // 1 minute + PreFillZero: true, + PostFillNil: true, + StreamingParadigms: map[StreamingParadigm]bool{Invoke: true}, + }, + EnUSName: "Knowledge delete", + EnUSDescription: "The delete node can delete a document in knowledge base.", + }, + // --- End of nodes parsed from template_list --- +} + +// PluginNodeMetas holds metadata for specific plugin API entity. +var PluginNodeMetas []*PluginNodeMeta + +// PluginCategoryMetas holds metadata for plugin category entity. 
+var PluginCategoryMetas []*PluginCategoryMeta + +func NodeMetaByNodeType(t NodeType) *NodeTypeMeta { + for _, meta := range NodeTypeMetas { + if meta.Type == t { + return meta + } + } + + return nil +} + +const defaultZhCNInitCanvasJsonSchema = `{ + "nodes": [ + { + "id": "100001", + "type": "1", + "meta": { + "position": { + "x": 0, + "y": 0 + } + }, + "data": { + "nodeMeta": { + "description": "工作流的起始节点,用于设定启动工作流需要的信息", + "icon": "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Start.png", + "subTitle": "", + "title": "开始" + }, + "outputs": [ + { + "type": "string", + "name": "input", + "required": false + } + ], + "trigger_parameters": [ + { + "type": "string", + "name": "input", + "required": false + } + ] + } + }, + { + "id": "900001", + "type": "2", + "meta": { + "position": { + "x": 1000, + "y": 0 + } + }, + "data": { + "nodeMeta": { + "description": "工作流的最终节点,用于返回工作流运行后的结果信息", + "icon": "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-End.png", + "subTitle": "", + "title": "结束" + }, + "inputs": { + "terminatePlan": "returnVariables", + "inputParameters": [ + { + "name": "output", + "input": { + "type": "string", + "value": { + "type": "ref", + "content": { + "source": "block-output", + "blockID": "", + "name": "" + } + } + } + } + ] + } + } + } + ], + "edges": [], + "versions": { + "loop": "v2" + } +}` + +const defaultEnUSInitCanvasJsonSchema = `{ + "nodes": [ + { + "id": "100001", + "type": "1", + "meta": { + "position": { + "x": 0, + "y": 0 + } + }, + "data": { + "nodeMeta": { + "description": "The starting node of the workflow, used to set the information needed to initiate the workflow.", + "icon": "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Start.png", + "subTitle": "", + "title": "Start" + }, + "outputs": [ + { + "type": "string", + "name": "input", + "required": false + } + ], + "trigger_parameters": [ + { + 
"type": "string", + "name": "input", + "required": false + } + ] + } + }, + { + "id": "900001", + "type": "2", + "meta": { + "position": { + "x": 1000, + "y": 0 + } + }, + "data": { + "nodeMeta": { + "description": "The final node of the workflow, used to return the result information after the workflow runs.", + "icon": "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-End.png", + "subTitle": "", + "title": "End" + }, + "inputs": { + "terminatePlan": "returnVariables", + "inputParameters": [ + { + "name": "output", + "input": { + "type": "string", + "value": { + "type": "ref", + "content": { + "source": "block-output", + "blockID": "", + "name": "" + } + } + } + } + ] + } + } + } + ], + "edges": [], + "versions": { + "loop": "v2" + } +}` + +const defaultZhCNInitCanvasJsonSchemaChat = `{ + "nodes": [{ + "id": "100001", + "type": "1", + "meta": { + "position": { + "x": 0, + "y": 0 + } + }, + "data": { + "outputs": [{ + "type": "string", + "name": "USER_INPUT", + "required": true + }, { + "type": "string", + "name": "CONVERSATION_NAME", + "required": false, + "description": "本次请求绑定的会话,会自动写入消息、会从该会话读对话历史。", + "defaultValue": "%s" + }], + "nodeMeta": { + "title": "开始", + "icon": "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Start.png", + "description": "工作流的起始节点,用于设定启动工作流需要的信息", + "subTitle": "" + } + } + }, { + "id": "900001", + "type": "2", + "meta": { + "position": { + "x": 1000, + "y": 0 + } + }, + "data": { + "nodeMeta": { + "title": "结束", + "icon": "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-End.png", + "description": "工作流的最终节点,用于返回工作流运行后的结果信息", + "subTitle": "" + }, + "inputs": { + "terminatePlan": "useAnswerContent", + "streamingOutput": true, + "inputParameters": [{ + "name": "output", + "input": { + "type": "string", + "value": { + "type": "ref" + } + } + }] + } + } + }] +}` +const defaultEnUSInitCanvasJsonSchemaChat 
= `{ + "nodes": [{ + "id": "100001", + "type": "1", + "meta": { + "position": { + "x": 0, + "y": 0 + } + }, + "data": { + "outputs": [{ + "type": "string", + "name": "USER_INPUT", + "required": true + }, { + "type": "string", + "name": "CONVERSATION_NAME", + "required": false, + "description": "The conversation bound to this request will automatically write messages and read conversation history from that conversation.", + "defaultValue": "%s" + }], + "nodeMeta": { + "title": "Start", + "icon": "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-Start.png", + "description": "The starting node of the workflow, used to set the information needed to initiate the workflow.", + "subTitle": "" + } + } + }, { + "id": "900001", + "type": "2", + "meta": { + "position": { + "x": 1000, + "y": 0 + } + }, + "data": { + "nodeMeta": { + "title": "End", + "icon": "https://lf3-static.bytednsdoc.com/obj/eden-cn/dvsmryvd_avi_dvsm/ljhwZthlaukjlkulzlp/icon/icon-End.png", + "description": "The final node of the workflow, used to return the result information after the workflow runs.", + "subTitle": "" + }, + "inputs": { + "terminatePlan": "useAnswerContent", + "streamingOutput": true, + "inputParameters": [{ + "name": "output", + "input": { + "type": "string", + "value": { + "type": "ref" + } + } + }] + } + } + }] +}` + +func GetDefaultInitCanvasJsonSchema(locale i18n.Locale) string { + return ternary.IFElse(locale == i18n.LocaleEN, defaultEnUSInitCanvasJsonSchema, defaultZhCNInitCanvasJsonSchema) +} + +func GetDefaultInitCanvasJsonSchemaChat(locale i18n.Locale, name string) string { + return ternary.IFElse(locale == i18n.LocaleEN, fmt.Sprintf(defaultEnUSInitCanvasJsonSchemaChat, name), fmt.Sprintf(defaultZhCNInitCanvasJsonSchemaChat, name)) +} diff --git a/backend/domain/workflow/interface.go b/backend/domain/workflow/interface.go index d21414751..2d36e4529 100644 --- a/backend/domain/workflow/interface.go +++ 
b/backend/domain/workflow/interface.go @@ -65,6 +65,7 @@ type Service interface { SyncRelatedWorkflowResources(ctx context.Context, appID int64, relatedWorkflows map[int64]entity.IDVersionPair, related vo.ExternalResourceRelated) error ConversationService + BindConvRelatedInfo(ctx context.Context, convID int64, info entity.ConvRelatedInfo) error GetConvRelatedInfo(ctx context.Context, convID int64) (*entity.ConvRelatedInfo, bool, func() error, error) } diff --git a/backend/domain/workflow/internal/canvas/adaptor/to_schema.go b/backend/domain/workflow/internal/canvas/adaptor/to_schema.go index 651c05966..1bde1c6b9 100644 --- a/backend/domain/workflow/internal/canvas/adaptor/to_schema.go +++ b/backend/domain/workflow/internal/canvas/adaptor/to_schema.go @@ -23,13 +23,17 @@ import ( "runtime/debug" "strconv" "strings" + "time" einoCompose "github.com/cloudwego/eino/compose" + "github.com/spf13/cast" "github.com/coze-dev/coze-studio/backend/domain/workflow" + "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/model" "github.com/coze-dev/coze-studio/backend/domain/workflow/entity" "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo" "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/canvas/convert" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/compose" "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes" "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/batch" "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/code" @@ -386,6 +390,1094 @@ func toSubWorkflowNodeSchema(ctx context.Context, n *vo.Node) (*schema.NodeSchem return ns, nil } +func toIntentDetectorSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeIntentDetector, + Name: n.Data.Meta.Title, + } + + param := n.Data.Inputs.LLMParam + if param == nil { + return nil, fmt.Errorf("intent detector 
node's llmParam is nil") + } + + llmParam, ok := param.(vo.IntentDetectorLLMParam) + if !ok { + return nil, fmt.Errorf("llm node's llmParam must be LLMParam, got %v", llmParam) + } + + paramBytes, err := sonic.Marshal(param) + if err != nil { + return nil, err + } + var intentDetectorConfig = &vo.IntentDetectorLLMConfig{} + + err = sonic.Unmarshal(paramBytes, &intentDetectorConfig) + if err != nil { + return nil, err + } + + modelLLMParams := &model.LLMParams{} + modelLLMParams.ModelType = int64(intentDetectorConfig.ModelType) + modelLLMParams.ModelName = intentDetectorConfig.ModelName + modelLLMParams.TopP = intentDetectorConfig.TopP + modelLLMParams.Temperature = intentDetectorConfig.Temperature + modelLLMParams.MaxTokens = intentDetectorConfig.MaxTokens + modelLLMParams.ResponseFormat = model.ResponseFormat(intentDetectorConfig.ResponseFormat) + modelLLMParams.SystemPrompt = intentDetectorConfig.SystemPrompt.Value.Content.(string) + + ns.SetConfigKV("LLMParams", modelLLMParams) + ns.SetConfigKV("SystemPrompt", modelLLMParams.SystemPrompt) + + var intents = make([]string, 0, len(n.Data.Inputs.Intents)) + for _, it := range n.Data.Inputs.Intents { + intents = append(intents, it.Name) + } + ns.SetConfigKV("Intents", intents) + + if n.Data.Inputs.Mode == "top_speed" { + ns.SetConfigKV("IsFastMode", true) + } + + if err = SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err = SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toDatabaseCustomSQLSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeDatabaseCustomSQL, + Name: n.Data.Meta.Title, + } + + dsList := n.Data.Inputs.DatabaseInfoList + if len(dsList) == 0 { + return nil, fmt.Errorf("database info is requird") + } + databaseInfo := dsList[0] + + dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64) + if err != nil { + return nil, err + } + 
ns.SetConfigKV("DatabaseInfoID", dsID) + + sql := n.Data.Inputs.SQL + if len(sql) == 0 { + return nil, fmt.Errorf("sql is requird") + } + + ns.SetConfigKV("SQLTemplate", sql) + + if err = SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err = SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toDatabaseQuerySchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeDatabaseQuery, + Name: n.Data.Meta.Title, + } + + dsList := n.Data.Inputs.DatabaseInfoList + if len(dsList) == 0 { + return nil, fmt.Errorf("database info is requird") + } + databaseInfo := dsList[0] + + dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64) + if err != nil { + return nil, err + } + ns.SetConfigKV("DatabaseInfoID", dsID) + + selectParam := n.Data.Inputs.SelectParam + ns.SetConfigKV("Limit", selectParam.Limit) + + queryFields := make([]string, 0) + for _, v := range selectParam.FieldList { + queryFields = append(queryFields, strconv.FormatInt(v.FieldID, 10)) + } + ns.SetConfigKV("QueryFields", queryFields) + + orderClauses := make([]*database.OrderClause, 0, len(selectParam.OrderByList)) + for _, o := range selectParam.OrderByList { + orderClauses = append(orderClauses, &database.OrderClause{ + FieldID: strconv.FormatInt(o.FieldID, 10), + IsAsc: o.IsAsc, + }) + } + ns.SetConfigKV("OrderClauses", orderClauses) + + clauseGroup := &database.ClauseGroup{} + + if selectParam.Condition != nil { + clauseGroup, err = buildClauseGroupFromCondition(selectParam.Condition) + if err != nil { + return nil, err + } + } + + ns.SetConfigKV("ClauseGroup", clauseGroup) + + if err = SetDatabaseInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err = SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toDatabaseInsertSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, 
error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeDatabaseInsert, + Name: n.Data.Meta.Title, + } + + dsList := n.Data.Inputs.DatabaseInfoList + if len(dsList) == 0 { + return nil, fmt.Errorf("database info is requird") + } + databaseInfo := dsList[0] + + dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64) + if err != nil { + return nil, err + } + ns.SetConfigKV("DatabaseInfoID", dsID) + + if err = SetDatabaseInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err = SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toDatabaseDeleteSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeDatabaseDelete, + Name: n.Data.Meta.Title, + } + + dsList := n.Data.Inputs.DatabaseInfoList + if len(dsList) == 0 { + return nil, fmt.Errorf("database info is requird") + } + databaseInfo := dsList[0] + + dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64) + if err != nil { + return nil, err + } + ns.SetConfigKV("DatabaseInfoID", dsID) + + deleteParam := n.Data.Inputs.DeleteParam + + clauseGroup, err := buildClauseGroupFromCondition(&deleteParam.Condition) + if err != nil { + return nil, err + } + ns.SetConfigKV("ClauseGroup", clauseGroup) + + if err = SetDatabaseInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err = SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toDatabaseUpdateSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeDatabaseUpdate, + Name: n.Data.Meta.Title, + } + + dsList := n.Data.Inputs.DatabaseInfoList + if len(dsList) == 0 { + return nil, fmt.Errorf("database info is requird") + } + databaseInfo := dsList[0] + + dsID, err := strconv.ParseInt(databaseInfo.DatabaseInfoID, 10, 64) + if err 
!= nil { + return nil, err + } + ns.SetConfigKV("DatabaseInfoID", dsID) + + updateParam := n.Data.Inputs.UpdateParam + if updateParam == nil { + return nil, fmt.Errorf("update param is requird") + } + clauseGroup, err := buildClauseGroupFromCondition(&updateParam.Condition) + if err != nil { + return nil, err + } + ns.SetConfigKV("ClauseGroup", clauseGroup) + if err = SetDatabaseInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err = SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toHttpRequesterSchema(n *vo.Node, opts ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeHTTPRequester, + Name: n.Data.Meta.Title, + } + option := &option{} + for _, opt := range opts { + opt(option) + } + + implicitNodeDependencies := option.implicitNodeDependencies + + inputs := n.Data.Inputs + + md5FieldMapping := &httprequester.MD5FieldMapping{} + + method := inputs.APIInfo.Method + ns.SetConfigKV("Method", method) + url := inputs.APIInfo.URL + ns.SetConfigKV("URLConfig", httprequester.URLConfig{ + Tpl: strings.TrimSpace(url), + }) + + urlVars := extractBracesContent(url) + md5FieldMapping.SetURLFields(urlVars...) + + md5FieldMapping.SetHeaderFields(slices.Transform(inputs.Headers, func(a *vo.Param) string { + return a.Name + })...) + + md5FieldMapping.SetParamFields(slices.Transform(inputs.Params, func(a *vo.Param) string { + return a.Name + })...) 
+ + if inputs.Auth != nil && inputs.Auth.AuthOpen { + auth := &httprequester.AuthenticationConfig{} + ty, err := ConvertAuthType(inputs.Auth.AuthType) + if err != nil { + return nil, err + } + auth.Type = ty + location, err := ConvertLocation(inputs.Auth.AuthData.CustomData.AddTo) + if err != nil { + return nil, err + } + auth.Location = location + + ns.SetConfigKV("AuthConfig", auth) + + } + + bodyConfig := httprequester.BodyConfig{} + + bodyConfig.BodyType = httprequester.BodyType(inputs.Body.BodyType) + switch httprequester.BodyType(inputs.Body.BodyType) { + case httprequester.BodyTypeJSON: + jsonTpl := inputs.Body.BodyData.Json + bodyConfig.TextJsonConfig = &httprequester.TextJsonConfig{ + Tpl: jsonTpl, + } + jsonVars := extractBracesContent(jsonTpl) + md5FieldMapping.SetBodyFields(jsonVars...) + case httprequester.BodyTypeFormData: + bodyConfig.FormDataConfig = &httprequester.FormDataConfig{ + FileTypeMapping: map[string]bool{}, + } + formDataVars := make([]string, 0) + for i := range inputs.Body.BodyData.FormData.Data { + p := inputs.Body.BodyData.FormData.Data[i] + if p.Input.Type == vo.VariableTypeString && p.Input.AssistType > vo.AssistTypeNotSet && p.Input.AssistType < vo.AssistTypeTime { + bodyConfig.FormDataConfig.FileTypeMapping[p.Name] = true + formDataVars = append(formDataVars, p.Name) + } + } + md5FieldMapping.SetBodyFields(formDataVars...) + case httprequester.BodyTypeRawText: + TextTpl := inputs.Body.BodyData.RawText + bodyConfig.TextPlainConfig = &httprequester.TextPlainConfig{ + Tpl: TextTpl, + } + textPlainVars := extractBracesContent(TextTpl) + md5FieldMapping.SetBodyFields(textPlainVars...) + case httprequester.BodyTypeFormURLEncoded: + formURLEncodedVars := make([]string, 0) + for _, p := range inputs.Body.BodyData.FormURLEncoded { + formURLEncodedVars = append(formURLEncodedVars, p.Name) + } + md5FieldMapping.SetBodyFields(formURLEncodedVars...) 
+ } + ns.SetConfigKV("BodyConfig", bodyConfig) + ns.SetConfigKV("MD5FieldMapping", *md5FieldMapping) + + if inputs.Setting != nil { + ns.SetConfigKV("Timeout", time.Duration(inputs.Setting.Timeout)*time.Second) + ns.SetConfigKV("RetryTimes", uint64(inputs.Setting.RetryTimes)) + } + + if err := SetHttpRequesterInputsForNodeSchema(n, ns, implicitNodeDependencies); err != nil { + return nil, err + } + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + return ns, nil +} + +func toKnowledgeIndexerSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeKnowledgeIndexer, + Name: n.Data.Meta.Title, + } + + inputs := n.Data.Inputs + datasetListInfoParam := inputs.DatasetParam[0] + datasetIDs := datasetListInfoParam.Input.Value.Content.([]any) + if len(datasetIDs) == 0 { + return nil, fmt.Errorf("dataset ids is required") + } + knowledgeID, err := cast.ToInt64E(datasetIDs[0]) + if err != nil { + return nil, err + } + + ns.SetConfigKV("KnowledgeID", knowledgeID) + ps := inputs.StrategyParam.ParsingStrategy + parseMode, err := ConvertParsingType(ps.ParsingType) + if err != nil { + return nil, err + } + parsingStrategy := &knowledge.ParsingStrategy{ + ParseMode: parseMode, + ImageOCR: ps.ImageOcr, + ExtractImage: ps.ImageExtraction, + ExtractTable: ps.TableExtraction, + } + + ns.SetConfigKV("ParsingStrategy", parsingStrategy) + cs := inputs.StrategyParam.ChunkStrategy + chunkType, err := ConvertChunkType(cs.ChunkType) + if err != nil { + return nil, err + } + chunkingStrategy := &knowledge.ChunkingStrategy{ + ChunkType: chunkType, + Separator: cs.Separator, + ChunkSize: cs.MaxToken, + Overlap: int64(cs.Overlap * float64(cs.MaxToken)), + } + ns.SetConfigKV("ChunkingStrategy", chunkingStrategy) + + if err = SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err = SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + 
return ns, nil +} + +func toKnowledgeRetrieverSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeKnowledgeRetriever, + Name: n.Data.Meta.Title, + } + + inputs := n.Data.Inputs + datasetListInfoParam := inputs.DatasetParam[0] + datasetIDs := datasetListInfoParam.Input.Value.Content.([]any) + knowledgeIDs := make([]int64, 0, len(datasetIDs)) + for _, id := range datasetIDs { + k, err := cast.ToInt64E(id) + if err != nil { + return nil, err + } + knowledgeIDs = append(knowledgeIDs, k) + } + ns.SetConfigKV("KnowledgeIDs", knowledgeIDs) + + retrievalStrategy := &knowledge.RetrievalStrategy{} + + var getDesignatedParamContent = func(name string) (any, bool) { + for _, param := range inputs.DatasetParam { + if param.Name == name { + return param.Input.Value.Content, true + } + } + return nil, false + + } + + if content, ok := getDesignatedParamContent("topK"); ok { + topK, err := cast.ToInt64E(content) + if err != nil { + return nil, err + } + retrievalStrategy.TopK = &topK + } + + if content, ok := getDesignatedParamContent("useRerank"); ok { + useRerank, err := cast.ToBoolE(content) + if err != nil { + return nil, err + } + retrievalStrategy.EnableRerank = useRerank + } + + if content, ok := getDesignatedParamContent("useRewrite"); ok { + useRewrite, err := cast.ToBoolE(content) + if err != nil { + return nil, err + } + retrievalStrategy.EnableQueryRewrite = useRewrite + } + + if content, ok := getDesignatedParamContent("isPersonalOnly"); ok { + isPersonalOnly, err := cast.ToBoolE(content) + if err != nil { + return nil, err + } + retrievalStrategy.IsPersonalOnly = isPersonalOnly + } + + if content, ok := getDesignatedParamContent("useNl2sql"); ok { + useNl2sql, err := cast.ToBoolE(content) + if err != nil { + return nil, err + } + retrievalStrategy.EnableNL2SQL = useNl2sql + } + + if content, ok := getDesignatedParamContent("minScore"); ok { + minScore, err := 
cast.ToFloat64E(content) + if err != nil { + return nil, err + } + retrievalStrategy.MinScore = &minScore + } + + if content, ok := getDesignatedParamContent("strategy"); ok { + strategy, err := cast.ToInt64E(content) + if err != nil { + return nil, err + } + searchType, err := ConvertRetrievalSearchType(strategy) + if err != nil { + return nil, err + } + retrievalStrategy.SearchType = searchType + } + + ns.SetConfigKV("RetrievalStrategy", retrievalStrategy) + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toKnowledgeDeleterSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeKnowledgeDeleter, + Name: n.Data.Meta.Title, + } + + inputs := n.Data.Inputs + datasetListInfoParam := inputs.DatasetParam[0] + datasetIDs := datasetListInfoParam.Input.Value.Content.([]any) + if len(datasetIDs) == 0 { + return nil, fmt.Errorf("dataset ids is required") + } + knowledgeID, err := cast.ToInt64E(datasetIDs[0]) + if err != nil { + return nil, err + } + + ns.SetConfigKV("KnowledgeID", knowledgeID) + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toVariableAssignerSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeVariableAssigner, + Name: n.Data.Meta.Title, + } + + var pairs = make([]*variableassigner.Pair, 0, len(n.Data.Inputs.InputParameters)) + for i, param := range n.Data.Inputs.InputParameters { + if param.Left == nil || param.Input == nil { + return nil, fmt.Errorf("variable assigner node's param left or input is nil") + } + + leftSources, err := CanvasBlockInputToFieldInfo(param.Left, 
einoCompose.FieldPath{fmt.Sprintf("left_%d", i)}, n.Parent()) + if err != nil { + return nil, err + } + + if leftSources[0].Source.Ref == nil { + return nil, fmt.Errorf("variable assigner node's param left source ref is nil") + } + + if leftSources[0].Source.Ref.VariableType == nil { + return nil, fmt.Errorf("variable assigner node's param left source ref's variable type is nil") + } + + if *leftSources[0].Source.Ref.VariableType == vo.GlobalSystem { + return nil, fmt.Errorf("variable assigner node's param left's ref's variable type cannot be variable.GlobalSystem") + } + + inputSource, err := CanvasBlockInputToFieldInfo(param.Input, leftSources[0].Source.Ref.FromPath, n.Parent()) + if err != nil { + return nil, err + } + ns.AddInputSource(inputSource...) + pair := &variableassigner.Pair{ + Left: *leftSources[0].Source.Ref, + Right: inputSource[0].Path, + } + pairs = append(pairs, pair) + } + ns.Configs = pairs + + return ns, nil +} + +func toCodeRunnerSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeCodeRunner, + Name: n.Data.Meta.Title, + } + inputs := n.Data.Inputs + + code := inputs.Code + ns.SetConfigKV("Code", code) + + language, err := ConvertCodeLanguage(inputs.Language) + if err != nil { + return nil, err + } + ns.SetConfigKV("Language", language) + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toPluginSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypePlugin, + Name: n.Data.Meta.Title, + } + inputs := n.Data.Inputs + + apiParams := slices.ToMap(inputs.APIParams, func(e *vo.Param) (string, *vo.Param) { + return e.Name, e + }) + + ps, ok := apiParams["pluginID"] + if !ok { + return nil, fmt.Errorf("plugin id param is not found") + } 
+ + pID, err := strconv.ParseInt(ps.Input.Value.Content.(string), 10, 64) + + ns.SetConfigKV("PluginID", pID) + + ps, ok = apiParams["apiID"] + if !ok { + return nil, fmt.Errorf("plugin id param is not found") + } + + tID, err := strconv.ParseInt(ps.Input.Value.Content.(string), 10, 64) + if err != nil { + return nil, err + } + + ns.SetConfigKV("ToolID", tID) + + ps, ok = apiParams["pluginVersion"] + if !ok { + return nil, fmt.Errorf("plugin version param is not found") + } + version := ps.Input.Value.Content.(string) + ns.SetConfigKV("PluginVersion", version) + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toVariableAggregatorSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeVariableAggregator, + Name: n.Data.Meta.Title, + } + + ns.SetConfigKV("MergeStrategy", variableaggregator.FirstNotNullValue) + inputs := n.Data.Inputs + + groupToLen := make(map[string]int, len(inputs.VariableAggregator.MergeGroups)) + for i := range inputs.VariableAggregator.MergeGroups { + group := inputs.VariableAggregator.MergeGroups[i] + tInfo := &vo.TypeInfo{ + Type: vo.DataTypeObject, + Properties: make(map[string]*vo.TypeInfo), + } + ns.SetInputType(group.Name, tInfo) + for ii, v := range group.Variables { + name := strconv.Itoa(ii) + valueTypeInfo, err := CanvasBlockInputToTypeInfo(v) + if err != nil { + return nil, err + } + tInfo.Properties[name] = valueTypeInfo + sources, err := CanvasBlockInputToFieldInfo(v, einoCompose.FieldPath{group.Name, name}, n.Parent()) + if err != nil { + return nil, err + } + ns.AddInputSource(sources...) 
+ } + + length := len(group.Variables) + groupToLen[group.Name] = length + } + + groupOrder := make([]string, 0, len(groupToLen)) + for i := range inputs.VariableAggregator.MergeGroups { + group := inputs.VariableAggregator.MergeGroups[i] + groupOrder = append(groupOrder, group.Name) + } + + ns.SetConfigKV("GroupToLen", groupToLen) + ns.SetConfigKV("GroupOrder", groupOrder) + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + return ns, nil +} + +func toInputReceiverSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeInputReceiver, + Name: n.Data.Meta.Title, + } + + ns.SetConfigKV("OutputSchema", n.Data.Inputs.OutputSchema) + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toQASchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeQuestionAnswer, + Name: n.Data.Meta.Title, + } + + qaConf := n.Data.Inputs.QA + if qaConf == nil { + return nil, fmt.Errorf("qa config is nil") + } + ns.SetConfigKV("QuestionTpl", qaConf.Question) + + var llmParams *model.LLMParams + if n.Data.Inputs.LLMParam != nil { + llmParamBytes, err := sonic.Marshal(n.Data.Inputs.LLMParam) + if err != nil { + return nil, err + } + var qaLLMParams vo.QALLMParam + err = sonic.Unmarshal(llmParamBytes, &qaLLMParams) + if err != nil { + return nil, err + } + + llmParams, err = qaLLMParamsToLLMParams(qaLLMParams) + if err != nil { + return nil, err + } + + ns.SetConfigKV("LLMParams", llmParams) + } + + answerType, err := qaAnswerTypeToAnswerType(qaConf.AnswerType) + if err != nil { + return nil, err + } + ns.SetConfigKV("AnswerType", answerType) + + var choiceType qa.ChoiceType + if len(qaConf.OptionType) > 0 { + choiceType, err = qaOptionTypeToChoiceType(qaConf.OptionType) + if err != nil { + return nil, err + } + 
ns.SetConfigKV("ChoiceType", choiceType) + } + + if answerType == qa.AnswerByChoices { + switch choiceType { + case qa.FixedChoices: + var options []string + for _, option := range qaConf.Options { + options = append(options, option.Name) + } + ns.SetConfigKV("FixedChoices", options) + case qa.DynamicChoices: + inputSources, err := CanvasBlockInputToFieldInfo(qaConf.DynamicOption, einoCompose.FieldPath{qa.DynamicChoicesKey}, n.Parent()) + if err != nil { + return nil, err + } + ns.AddInputSource(inputSources...) + + inputTypes, err := CanvasBlockInputToTypeInfo(qaConf.DynamicOption) + if err != nil { + return nil, err + } + ns.SetInputType(qa.DynamicChoicesKey, inputTypes) + default: + return nil, fmt.Errorf("qa node is answer by options, but option type not provided") + } + } else if answerType == qa.AnswerDirectly { + ns.SetConfigKV("ExtractFromAnswer", qaConf.ExtractOutput) + if qaConf.ExtractOutput { + if llmParams == nil { + return nil, fmt.Errorf("qa node needs to extract from answer, but LLMParams not provided") + } + ns.SetConfigKV("AdditionalSystemPromptTpl", llmParams.SystemPrompt) + ns.SetConfigKV("MaxAnswerCount", qaConf.Limit) + if err = SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + } + } + + if err = SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toJSONSerializeSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeJsonSerialization, + Name: n.Data.Meta.Title, + } + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toJSONDeserializeSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeJsonDeserialization, + Name: n.Data.Meta.Title, + } + + if err := 
SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toCreateConversationSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeCreateConversation, + Name: n.Data.Meta.Title, + } + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toConversationUpdateSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeConversationUpdate, + Name: n.Data.Meta.Title, + } + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toConversationListSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeConversationList, + Name: n.Data.Meta.Title, + } + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toCreateMessageSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeCreateMessage, + Name: n.Data.Meta.Title, + } + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toMessageListSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeMessageList, + Name: n.Data.Meta.Title, + } + + 
if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toDeleteMessageSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeDeleteMessage, + Name: n.Data.Meta.Title, + } + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toEditMessageSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeEditMessage, + Name: n.Data.Meta.Title, + } + + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toConversationHistorySchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeConversationHistory, + Name: n.Data.Meta.Title, + } + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toClearConversationHistorySchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeClearConversationHistory, + Name: n.Data.Meta.Title, + } + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func toConversationDeleteSchema(n *vo.Node, _ ...OptionFn) (*compose.NodeSchema, error) { + ns := &compose.NodeSchema{ + Key: vo.NodeKey(n.ID), + Type: entity.NodeTypeConversationDelete, + Name: 
n.Data.Meta.Title, + } + if err := SetInputsForNodeSchema(n, ns); err != nil { + return nil, err + } + + if err := SetOutputTypesForNodeSchema(n, ns); err != nil { + return nil, err + } + + return ns, nil +} + +func buildClauseGroupFromCondition(condition *vo.DBCondition) (*database.ClauseGroup, error) { + clauseGroup := &database.ClauseGroup{} + if len(condition.ConditionList) == 1 { + params := condition.ConditionList[0] + clause, err := buildClauseFromParams(params) + if err != nil { + return nil, err + } + clauseGroup.Single = clause + } else { + relation, err := ConvertLogicTypeToRelation(condition.Logic) + if err != nil { + return nil, err + } + clauseGroup.Multi = &database.MultiClause{ + Clauses: make([]*database.Clause, 0, len(condition.ConditionList)), + Relation: relation, + } + for i := range condition.ConditionList { + params := condition.ConditionList[i] + clause, err := buildClauseFromParams(params) + if err != nil { + return nil, err + } + clauseGroup.Multi.Clauses = append(clauseGroup.Multi.Clauses, clause) + } + } + + return clauseGroup, nil +} + func PruneIsolatedNodes(nodes []*vo.Node, edges []*vo.Edge, parentNode *vo.Node) ([]*vo.Node, []*vo.Edge) { nodeDependencyCount := map[string]int{} if parentNode != nil { diff --git a/backend/domain/workflow/internal/compose/node_schema.go b/backend/domain/workflow/internal/compose/node_schema.go new file mode 100644 index 000000000..d291a377a --- /dev/null +++ b/backend/domain/workflow/internal/compose/node_schema.go @@ -0,0 +1,646 @@ +/* + * Copyright 2025 coze-dev Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package compose + +import ( + "context" + "fmt" + + "runtime/debug" + + "github.com/cloudwego/eino/compose" + + "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/model" + "github.com/coze-dev/coze-studio/backend/domain/workflow/entity" + "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/batch" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/code" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/conversation" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/database" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/emitter" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/entry" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/httprequester" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/intentdetector" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/json" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/knowledge" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/llm" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/loop" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/plugin" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/qa" + 
"github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/receiver" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/selector" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/subworkflow" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/textprocessor" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/variableaggregator" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/variableassigner" + "github.com/coze-dev/coze-studio/backend/pkg/ctxcache" + "github.com/coze-dev/coze-studio/backend/pkg/errorx" + "github.com/coze-dev/coze-studio/backend/pkg/safego" + "github.com/coze-dev/coze-studio/backend/types/errno" + + "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/variable" +) + +type NodeSchema struct { + Key vo.NodeKey `json:"key"` + Name string `json:"name"` + + Type entity.NodeType `json:"type"` + + // Configs are node specific configurations with pre-defined config key and config value. + // Will not participate in request-time field mapping, nor as node's static values. + // In a word, these Configs are INTERNAL to node's implementation, the workflow layer is not aware of them. 
+ Configs any `json:"configs,omitempty"` + + InputTypes map[string]*vo.TypeInfo `json:"input_types,omitempty"` + InputSources []*vo.FieldInfo `json:"input_sources,omitempty"` + + OutputTypes map[string]*vo.TypeInfo `json:"output_types,omitempty"` + OutputSources []*vo.FieldInfo `json:"output_sources,omitempty"` // only applicable to composite nodes such as Batch or Loop + + ExceptionConfigs *ExceptionConfig `json:"exception_configs,omitempty"` // generic configurations applicable to most nodes + StreamConfigs *StreamConfig `json:"stream_configs,omitempty"` + + SubWorkflowBasic *entity.WorkflowBasic `json:"sub_workflow_basic,omitempty"` + SubWorkflowSchema *WorkflowSchema `json:"sub_workflow_schema,omitempty"` + + Lambda *compose.Lambda // not serializable, used for internal test. +} + +type ExceptionConfig struct { + TimeoutMS int64 `json:"timeout_ms,omitempty"` // timeout in milliseconds, 0 means no timeout + MaxRetry int64 `json:"max_retry,omitempty"` // max retry times, 0 means no retry + ProcessType *vo.ErrorProcessType `json:"process_type,omitempty"` // error process type, 0 means throw error + DataOnErr string `json:"data_on_err,omitempty"` // data to return when error, effective when ProcessType==Default occurs +} + +type StreamConfig struct { + // whether this node has the ability to produce genuine streaming output. + // not include nodes that only passes stream down as they receives them + CanGeneratesStream bool `json:"can_generates_stream,omitempty"` + // whether this node prioritize streaming input over none-streaming input. + // not include nodes that can accept both and does not have preference. 
+ RequireStreamingInput bool `json:"can_process_stream,omitempty"` +} + +type Node struct { + Lambda *compose.Lambda +} + +func (s *NodeSchema) New(ctx context.Context, inner compose.Runnable[map[string]any, map[string]any], + sc *WorkflowSchema, deps *dependencyInfo) (_ *Node, err error) { + defer func() { + if panicErr := recover(); panicErr != nil { + err = safego.NewPanicErr(panicErr, debug.Stack()) + } + + if err != nil { + err = vo.WrapIfNeeded(errno.ErrCreateNodeFail, err, errorx.KV("node_name", s.Name), errorx.KV("cause", err.Error())) + } + }() + + if m := entity.NodeMetaByNodeType(s.Type); m != nil && m.InputSourceAware { + if err = s.SetFullSources(sc.GetAllNodes(), deps); err != nil { + return nil, err + } + } + + switch s.Type { + case entity.NodeTypeLambda: + if s.Lambda == nil { + return nil, fmt.Errorf("lambda is not defined for NodeTypeLambda") + } + + return &Node{Lambda: s.Lambda}, nil + case entity.NodeTypeLLM: + conf, err := s.ToLLMConfig(ctx) + if err != nil { + return nil, err + } + + l, err := llm.New(ctx, conf) + if err != nil { + return nil, err + } + + return invokableStreamableNodeWO(s, l.Chat, l.ChatStream, withCallbackOutputConverter(l.ToCallbackOutput)), nil + case entity.NodeTypeSelector: + conf := s.ToSelectorConfig() + + sl, err := selector.NewSelector(ctx, conf) + if err != nil { + return nil, err + } + + return invokableNode(s, sl.Select, withCallbackInputConverter(s.toSelectorCallbackInput(sc)), withCallbackOutputConverter(sl.ToCallbackOutput)), nil + case entity.NodeTypeBatch: + if inner == nil { + return nil, fmt.Errorf("inner workflow must not be nil when creating batch node") + } + + conf, err := s.ToBatchConfig(inner) + if err != nil { + return nil, err + } + + b, err := batch.NewBatch(ctx, conf) + if err != nil { + return nil, err + } + + return invokableNodeWO(s, b.Execute, withCallbackInputConverter(b.ToCallbackInput)), nil + case entity.NodeTypeVariableAggregator: + conf, err := s.ToVariableAggregatorConfig() + if err 
!= nil { + return nil, err + } + + va, err := variableaggregator.NewVariableAggregator(ctx, conf) + if err != nil { + return nil, err + } + + return invokableTransformableNode(s, va.Invoke, va.Transform, + withCallbackInputConverter(va.ToCallbackInput), + withCallbackOutputConverter(va.ToCallbackOutput), + withInit(va.Init)), nil + case entity.NodeTypeTextProcessor: + conf, err := s.ToTextProcessorConfig() + if err != nil { + return nil, err + } + + tp, err := textprocessor.NewTextProcessor(ctx, conf) + if err != nil { + return nil, err + } + + return invokableNode(s, tp.Invoke), nil + case entity.NodeTypeHTTPRequester: + conf, err := s.ToHTTPRequesterConfig() + if err != nil { + return nil, err + } + + hr, err := httprequester.NewHTTPRequester(ctx, conf) + if err != nil { + return nil, err + } + + return invokableNode(s, hr.Invoke, withCallbackInputConverter(hr.ToCallbackInput), withCallbackOutputConverter(hr.ToCallbackOutput)), nil + case entity.NodeTypeContinue: + i := func(ctx context.Context, in map[string]any) (map[string]any, error) { + return map[string]any{}, nil + } + return invokableNode(s, i), nil + case entity.NodeTypeBreak: + b, err := loop.NewBreak(ctx, &nodes.ParentIntermediateStore{}) + if err != nil { + return nil, err + } + return invokableNode(s, b.DoBreak), nil + case entity.NodeTypeVariableAssigner: + handler := variable.GetVariableHandler() + + conf, err := s.ToVariableAssignerConfig(handler) + if err != nil { + return nil, err + } + va, err := variableassigner.NewVariableAssigner(ctx, conf) + if err != nil { + return nil, err + } + + return invokableNode(s, va.Assign), nil + case entity.NodeTypeVariableAssignerWithinLoop: + conf, err := s.ToVariableAssignerInLoopConfig() + if err != nil { + return nil, err + } + + va, err := variableassigner.NewVariableAssignerInLoop(ctx, conf) + if err != nil { + return nil, err + } + + return invokableNode(s, va.Assign), nil + case entity.NodeTypeLoop: + conf, err := s.ToLoopConfig(inner) + if err != nil { 
+ return nil, err + } + l, err := loop.NewLoop(ctx, conf) + if err != nil { + return nil, err + } + return invokableNodeWO(s, l.Execute, withCallbackInputConverter(l.ToCallbackInput)), nil + case entity.NodeTypeQuestionAnswer: + conf, err := s.ToQAConfig(ctx) + if err != nil { + return nil, err + } + qA, err := qa.NewQuestionAnswer(ctx, conf) + if err != nil { + return nil, err + } + return invokableNode(s, qA.Execute, withCallbackOutputConverter(qA.ToCallbackOutput)), nil + case entity.NodeTypeInputReceiver: + conf, err := s.ToInputReceiverConfig() + if err != nil { + return nil, err + } + inputR, err := receiver.New(ctx, conf) + if err != nil { + return nil, err + } + return invokableNode(s, inputR.Invoke, withCallbackOutputConverter(inputR.ToCallbackOutput)), nil + case entity.NodeTypeOutputEmitter: + conf, err := s.ToOutputEmitterConfig(sc) + if err != nil { + return nil, err + } + + e, err := emitter.New(ctx, conf) + if err != nil { + return nil, err + } + + return invokableTransformableNode(s, e.Emit, e.EmitStream), nil + case entity.NodeTypeEntry: + conf, err := s.ToEntryConfig(ctx) + if err != nil { + return nil, err + } + e, err := entry.NewEntry(ctx, conf) + if err != nil { + return nil, err + } + return invokableNode(s, e.Invoke), nil + case entity.NodeTypeExit: + terminalPlan := mustGetKey[vo.TerminatePlan]("TerminalPlan", s.Configs) + if terminalPlan == vo.ReturnVariables { + i := func(ctx context.Context, in map[string]any) (map[string]any, error) { + if in == nil { + return map[string]any{}, nil + } + return in, nil + } + return invokableNode(s, i), nil + } + + conf, err := s.ToOutputEmitterConfig(sc) + if err != nil { + return nil, err + } + + e, err := emitter.New(ctx, conf) + if err != nil { + return nil, err + } + + return invokableTransformableNode(s, e.Emit, e.EmitStream), nil + case entity.NodeTypeDatabaseCustomSQL: + conf, err := s.ToDatabaseCustomSQLConfig() + if err != nil { + return nil, err + } + + sqlER, err := database.NewCustomSQL(ctx, 
conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, sqlER.Execute), nil
	case entity.NodeTypeDatabaseQuery:
		conf, err := s.ToDatabaseQueryConfig()
		if err != nil {
			return nil, err
		}

		query, err := database.NewQuery(ctx, conf)
		if err != nil {
			return nil, err
		}

		return invokableNode(s, query.Query, withCallbackInputConverter(query.ToCallbackInput)), nil
	case entity.NodeTypeDatabaseInsert:
		conf, err := s.ToDatabaseInsertConfig()
		if err != nil {
			return nil, err
		}

		insert, err := database.NewInsert(ctx, conf)
		if err != nil {
			return nil, err
		}

		return invokableNode(s, insert.Insert, withCallbackInputConverter(insert.ToCallbackInput)), nil
	case entity.NodeTypeDatabaseUpdate:
		conf, err := s.ToDatabaseUpdateConfig()
		if err != nil {
			return nil, err
		}
		update, err := database.NewUpdate(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, update.Update, withCallbackInputConverter(update.ToCallbackInput)), nil
	case entity.NodeTypeDatabaseDelete:
		conf, err := s.ToDatabaseDeleteConfig()
		if err != nil {
			return nil, err
		}
		del, err := database.NewDelete(ctx, conf)
		if err != nil {
			return nil, err
		}

		return invokableNode(s, del.Delete, withCallbackInputConverter(del.ToCallbackInput)), nil
	case entity.NodeTypeKnowledgeIndexer:
		conf, err := s.ToKnowledgeIndexerConfig()
		if err != nil {
			return nil, err
		}
		w, err := knowledge.NewKnowledgeIndexer(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, w.Store), nil
	case entity.NodeTypeKnowledgeRetriever:
		conf, err := s.ToKnowledgeRetrieveConfig()
		if err != nil {
			return nil, err
		}
		r, err := knowledge.NewKnowledgeRetrieve(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.Retrieve), nil
	case entity.NodeTypeKnowledgeDeleter:
		conf, err := s.ToKnowledgeDeleterConfig()
		if err != nil {
			return nil, err
		}
		r, err := knowledge.NewKnowledgeDeleter(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.Delete), nil
	case entity.NodeTypeCodeRunner:
		conf, err := s.ToCodeRunnerConfig()
		if err != nil {
			return nil, err
		}
		r, err := code.NewCodeRunner(ctx, conf)
		if err != nil {
			return nil, err
		}
		// Code runner needs a per-invocation context cache for callback output.
		initFn := func(ctx context.Context) (context.Context, error) {
			return ctxcache.Init(ctx), nil
		}
		return invokableNode(s, r.RunCode, withCallbackOutputConverter(r.ToCallbackOutput), withInit(initFn)), nil
	case entity.NodeTypePlugin:
		conf, err := s.ToPluginConfig()
		if err != nil {
			return nil, err
		}
		r, err := plugin.NewPlugin(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.Invoke), nil
	case entity.NodeTypeCreateConversation:
		conf, err := s.ToCreateConversationConfig()
		if err != nil {
			return nil, err
		}
		r, err := conversation.NewCreateConversation(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.Create), nil
	case entity.NodeTypeConversationUpdate:
		// NewUpdateConversation returns no error; the original's stale
		// `if err != nil` check after this call was dead code and is removed.
		r := conversation.NewUpdateConversation(ctx)
		return invokableNode(s, r.Update), nil
	case entity.NodeTypeConversationList:
		r, err := conversation.NewConversationList(ctx)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.List), nil
	case entity.NodeTypeConversationDelete:
		// NewDeleteConversation returns no error; stale err check removed (see above).
		r := conversation.NewDeleteConversation(ctx)
		return invokableNode(s, r.Delete), nil
	case entity.NodeTypeCreateMessage:
		conf, err := s.ToCreateMessageConfig()
		if err != nil {
			return nil, err
		}
		r, err := conversation.NewCreateMessage(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.Create), nil
	case entity.NodeTypeClearConversationHistory:
		cfg, err := s.ToClearConversationHistoryConfig()
		if err != nil {
			return nil, err
		}

		r, err := conversation.NewClearConversationHistory(ctx, cfg)
		if err != nil {
			return nil, err
		}

		return invokableNode(s, r.Clear), nil
	case entity.NodeTypeConversationHistory:
		cfg, err := s.ToConversationHistoryConfig()
		if err != nil {
			return nil, err
		}

		r, err := conversation.NewConversationHistory(ctx, cfg)
		if err != nil {
			return nil, err
		}

		return invokableNode(s, r.HistoryMessages), nil
	case entity.NodeTypeMessageList:
		conf, err := s.ToMessageListConfig()
		if err != nil {
			return nil, err
		}
		r, err := conversation.NewMessageList(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.List), nil
	case entity.NodeTypeDeleteMessage:
		conf, err := s.ToDeleteMessageConfig()
		if err != nil {
			return nil, err
		}
		r, err := conversation.NewDeleteMessage(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.Delete), nil
	case entity.NodeTypeEditMessage:
		conf, err := s.ToEditMessageConfig()
		if err != nil {
			return nil, err
		}
		r, err := conversation.NewEditMessage(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, r.Edit), nil
	case entity.NodeTypeIntentDetector:
		conf, err := s.ToIntentDetectorConfig(ctx)
		if err != nil {
			return nil, err
		}
		r, err := intentdetector.NewIntentDetector(ctx, conf)
		if err != nil {
			return nil, err
		}

		return invokableNode(s, r.Invoke), nil
	case entity.NodeTypeSubWorkflow:
		conf, err := s.ToSubWorkflowConfig(ctx, sc.requireCheckPoint)
		if err != nil {
			return nil, err
		}
		r, err := subworkflow.NewSubWorkflow(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableStreamableNodeWO(s, r.Invoke, r.Stream), nil
	case entity.NodeTypeJsonSerialization:
		conf, err := s.ToJsonSerializationConfig()
		if err != nil {
			return nil, err
		}
		js, err := json.NewJsonSerializer(ctx, conf)
		if err != nil {
			return nil, err
		}
		return invokableNode(s, js.Invoke), nil
	case entity.NodeTypeJsonDeserialization:
		conf, err := s.ToJsonDeserializationConfig()
		if err != nil {
			return nil, err
		}
		jd, err := json.NewJsonDeserializer(ctx, conf)
		if err != nil {
			return nil, err
		}
		// Deserializer also needs a per-invocation context cache for callback output.
		initFn := func(ctx context.Context) (context.Context, error) {
			return ctxcache.Init(ctx), nil
		}
		return invokableNode(s, jd.Invoke, withCallbackOutputConverter(jd.ToCallbackOutput), withInit(initFn)), nil
	default:
		panic("not implemented")
	}
}

// IsEnableUserQuery reports whether this schema is an entry node that exposes
// a top-level user-query output ("BOT_USER_INPUT" or "USER_INPUT").
func (s *NodeSchema) IsEnableUserQuery() bool {
	if s == nil || s.Type != entity.NodeTypeEntry || len(s.OutputSources) == 0 {
		return false
	}

	for _, source := range s.OutputSources {
		p := source.Path
		if len(p) == 1 && (p[0] == "BOT_USER_INPUT" || p[0] == "USER_INPUT") {
			return true
		}
	}

	return false
}

// IsEnableChatHistory reports whether the node's LLM parameters enable chat
// history. Only LLM and intent-detector nodes carry LLM parameters; every
// other node type answers false.
func (s *NodeSchema) IsEnableChatHistory() bool {
	if s == nil {
		return false
	}

	switch s.Type {
	case entity.NodeTypeLLM, entity.NodeTypeIntentDetector:
		return mustGetKey[*model.LLMParams]("LLMParams", s.Configs).EnableChatHistory
	default:
		return false
	}
}

// IsRefGlobalVariable reports whether any input or output source of this node
// references a global variable.
func (s *NodeSchema) IsRefGlobalVariable() bool {
	for _, source := range s.InputSources {
		if source.IsRefGlobalVariable() {
			return true
		}
	}
	for _, source := range s.OutputSources {
		if source.IsRefGlobalVariable() {
			return true
		}
	}
	return false
}

// requireCheckpoint reports whether this node forces workflow checkpointing:
// question-answer and input-receiver nodes always do; an LLM node does when it
// may call workflows as tools; a sub-workflow node inherits the requirement
// from its inner schema.
func (s *NodeSchema) requireCheckpoint() bool {
	switch s.Type {
	case entity.NodeTypeQuestionAnswer, entity.NodeTypeInputReceiver:
		return true
	case entity.NodeTypeLLM:
		fcParams := getKeyOrZero[*vo.FCParam]("FCParam", s.Configs)
		return fcParams != nil && fcParams.WorkflowFCParam != nil
	case entity.NodeTypeSubWorkflow:
		s.SubWorkflowSchema.Init()
		return s.SubWorkflowSchema.requireCheckPoint
	}

	return false
}
diff --git
a/backend/domain/workflow/internal/compose/to_node.go b/backend/domain/workflow/internal/compose/to_node.go new file mode 100644 index 000000000..254841b35 --- /dev/null +++ b/backend/domain/workflow/internal/compose/to_node.go @@ -0,0 +1,692 @@ +/* + * Copyright 2025 coze-dev Authors + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package compose + +import ( + "context" + "errors" + "fmt" + + "runtime/debug" + "strconv" + "time" + + einomodel "github.com/cloudwego/eino/components/model" + "github.com/cloudwego/eino/components/tool" + "github.com/cloudwego/eino/compose" + "github.com/cloudwego/eino/schema" + + workflow3 "github.com/coze-dev/coze-studio/backend/api/model/ocean/cloud/workflow" + workflow2 "github.com/coze-dev/coze-studio/backend/domain/workflow" + crosscode "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/code" + crossconversation "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/conversation" + crossdatabase "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/database" + crossknowledge "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/knowledge" + "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/model" + crossplugin "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/plugin" + "github.com/coze-dev/coze-studio/backend/domain/workflow/crossdomain/variable" + "github.com/coze-dev/coze-studio/backend/domain/workflow/entity/vo" 
+ "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/execute" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/batch" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/code" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/conversation" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/database" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/emitter" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/entry" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/httprequester" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/intentdetector" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/json" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/knowledge" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/llm" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/loop" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/plugin" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/qa" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/receiver" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/selector" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/subworkflow" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/textprocessor" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/variableaggregator" + "github.com/coze-dev/coze-studio/backend/domain/workflow/internal/nodes/variableassigner" + "github.com/coze-dev/coze-studio/backend/infra/contract/coderunner" + "github.com/coze-dev/coze-studio/backend/infra/contract/modelmgr" + 
"github.com/coze-dev/coze-studio/backend/pkg/lang/ptr" + "github.com/coze-dev/coze-studio/backend/pkg/safego" +) + +func (s *NodeSchema) ToEntryConfig(_ context.Context) (*entry.Config, error) { + return &entry.Config{ + DefaultValues: getKeyOrZero[map[string]any]("DefaultValues", s.Configs), + OutputTypes: s.OutputTypes, + }, nil +} + +func (s *NodeSchema) ToLLMConfig(ctx context.Context) (*llm.Config, error) { + llmConf := &llm.Config{ + SystemPrompt: getKeyOrZero[string]("SystemPrompt", s.Configs), + UserPrompt: getKeyOrZero[string]("UserPrompt", s.Configs), + OutputFormat: mustGetKey[llm.Format]("OutputFormat", s.Configs), + InputFields: s.InputTypes, + OutputFields: s.OutputTypes, + FullSources: getKeyOrZero[map[string]*nodes.SourceInfo]("FullSources", s.Configs), + } + + llmParams := getKeyOrZero[*model.LLMParams]("LLMParams", s.Configs) + + if llmParams == nil { + return nil, fmt.Errorf("llm node llmParams is required") + } + var ( + err error + chatModel, fallbackM einomodel.BaseChatModel + info, fallbackI *modelmgr.Model + modelWithInfo llm.ModelWithInfo + ) + + chatModel, info, err = model.GetManager().GetModel(ctx, llmParams) + if err != nil { + return nil, err + } + + metaConfigs := s.ExceptionConfigs + if metaConfigs != nil && metaConfigs.MaxRetry > 0 { + backupModelParams := getKeyOrZero[*model.LLMParams]("BackupLLMParams", s.Configs) + if backupModelParams != nil { + fallbackM, fallbackI, err = model.GetManager().GetModel(ctx, backupModelParams) + if err != nil { + return nil, err + } + } + } + + if fallbackM == nil { + modelWithInfo = llm.NewModel(chatModel, info) + } else { + modelWithInfo = llm.NewModelWithFallback(chatModel, fallbackM, info, fallbackI) + } + llmConf.ChatModel = modelWithInfo + + fcParams := getKeyOrZero[*vo.FCParam]("FCParam", s.Configs) + if fcParams != nil { + if fcParams.WorkflowFCParam != nil { + for _, wf := range fcParams.WorkflowFCParam.WorkflowList { + wfIDStr := wf.WorkflowID + wfID, err := strconv.ParseInt(wfIDStr, 10, 
64) + if err != nil { + return nil, fmt.Errorf("invalid workflow id: %s", wfIDStr) + } + + workflowToolConfig := vo.WorkflowToolConfig{} + if wf.FCSetting != nil { + workflowToolConfig.InputParametersConfig = wf.FCSetting.RequestParameters + workflowToolConfig.OutputParametersConfig = wf.FCSetting.ResponseParameters + } + + locator := vo.FromDraft + if wf.WorkflowVersion != "" { + locator = vo.FromSpecificVersion + } + + wfTool, err := workflow2.GetRepository().WorkflowAsTool(ctx, vo.GetPolicy{ + ID: wfID, + QType: locator, + Version: wf.WorkflowVersion, + }, workflowToolConfig) + if err != nil { + return nil, err + } + llmConf.Tools = append(llmConf.Tools, wfTool) + if wfTool.TerminatePlan() == vo.UseAnswerContent { + if llmConf.ToolsReturnDirectly == nil { + llmConf.ToolsReturnDirectly = make(map[string]bool) + } + toolInfo, err := wfTool.Info(ctx) + if err != nil { + return nil, err + } + llmConf.ToolsReturnDirectly[toolInfo.Name] = true + } + } + } + + if fcParams.PluginFCParam != nil { + pluginToolsInvokableReq := make(map[int64]*crossplugin.ToolsInvokableRequest) + for _, p := range fcParams.PluginFCParam.PluginList { + pid, err := strconv.ParseInt(p.PluginID, 10, 64) + if err != nil { + return nil, fmt.Errorf("invalid plugin id: %s", p.PluginID) + } + toolID, err := strconv.ParseInt(p.ApiId, 10, 64) + if err != nil { + return nil, fmt.Errorf("invalid plugin id: %s", p.PluginID) + } + + var ( + requestParameters []*workflow3.APIParameter + responseParameters []*workflow3.APIParameter + ) + if p.FCSetting != nil { + requestParameters = p.FCSetting.RequestParameters + responseParameters = p.FCSetting.ResponseParameters + } + + if req, ok := pluginToolsInvokableReq[pid]; ok { + req.ToolsInvokableInfo[toolID] = &crossplugin.ToolsInvokableInfo{ + ToolID: toolID, + RequestAPIParametersConfig: requestParameters, + ResponseAPIParametersConfig: responseParameters, + } + } else { + pluginToolsInfoRequest := &crossplugin.ToolsInvokableRequest{ + PluginEntity: 
crossplugin.Entity{ + PluginID: pid, + PluginVersion: ptr.Of(p.PluginVersion), + }, + ToolsInvokableInfo: map[int64]*crossplugin.ToolsInvokableInfo{ + toolID: { + ToolID: toolID, + RequestAPIParametersConfig: requestParameters, + ResponseAPIParametersConfig: responseParameters, + }, + }, + IsDraft: p.IsDraft, + } + pluginToolsInvokableReq[pid] = pluginToolsInfoRequest + } + } + inInvokableTools := make([]tool.BaseTool, 0, len(fcParams.PluginFCParam.PluginList)) + for _, req := range pluginToolsInvokableReq { + toolMap, err := crossplugin.GetPluginService().GetPluginInvokableTools(ctx, req) + if err != nil { + return nil, err + } + for _, t := range toolMap { + inInvokableTools = append(inInvokableTools, crossplugin.NewInvokableTool(t)) + } + } + if len(inInvokableTools) > 0 { + llmConf.Tools = inInvokableTools + } + } + + if fcParams.KnowledgeFCParam != nil && len(fcParams.KnowledgeFCParam.KnowledgeList) > 0 { + kwChatModel, err := knowledgeRecallChatModel(ctx) + if err != nil { + return nil, err + } + knowledgeOperator := crossknowledge.GetKnowledgeOperator() + setting := fcParams.KnowledgeFCParam.GlobalSetting + cfg := &llm.KnowledgeRecallConfig{ + ChatModel: kwChatModel, + Retriever: knowledgeOperator, + } + searchType, err := totRetrievalSearchType(setting.SearchMode) + if err != nil { + return nil, err + } + cfg.RetrievalStrategy = &llm.RetrievalStrategy{ + RetrievalStrategy: &crossknowledge.RetrievalStrategy{ + TopK: ptr.Of(setting.TopK), + MinScore: ptr.Of(setting.MinScore), + SearchType: searchType, + EnableNL2SQL: setting.UseNL2SQL, + EnableQueryRewrite: setting.UseRewrite, + EnableRerank: setting.UseRerank, + }, + NoReCallReplyMode: llm.NoReCallReplyMode(setting.NoRecallReplyMode), + NoReCallReplyCustomizePrompt: setting.NoRecallReplyCustomizePrompt, + } + + knowledgeIDs := make([]int64, 0, len(fcParams.KnowledgeFCParam.KnowledgeList)) + for _, kw := range fcParams.KnowledgeFCParam.KnowledgeList { + kid, err := strconv.ParseInt(kw.ID, 10, 64) + if err != 
nil { + return nil, err + } + knowledgeIDs = append(knowledgeIDs, kid) + } + + detailResp, err := knowledgeOperator.ListKnowledgeDetail(ctx, &crossknowledge.ListKnowledgeDetailRequest{ + KnowledgeIDs: knowledgeIDs, + }) + if err != nil { + return nil, err + } + cfg.SelectedKnowledgeDetails = detailResp.KnowledgeDetails + llmConf.KnowledgeRecallConfig = cfg + } + + } + + return llmConf, nil +} + +func (s *NodeSchema) ToSelectorConfig() *selector.Config { + return &selector.Config{ + Clauses: mustGetKey[[]*selector.OneClauseSchema]("Clauses", s.Configs), + } +} + +func (s *NodeSchema) SelectorInputConverter(in map[string]any) (out []selector.Operants, err error) { + conf := mustGetKey[[]*selector.OneClauseSchema]("Clauses", s.Configs) + + for i, oneConf := range conf { + if oneConf.Single != nil { + left, ok := nodes.TakeMapValue(in, compose.FieldPath{strconv.Itoa(i), selector.LeftKey}) + if !ok { + return nil, fmt.Errorf("failed to take left operant from input map: %v, clause index= %d", in, i) + } + + right, ok := nodes.TakeMapValue(in, compose.FieldPath{strconv.Itoa(i), selector.RightKey}) + if ok { + out = append(out, selector.Operants{Left: left, Right: right}) + } else { + out = append(out, selector.Operants{Left: left}) + } + } else if oneConf.Multi != nil { + multiClause := make([]*selector.Operants, 0) + for j := range oneConf.Multi.Clauses { + left, ok := nodes.TakeMapValue(in, compose.FieldPath{strconv.Itoa(i), strconv.Itoa(j), selector.LeftKey}) + if !ok { + return nil, fmt.Errorf("failed to take left operant from input map: %v, clause index= %d, single clause index= %d", in, i, j) + } + right, ok := nodes.TakeMapValue(in, compose.FieldPath{strconv.Itoa(i), strconv.Itoa(j), selector.RightKey}) + if ok { + multiClause = append(multiClause, &selector.Operants{Left: left, Right: right}) + } else { + multiClause = append(multiClause, &selector.Operants{Left: left}) + } + } + out = append(out, selector.Operants{Multi: multiClause}) + } else { + return nil, 
fmt.Errorf("invalid clause config, both single and multi are nil: %v", oneConf) + } + } + + return out, nil +} + +func (s *NodeSchema) ToBatchConfig(inner compose.Runnable[map[string]any, map[string]any]) (*batch.Config, error) { + conf := &batch.Config{ + BatchNodeKey: s.Key, + InnerWorkflow: inner, + Outputs: s.OutputSources, + } + + for key, tInfo := range s.InputTypes { + if tInfo.Type != vo.DataTypeArray { + continue + } + + conf.InputArrays = append(conf.InputArrays, key) + } + + return conf, nil +} + +func (s *NodeSchema) ToVariableAggregatorConfig() (*variableaggregator.Config, error) { + return &variableaggregator.Config{ + MergeStrategy: s.Configs.(map[string]any)["MergeStrategy"].(variableaggregator.MergeStrategy), + GroupLen: s.Configs.(map[string]any)["GroupToLen"].(map[string]int), + FullSources: getKeyOrZero[map[string]*nodes.SourceInfo]("FullSources", s.Configs), + NodeKey: s.Key, + InputSources: s.InputSources, + GroupOrder: mustGetKey[[]string]("GroupOrder", s.Configs), + }, nil +} + +func (s *NodeSchema) variableAggregatorInputConverter(in map[string]any) (converted map[string]map[int]any) { + converted = make(map[string]map[int]any) + + for k, value := range in { + m, ok := value.(map[string]any) + if !ok { + panic(errors.New("value is not a map[string]any")) + } + converted[k] = make(map[int]any, len(m)) + for i, sv := range m { + index, err := strconv.Atoi(i) + if err != nil { + panic(fmt.Errorf(" converting %s to int failed, err=%v", i, err)) + } + converted[k][index] = sv + } + } + + return converted +} + +func (s *NodeSchema) variableAggregatorStreamInputConverter(in *schema.StreamReader[map[string]any]) *schema.StreamReader[map[string]map[int]any] { + converter := func(input map[string]any) (output map[string]map[int]any, err error) { + defer func() { + if r := recover(); r != nil { + err = safego.NewPanicErr(r, debug.Stack()) + } + }() + return s.variableAggregatorInputConverter(input), nil + } + return schema.StreamReaderWithConvert(in, 
converter) +} + +func (s *NodeSchema) ToTextProcessorConfig() (*textprocessor.Config, error) { + return &textprocessor.Config{ + Type: s.Configs.(map[string]any)["Type"].(textprocessor.Type), + Tpl: getKeyOrZero[string]("Tpl", s.Configs.(map[string]any)), + ConcatChar: getKeyOrZero[string]("ConcatChar", s.Configs.(map[string]any)), + Separators: getKeyOrZero[[]string]("Separators", s.Configs.(map[string]any)), + FullSources: getKeyOrZero[map[string]*nodes.SourceInfo]("FullSources", s.Configs), + }, nil +} + +func (s *NodeSchema) ToJsonSerializationConfig() (*json.SerializationConfig, error) { + return &json.SerializationConfig{ + InputTypes: s.InputTypes, + }, nil +} + +func (s *NodeSchema) ToJsonDeserializationConfig() (*json.DeserializationConfig, error) { + return &json.DeserializationConfig{ + OutputFields: s.OutputTypes, + }, nil +} + +func (s *NodeSchema) ToHTTPRequesterConfig() (*httprequester.Config, error) { + return &httprequester.Config{ + URLConfig: mustGetKey[httprequester.URLConfig]("URLConfig", s.Configs), + AuthConfig: getKeyOrZero[*httprequester.AuthenticationConfig]("AuthConfig", s.Configs), + BodyConfig: mustGetKey[httprequester.BodyConfig]("BodyConfig", s.Configs), + Method: mustGetKey[string]("Method", s.Configs), + Timeout: mustGetKey[time.Duration]("Timeout", s.Configs), + RetryTimes: mustGetKey[uint64]("RetryTimes", s.Configs), + MD5FieldMapping: mustGetKey[httprequester.MD5FieldMapping]("MD5FieldMapping", s.Configs), + }, nil +} + +func (s *NodeSchema) ToVariableAssignerConfig(handler *variable.Handler) (*variableassigner.Config, error) { + return &variableassigner.Config{ + Pairs: s.Configs.([]*variableassigner.Pair), + Handler: handler, + }, nil +} + +func (s *NodeSchema) ToVariableAssignerInLoopConfig() (*variableassigner.Config, error) { + return &variableassigner.Config{ + Pairs: s.Configs.([]*variableassigner.Pair), + }, nil +} + +func (s *NodeSchema) ToLoopConfig(inner compose.Runnable[map[string]any, map[string]any]) (*loop.Config, 
error) { + conf := &loop.Config{ + LoopNodeKey: s.Key, + LoopType: mustGetKey[loop.Type]("LoopType", s.Configs), + IntermediateVars: getKeyOrZero[map[string]*vo.TypeInfo]("IntermediateVars", s.Configs), + Outputs: s.OutputSources, + Inner: inner, + } + + for key, tInfo := range s.InputTypes { + if tInfo.Type != vo.DataTypeArray { + continue + } + + if _, ok := conf.IntermediateVars[key]; ok { // exclude arrays in intermediate vars + continue + } + + conf.InputArrays = append(conf.InputArrays, key) + } + + return conf, nil +} + +func (s *NodeSchema) ToQAConfig(ctx context.Context) (*qa.Config, error) { + conf := &qa.Config{ + QuestionTpl: mustGetKey[string]("QuestionTpl", s.Configs), + AnswerType: mustGetKey[qa.AnswerType]("AnswerType", s.Configs), + ChoiceType: getKeyOrZero[qa.ChoiceType]("ChoiceType", s.Configs), + FixedChoices: getKeyOrZero[[]string]("FixedChoices", s.Configs), + ExtractFromAnswer: getKeyOrZero[bool]("ExtractFromAnswer", s.Configs), + MaxAnswerCount: getKeyOrZero[int]("MaxAnswerCount", s.Configs), + AdditionalSystemPromptTpl: getKeyOrZero[string]("AdditionalSystemPromptTpl", s.Configs), + OutputFields: s.OutputTypes, + NodeKey: s.Key, + } + + llmParams := getKeyOrZero[*model.LLMParams]("LLMParams", s.Configs) + if llmParams != nil { + m, _, err := model.GetManager().GetModel(ctx, llmParams) + if err != nil { + return nil, err + } + + conf.Model = m + } + + return conf, nil +} + +func (s *NodeSchema) ToInputReceiverConfig() (*receiver.Config, error) { + return &receiver.Config{ + OutputTypes: s.OutputTypes, + NodeKey: s.Key, + OutputSchema: mustGetKey[string]("OutputSchema", s.Configs), + }, nil +} + +func (s *NodeSchema) ToOutputEmitterConfig(sc *WorkflowSchema) (*emitter.Config, error) { + conf := &emitter.Config{ + Template: getKeyOrZero[string]("Template", s.Configs), + FullSources: getKeyOrZero[map[string]*nodes.SourceInfo]("FullSources", s.Configs), + } + + return conf, nil +} + +func (s *NodeSchema) ToDatabaseCustomSQLConfig() 
(*database.CustomSQLConfig, error) { + return &database.CustomSQLConfig{ + DatabaseInfoID: mustGetKey[int64]("DatabaseInfoID", s.Configs), + SQLTemplate: mustGetKey[string]("SQLTemplate", s.Configs), + OutputConfig: s.OutputTypes, + CustomSQLExecutor: crossdatabase.GetDatabaseOperator(), + }, nil + +} + +func (s *NodeSchema) ToDatabaseQueryConfig() (*database.QueryConfig, error) { + return &database.QueryConfig{ + DatabaseInfoID: mustGetKey[int64]("DatabaseInfoID", s.Configs), + QueryFields: getKeyOrZero[[]string]("QueryFields", s.Configs), + OrderClauses: getKeyOrZero[[]*crossdatabase.OrderClause]("OrderClauses", s.Configs), + ClauseGroup: getKeyOrZero[*crossdatabase.ClauseGroup]("ClauseGroup", s.Configs), + OutputConfig: s.OutputTypes, + Limit: mustGetKey[int64]("Limit", s.Configs), + Op: crossdatabase.GetDatabaseOperator(), + }, nil +} + +func (s *NodeSchema) ToDatabaseInsertConfig() (*database.InsertConfig, error) { + + return &database.InsertConfig{ + DatabaseInfoID: mustGetKey[int64]("DatabaseInfoID", s.Configs), + OutputConfig: s.OutputTypes, + Inserter: crossdatabase.GetDatabaseOperator(), + }, nil +} + +func (s *NodeSchema) ToDatabaseDeleteConfig() (*database.DeleteConfig, error) { + return &database.DeleteConfig{ + DatabaseInfoID: mustGetKey[int64]("DatabaseInfoID", s.Configs), + ClauseGroup: mustGetKey[*crossdatabase.ClauseGroup]("ClauseGroup", s.Configs), + OutputConfig: s.OutputTypes, + Deleter: crossdatabase.GetDatabaseOperator(), + }, nil +} + +func (s *NodeSchema) ToDatabaseUpdateConfig() (*database.UpdateConfig, error) { + + return &database.UpdateConfig{ + DatabaseInfoID: mustGetKey[int64]("DatabaseInfoID", s.Configs), + ClauseGroup: mustGetKey[*crossdatabase.ClauseGroup]("ClauseGroup", s.Configs), + OutputConfig: s.OutputTypes, + Updater: crossdatabase.GetDatabaseOperator(), + }, nil +} + +func (s *NodeSchema) ToKnowledgeIndexerConfig() (*knowledge.IndexerConfig, error) { + return &knowledge.IndexerConfig{ + KnowledgeID: 
mustGetKey[int64]("KnowledgeID", s.Configs), + ParsingStrategy: mustGetKey[*crossknowledge.ParsingStrategy]("ParsingStrategy", s.Configs), + ChunkingStrategy: mustGetKey[*crossknowledge.ChunkingStrategy]("ChunkingStrategy", s.Configs), + KnowledgeIndexer: crossknowledge.GetKnowledgeOperator(), + }, nil +} + +func (s *NodeSchema) ToKnowledgeRetrieveConfig() (*knowledge.RetrieveConfig, error) { + return &knowledge.RetrieveConfig{ + KnowledgeIDs: mustGetKey[[]int64]("KnowledgeIDs", s.Configs), + RetrievalStrategy: mustGetKey[*crossknowledge.RetrievalStrategy]("RetrievalStrategy", s.Configs), + Retriever: crossknowledge.GetKnowledgeOperator(), + }, nil +} + +func (s *NodeSchema) ToKnowledgeDeleterConfig() (*knowledge.DeleterConfig, error) { + return &knowledge.DeleterConfig{ + KnowledgeID: mustGetKey[int64]("KnowledgeID", s.Configs), + KnowledgeDeleter: crossknowledge.GetKnowledgeOperator(), + }, nil +} + +func (s *NodeSchema) ToPluginConfig() (*plugin.Config, error) { + return &plugin.Config{ + PluginID: mustGetKey[int64]("PluginID", s.Configs), + ToolID: mustGetKey[int64]("ToolID", s.Configs), + PluginVersion: mustGetKey[string]("PluginVersion", s.Configs), + PluginService: crossplugin.GetPluginService(), + }, nil + +} + +func (s *NodeSchema) ToCodeRunnerConfig() (*code.Config, error) { + return &code.Config{ + Code: mustGetKey[string]("Code", s.Configs), + Language: mustGetKey[coderunner.Language]("Language", s.Configs), + OutputConfig: s.OutputTypes, + Runner: crosscode.GetCodeRunner(), + }, nil +} + +func (s *NodeSchema) ToCreateConversationConfig() (*conversation.CreateConversationConfig, error) { + return &conversation.CreateConversationConfig{ + Manager: crossconversation.GetConversationManager(), + }, nil +} + +func (s *NodeSchema) ToDeleteMessageConfig() (*conversation.DeleteMessageConfig, error) { + return &conversation.DeleteMessageConfig{ + Manager: crossconversation.GetConversationManager(), + }, nil +} +func (s *NodeSchema) ToEditMessageConfig() 
(*conversation.EditMessageConfig, error) { + return &conversation.EditMessageConfig{ + Manager: crossconversation.GetConversationManager(), + }, nil +} + +func (s *NodeSchema) ToCreateMessageConfig() (*conversation.CreateMessageConfig, error) { + return &conversation.CreateMessageConfig{ + Creator: crossconversation.GetConversationManager(), + }, nil +} + +func (s *NodeSchema) ToMessageListConfig() (*conversation.MessageListConfig, error) { + return &conversation.MessageListConfig{ + Lister: crossconversation.GetConversationManager(), + }, nil +} + +func (s *NodeSchema) ToClearConversationHistoryConfig() (*conversation.ClearConversationHistoryConfig, error) { + return &conversation.ClearConversationHistoryConfig{ + Manager: crossconversation.GetConversationManager(), + }, nil +} + +func (s *NodeSchema) ToConversationHistoryConfig() (*conversation.ConversationHistoryConfig, error) { + return &conversation.ConversationHistoryConfig{ + Manager: crossconversation.GetConversationManager(), + }, nil +} + +func (s *NodeSchema) ToIntentDetectorConfig(ctx context.Context) (*intentdetector.Config, error) { + cfg := &intentdetector.Config{ + Intents: mustGetKey[[]string]("Intents", s.Configs), + SystemPrompt: getKeyOrZero[string]("SystemPrompt", s.Configs), + IsFastMode: getKeyOrZero[bool]("IsFastMode", s.Configs), + } + + llmParams := mustGetKey[*model.LLMParams]("LLMParams", s.Configs) + m, _, err := model.GetManager().GetModel(ctx, llmParams) + if err != nil { + return nil, err + } + cfg.ChatModel = m + + return cfg, nil +} + +func (s *NodeSchema) ToSubWorkflowConfig(ctx context.Context, requireCheckpoint bool) (*subworkflow.Config, error) { + var opts []WorkflowOption + opts = append(opts, WithIDAsName(mustGetKey[int64]("WorkflowID", s.Configs))) + if requireCheckpoint { + opts = append(opts, WithParentRequireCheckpoint()) + } + if s := execute.GetStaticConfig(); s != nil && s.MaxNodeCountPerWorkflow > 0 { + opts = append(opts, WithMaxNodeCount(s.MaxNodeCountPerWorkflow)) 
+ } + wf, err := NewWorkflow(ctx, s.SubWorkflowSchema, opts...) + if err != nil { + return nil, err + } + + return &subworkflow.Config{ + Runner: wf.Runner, + }, nil +} + +func totRetrievalSearchType(s int64) (crossknowledge.SearchType, error) { + switch s { + case 0: + return crossknowledge.SearchTypeSemantic, nil + case 1: + return crossknowledge.SearchTypeHybrid, nil + case 20: + return crossknowledge.SearchTypeFullText, nil + default: + return "", fmt.Errorf("invalid retrieval search type %v", s) + } +} + +// knowledgeRecallChatModel the chat model used by the knowledge base recall in the LLM node, not the user-configured model +func knowledgeRecallChatModel(ctx context.Context) (einomodel.BaseChatModel, error) { + defaultChatModelParma := &model.LLMParams{ + ModelName: "豆包·1.5·Pro·32k", + ModelType: 1, + Temperature: ptr.Of(0.5), + MaxTokens: 4096, + } + m, _, err := model.GetManager().GetModel(ctx, defaultChatModelParma) + return m, err +}