Compare commits
12 Commits
feat/conve
...
feat/chatf
| Author | SHA1 | Date | |
|---|---|---|---|
| bed69ca4a1 | |||
| bb2e426d90 | |||
| ece8bd3a47 | |||
| 77ac3001f0 | |||
| c4e7a94bb7 | |||
| 002b87738a | |||
| ba02bc80b8 | |||
| 2279f56fc5 | |||
| c89695c1b8 | |||
| a316a98759 | |||
| 4ad7701482 | |||
| 6dae6bd42d |
@ -22,6 +22,7 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
openapiauthApp "github.com/coze-dev/coze-studio/backend/application/openauth"
|
||||
"github.com/coze-dev/coze-studio/backend/application/plugin"
|
||||
"github.com/coze-dev/coze-studio/backend/application/singleagent"
|
||||
"github.com/coze-dev/coze-studio/backend/application/upload"
|
||||
@ -105,3 +106,24 @@ func GetBotOnlineInfo(ctx context.Context, c *app.RequestContext) {
|
||||
}
|
||||
c.JSON(consts.StatusOK, resp)
|
||||
}
|
||||
|
||||
// ImpersonateCozeUser .
|
||||
// @router /api/permission_api/coze_web_app/impersonate_coze_user [POST]
|
||||
func ImpersonateCozeUser(ctx context.Context, c *app.RequestContext) {
|
||||
var err error
|
||||
var req bot_open_api.ImpersonateCozeUserRequest
|
||||
err = c.BindAndValidate(&req)
|
||||
if err != nil {
|
||||
invalidParamRequestResponse(c, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
resp, err := openapiauthApp.OpenAuthApplication.ImpersonateCozeUserAccessToken(ctx, &req)
|
||||
|
||||
if err != nil {
|
||||
internalServerErrorResponse(ctx, c, err)
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(consts.StatusOK, resp)
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,19 +1,3 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Code generated by thriftgo (0.4.1). DO NOT EDIT.
|
||||
|
||||
package intelligence
|
||||
|
||||
@ -1,19 +1,3 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Code generated by thriftgo (0.4.1). DO NOT EDIT.
|
||||
|
||||
package coze
|
||||
|
||||
@ -26,14 +26,6 @@ import (
|
||||
"github.com/coze-dev/coze-studio/backend/crossdomain/contract/crossworkflow"
|
||||
)
|
||||
|
||||
type AgentRuntime struct {
|
||||
AgentVersion string
|
||||
IsDraft bool
|
||||
SpaceID int64
|
||||
ConnectorID int64
|
||||
PreRetrieveTools []*agentrun.Tool
|
||||
}
|
||||
|
||||
type EventType string
|
||||
|
||||
const (
|
||||
@ -84,6 +76,8 @@ type SingleAgent struct {
|
||||
JumpConfig *bot_common.JumpConfig
|
||||
BackgroundImageInfoList []*bot_common.BackgroundImageInfo
|
||||
Database []*bot_common.Database
|
||||
BotMode bot_common.BotMode
|
||||
LayoutInfo *bot_common.LayoutInfo
|
||||
ShortcutCommand []string
|
||||
}
|
||||
|
||||
@ -106,6 +100,8 @@ type InterruptInfo struct {
|
||||
ToolCallID string
|
||||
InterruptType InterruptEventType
|
||||
InterruptID string
|
||||
|
||||
ChatflowInterrupt *crossworkflow.StateMessage
|
||||
}
|
||||
|
||||
type ExecuteRequest struct {
|
||||
|
||||
@ -15881,329 +15881,6 @@ func (p *GetFileUrlsResponse) String() string {
|
||||
|
||||
}
|
||||
|
||||
type File struct {
|
||||
// File URI
|
||||
URI string `thrift:"URI,1" form:"uri" json:"uri"`
|
||||
// file bytes
|
||||
Bytes int64 `thrift:"Bytes,2" form:"bytes" json:"bytes"`
|
||||
// Upload timestamp in s
|
||||
CreatedAt int64 `thrift:"CreatedAt,3" form:"CreatedAt" json:"CreatedAt" query:"CreatedAt"`
|
||||
// file name
|
||||
FileName string `thrift:"FileName,4" form:"file_name" json:"file_name"`
|
||||
URL string `thrift:"URL,5" form:"url" json:"url"`
|
||||
}
|
||||
|
||||
func NewFile() *File {
|
||||
return &File{}
|
||||
}
|
||||
|
||||
func (p *File) InitDefault() {
|
||||
}
|
||||
|
||||
func (p *File) GetURI() (v string) {
|
||||
return p.URI
|
||||
}
|
||||
|
||||
func (p *File) GetBytes() (v int64) {
|
||||
return p.Bytes
|
||||
}
|
||||
|
||||
func (p *File) GetCreatedAt() (v int64) {
|
||||
return p.CreatedAt
|
||||
}
|
||||
|
||||
func (p *File) GetFileName() (v string) {
|
||||
return p.FileName
|
||||
}
|
||||
|
||||
func (p *File) GetURL() (v string) {
|
||||
return p.URL
|
||||
}
|
||||
|
||||
var fieldIDToName_File = map[int16]string{
|
||||
1: "URI",
|
||||
2: "Bytes",
|
||||
3: "CreatedAt",
|
||||
4: "FileName",
|
||||
5: "URL",
|
||||
}
|
||||
|
||||
func (p *File) Read(iprot thrift.TProtocol) (err error) {
|
||||
var fieldTypeId thrift.TType
|
||||
var fieldId int16
|
||||
|
||||
if _, err = iprot.ReadStructBegin(); err != nil {
|
||||
goto ReadStructBeginError
|
||||
}
|
||||
|
||||
for {
|
||||
_, fieldTypeId, fieldId, err = iprot.ReadFieldBegin()
|
||||
if err != nil {
|
||||
goto ReadFieldBeginError
|
||||
}
|
||||
if fieldTypeId == thrift.STOP {
|
||||
break
|
||||
}
|
||||
|
||||
switch fieldId {
|
||||
case 1:
|
||||
if fieldTypeId == thrift.STRING {
|
||||
if err = p.ReadField1(iprot); err != nil {
|
||||
goto ReadFieldError
|
||||
}
|
||||
} else if err = iprot.Skip(fieldTypeId); err != nil {
|
||||
goto SkipFieldError
|
||||
}
|
||||
case 2:
|
||||
if fieldTypeId == thrift.I64 {
|
||||
if err = p.ReadField2(iprot); err != nil {
|
||||
goto ReadFieldError
|
||||
}
|
||||
} else if err = iprot.Skip(fieldTypeId); err != nil {
|
||||
goto SkipFieldError
|
||||
}
|
||||
case 3:
|
||||
if fieldTypeId == thrift.I64 {
|
||||
if err = p.ReadField3(iprot); err != nil {
|
||||
goto ReadFieldError
|
||||
}
|
||||
} else if err = iprot.Skip(fieldTypeId); err != nil {
|
||||
goto SkipFieldError
|
||||
}
|
||||
case 4:
|
||||
if fieldTypeId == thrift.STRING {
|
||||
if err = p.ReadField4(iprot); err != nil {
|
||||
goto ReadFieldError
|
||||
}
|
||||
} else if err = iprot.Skip(fieldTypeId); err != nil {
|
||||
goto SkipFieldError
|
||||
}
|
||||
case 5:
|
||||
if fieldTypeId == thrift.STRING {
|
||||
if err = p.ReadField5(iprot); err != nil {
|
||||
goto ReadFieldError
|
||||
}
|
||||
} else if err = iprot.Skip(fieldTypeId); err != nil {
|
||||
goto SkipFieldError
|
||||
}
|
||||
default:
|
||||
if err = iprot.Skip(fieldTypeId); err != nil {
|
||||
goto SkipFieldError
|
||||
}
|
||||
}
|
||||
if err = iprot.ReadFieldEnd(); err != nil {
|
||||
goto ReadFieldEndError
|
||||
}
|
||||
}
|
||||
if err = iprot.ReadStructEnd(); err != nil {
|
||||
goto ReadStructEndError
|
||||
}
|
||||
|
||||
return nil
|
||||
ReadStructBeginError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T read struct begin error: ", p), err)
|
||||
ReadFieldBeginError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T read field %d begin error: ", p, fieldId), err)
|
||||
ReadFieldError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T read field %d '%s' error: ", p, fieldId, fieldIDToName_File[fieldId]), err)
|
||||
SkipFieldError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T field %d skip type %d error: ", p, fieldId, fieldTypeId), err)
|
||||
|
||||
ReadFieldEndError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T read field end error", p), err)
|
||||
ReadStructEndError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T read struct end error: ", p), err)
|
||||
}
|
||||
|
||||
func (p *File) ReadField1(iprot thrift.TProtocol) error {
|
||||
|
||||
var _field string
|
||||
if v, err := iprot.ReadString(); err != nil {
|
||||
return err
|
||||
} else {
|
||||
_field = v
|
||||
}
|
||||
p.URI = _field
|
||||
return nil
|
||||
}
|
||||
func (p *File) ReadField2(iprot thrift.TProtocol) error {
|
||||
|
||||
var _field int64
|
||||
if v, err := iprot.ReadI64(); err != nil {
|
||||
return err
|
||||
} else {
|
||||
_field = v
|
||||
}
|
||||
p.Bytes = _field
|
||||
return nil
|
||||
}
|
||||
func (p *File) ReadField3(iprot thrift.TProtocol) error {
|
||||
|
||||
var _field int64
|
||||
if v, err := iprot.ReadI64(); err != nil {
|
||||
return err
|
||||
} else {
|
||||
_field = v
|
||||
}
|
||||
p.CreatedAt = _field
|
||||
return nil
|
||||
}
|
||||
func (p *File) ReadField4(iprot thrift.TProtocol) error {
|
||||
|
||||
var _field string
|
||||
if v, err := iprot.ReadString(); err != nil {
|
||||
return err
|
||||
} else {
|
||||
_field = v
|
||||
}
|
||||
p.FileName = _field
|
||||
return nil
|
||||
}
|
||||
func (p *File) ReadField5(iprot thrift.TProtocol) error {
|
||||
|
||||
var _field string
|
||||
if v, err := iprot.ReadString(); err != nil {
|
||||
return err
|
||||
} else {
|
||||
_field = v
|
||||
}
|
||||
p.URL = _field
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *File) Write(oprot thrift.TProtocol) (err error) {
|
||||
var fieldId int16
|
||||
if err = oprot.WriteStructBegin("File"); err != nil {
|
||||
goto WriteStructBeginError
|
||||
}
|
||||
if p != nil {
|
||||
if err = p.writeField1(oprot); err != nil {
|
||||
fieldId = 1
|
||||
goto WriteFieldError
|
||||
}
|
||||
if err = p.writeField2(oprot); err != nil {
|
||||
fieldId = 2
|
||||
goto WriteFieldError
|
||||
}
|
||||
if err = p.writeField3(oprot); err != nil {
|
||||
fieldId = 3
|
||||
goto WriteFieldError
|
||||
}
|
||||
if err = p.writeField4(oprot); err != nil {
|
||||
fieldId = 4
|
||||
goto WriteFieldError
|
||||
}
|
||||
if err = p.writeField5(oprot); err != nil {
|
||||
fieldId = 5
|
||||
goto WriteFieldError
|
||||
}
|
||||
}
|
||||
if err = oprot.WriteFieldStop(); err != nil {
|
||||
goto WriteFieldStopError
|
||||
}
|
||||
if err = oprot.WriteStructEnd(); err != nil {
|
||||
goto WriteStructEndError
|
||||
}
|
||||
return nil
|
||||
WriteStructBeginError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write struct begin error: ", p), err)
|
||||
WriteFieldError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field %d error: ", p, fieldId), err)
|
||||
WriteFieldStopError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field stop error: ", p), err)
|
||||
WriteStructEndError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write struct end error: ", p), err)
|
||||
}
|
||||
|
||||
func (p *File) writeField1(oprot thrift.TProtocol) (err error) {
|
||||
if err = oprot.WriteFieldBegin("URI", thrift.STRING, 1); err != nil {
|
||||
goto WriteFieldBeginError
|
||||
}
|
||||
if err := oprot.WriteString(p.URI); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = oprot.WriteFieldEnd(); err != nil {
|
||||
goto WriteFieldEndError
|
||||
}
|
||||
return nil
|
||||
WriteFieldBeginError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 1 begin error: ", p), err)
|
||||
WriteFieldEndError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 1 end error: ", p), err)
|
||||
}
|
||||
func (p *File) writeField2(oprot thrift.TProtocol) (err error) {
|
||||
if err = oprot.WriteFieldBegin("Bytes", thrift.I64, 2); err != nil {
|
||||
goto WriteFieldBeginError
|
||||
}
|
||||
if err := oprot.WriteI64(p.Bytes); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = oprot.WriteFieldEnd(); err != nil {
|
||||
goto WriteFieldEndError
|
||||
}
|
||||
return nil
|
||||
WriteFieldBeginError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 2 begin error: ", p), err)
|
||||
WriteFieldEndError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 2 end error: ", p), err)
|
||||
}
|
||||
func (p *File) writeField3(oprot thrift.TProtocol) (err error) {
|
||||
if err = oprot.WriteFieldBegin("CreatedAt", thrift.I64, 3); err != nil {
|
||||
goto WriteFieldBeginError
|
||||
}
|
||||
if err := oprot.WriteI64(p.CreatedAt); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = oprot.WriteFieldEnd(); err != nil {
|
||||
goto WriteFieldEndError
|
||||
}
|
||||
return nil
|
||||
WriteFieldBeginError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 3 begin error: ", p), err)
|
||||
WriteFieldEndError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 3 end error: ", p), err)
|
||||
}
|
||||
func (p *File) writeField4(oprot thrift.TProtocol) (err error) {
|
||||
if err = oprot.WriteFieldBegin("FileName", thrift.STRING, 4); err != nil {
|
||||
goto WriteFieldBeginError
|
||||
}
|
||||
if err := oprot.WriteString(p.FileName); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = oprot.WriteFieldEnd(); err != nil {
|
||||
goto WriteFieldEndError
|
||||
}
|
||||
return nil
|
||||
WriteFieldBeginError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 4 begin error: ", p), err)
|
||||
WriteFieldEndError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 4 end error: ", p), err)
|
||||
}
|
||||
func (p *File) writeField5(oprot thrift.TProtocol) (err error) {
|
||||
if err = oprot.WriteFieldBegin("URL", thrift.STRING, 5); err != nil {
|
||||
goto WriteFieldBeginError
|
||||
}
|
||||
if err := oprot.WriteString(p.URL); err != nil {
|
||||
return err
|
||||
}
|
||||
if err = oprot.WriteFieldEnd(); err != nil {
|
||||
goto WriteFieldEndError
|
||||
}
|
||||
return nil
|
||||
WriteFieldBeginError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 5 begin error: ", p), err)
|
||||
WriteFieldEndError:
|
||||
return thrift.PrependError(fmt.Sprintf("%T write field 5 end error: ", p), err)
|
||||
}
|
||||
|
||||
func (p *File) String() string {
|
||||
if p == nil {
|
||||
return "<nil>"
|
||||
}
|
||||
return fmt.Sprintf("File(%+v)", *p)
|
||||
|
||||
}
|
||||
|
||||
type PlaygroundService interface {
|
||||
UpdateDraftBotInfoAgw(ctx context.Context, request *UpdateDraftBotInfoAgwRequest) (r *UpdateDraftBotInfoAgwResponse, err error)
|
||||
|
||||
|
||||
@ -250,6 +250,10 @@ func Register(r *server.Hertz) {
|
||||
}
|
||||
{
|
||||
_permission_api := _api.Group("/permission_api", _permission_apiMw()...)
|
||||
{
|
||||
_coze_web_app := _permission_api.Group("/coze_web_app", _coze_web_appMw()...)
|
||||
_coze_web_app.POST("/impersonate_coze_user", append(_impersonatecozeuserMw(), coze.ImpersonateCozeUser)...)
|
||||
}
|
||||
{
|
||||
_pat := _permission_api.Group("/pat", _patMw()...)
|
||||
_pat.POST("/create_personal_access_token_and_permission", append(_createpersonalaccesstokenandpermissionMw(), coze.CreatePersonalAccessTokenAndPermission)...)
|
||||
|
||||
@ -1505,3 +1505,13 @@ func _upload1Mw() []app.HandlerFunc {
|
||||
// your code...
|
||||
return nil
|
||||
}
|
||||
|
||||
func _coze_web_appMw() []app.HandlerFunc {
|
||||
// your code...
|
||||
return nil
|
||||
}
|
||||
|
||||
func _impersonatecozeuserMw() []app.HandlerFunc {
|
||||
// your code...
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -136,7 +136,7 @@ func Init(ctx context.Context) (err error) {
|
||||
crossconversation.SetDefaultSVC(conversationImpl.InitDomainService(complexServices.conversationSVC.ConversationDomainSVC))
|
||||
crossmessage.SetDefaultSVC(messageImpl.InitDomainService(complexServices.conversationSVC.MessageDomainSVC))
|
||||
crossagentrun.SetDefaultSVC(agentrunImpl.InitDomainService(complexServices.conversationSVC.AgentRunDomainSVC))
|
||||
crossagent.SetDefaultSVC(singleagentImpl.InitDomainService(complexServices.singleAgentSVC.DomainSVC, infra.ImageXClient))
|
||||
crossagent.SetDefaultSVC(singleagentImpl.InitDomainService(complexServices.singleAgentSVC.DomainSVC))
|
||||
crossuser.SetDefaultSVC(crossuserImpl.InitDomainService(basicServices.userSVC.DomainSVC))
|
||||
crossdatacopy.SetDefaultSVC(dataCopyImpl.InitDomainService(basicServices.infra))
|
||||
crosssearch.SetDefaultSVC(searchImpl.InitDomainService(complexServices.searchSVC.DomainSVC))
|
||||
@ -155,7 +155,7 @@ func initEventBus(infra *appinfra.AppDependencies) *eventbusImpl {
|
||||
|
||||
// initBasicServices init basic services that only depends on infra.
|
||||
func initBasicServices(ctx context.Context, infra *appinfra.AppDependencies, e *eventbusImpl) (*basicServices, error) {
|
||||
upload.InitService(infra.TOSClient, infra.CacheCli)
|
||||
upload.InitService(&upload.UploadComponents{Cache: infra.CacheCli, Oss: infra.TOSClient, DB: infra.DB, Idgen: infra.IDGenSVC})
|
||||
openAuthSVC := openauth.InitService(infra.DB, infra.IDGenSVC)
|
||||
promptSVC := prompt.InitService(infra.DB, infra.IDGenSVC, e.resourceEventBus)
|
||||
modelMgrSVC := modelmgr.InitService(infra.ModelMgr, infra.TOSClient)
|
||||
|
||||
@ -56,6 +56,7 @@ func InitService(s *ServiceComponents) *ConversationApplicationService {
|
||||
|
||||
arDomainComponents := &agentrun.Components{
|
||||
RunRecordRepo: repository.NewRunRecordRepo(s.DB, s.IDGen),
|
||||
ImagexSVC: s.ImageX,
|
||||
}
|
||||
|
||||
agentRunDomainSVC := agentrun.NewService(arDomainComponents)
|
||||
|
||||
@ -23,6 +23,7 @@ import (
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/app/bot_open_api"
|
||||
openapimodel "github.com/coze-dev/coze-studio/backend/api/model/permission/openapiauth"
|
||||
"github.com/coze-dev/coze-studio/backend/application/base/ctxutil"
|
||||
openapi "github.com/coze-dev/coze-studio/backend/domain/openauth/openapiauth"
|
||||
@ -80,6 +81,7 @@ func (s *OpenAuthApplicationService) CreatePersonalAccessToken(ctx context.Conte
|
||||
Name: req.Name,
|
||||
Expire: req.ExpireAt,
|
||||
UserID: *userID,
|
||||
AkType: entity.AkTypeCustomer,
|
||||
}
|
||||
|
||||
if req.DurationDay == "customize" {
|
||||
@ -111,6 +113,32 @@ func (s *OpenAuthApplicationService) CreatePersonalAccessToken(ctx context.Conte
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (s *OpenAuthApplicationService) ImpersonateCozeUserAccessToken(ctx context.Context, req *bot_open_api.ImpersonateCozeUserRequest) (*bot_open_api.ImpersonateCozeUserResponse, error) {
|
||||
resp := new(bot_open_api.ImpersonateCozeUserResponse)
|
||||
userID := ctxutil.GetUIDFromCtx(ctx)
|
||||
|
||||
expiredSecond := time.Now().Add(time.Duration(time.Second * 60 * 15)).Unix()
|
||||
|
||||
appReq := &entity.CreateApiKey{
|
||||
UserID: *userID,
|
||||
AkType: entity.AkTypeTemporary,
|
||||
Expire: expiredSecond,
|
||||
Name: "temporary access token",
|
||||
}
|
||||
|
||||
apiKeyResp, err := openapiAuthDomainSVC.Create(ctx, appReq)
|
||||
if err != nil {
|
||||
logs.CtxErrorf(ctx, "OpenAuthApplicationService.CreatePersonalAccessToken failed, err=%v", err)
|
||||
return resp, errors.New("CreatePersonalAccessToken failed")
|
||||
}
|
||||
resp.Data = &bot_open_api.ImpersonateCozeUserResponseData{
|
||||
AccessToken: apiKeyResp.ApiKey,
|
||||
ExpiresIn: expiredSecond,
|
||||
TokenType: "Bearer",
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (s *OpenAuthApplicationService) ListPersonalAccessTokens(ctx context.Context, req *openapimodel.ListPersonalAccessTokensRequest) (*openapimodel.ListPersonalAccessTokensResponse, error) {
|
||||
|
||||
resp := new(openapimodel.ListPersonalAccessTokensResponse)
|
||||
|
||||
@ -370,6 +370,12 @@ func (s *SingleAgentApplicationService) applyAgentUpdates(target *entity.SingleA
|
||||
}
|
||||
target.Database = patch.DatabaseList
|
||||
}
|
||||
if patch.BotMode != nil {
|
||||
target.BotMode = ptr.From(patch.BotMode)
|
||||
}
|
||||
if patch.LayoutInfo != nil {
|
||||
target.LayoutInfo = patch.LayoutInfo
|
||||
}
|
||||
|
||||
return target, nil
|
||||
}
|
||||
@ -419,11 +425,12 @@ func (s *SingleAgentApplicationService) singleAgentDraftDo2Vo(ctx context.Contex
|
||||
TaskInfo: &bot_common.TaskInfo{},
|
||||
CreateTime: do.CreatedAt / 1000,
|
||||
UpdateTime: do.UpdatedAt / 1000,
|
||||
BotMode: bot_common.BotMode_SingleMode,
|
||||
BotMode: do.BotMode,
|
||||
BackgroundImageInfoList: do.BackgroundImageInfoList,
|
||||
Status: bot_common.BotStatus_Using,
|
||||
DatabaseList: do.Database,
|
||||
ShortcutSort: do.ShortcutCommand,
|
||||
LayoutInfo: do.LayoutInfo,
|
||||
}
|
||||
|
||||
if do.VariablesMetaID != nil {
|
||||
|
||||
@ -40,6 +40,7 @@ import (
|
||||
|
||||
_ "golang.org/x/image/tiff"
|
||||
_ "golang.org/x/image/webp"
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
@ -50,7 +51,9 @@ import (
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/playground"
|
||||
"github.com/coze-dev/coze-studio/backend/application/base/ctxutil"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/service"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/cache"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/idgen"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/storage"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/errorx"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/conv"
|
||||
@ -61,9 +64,17 @@ import (
|
||||
"github.com/coze-dev/coze-studio/backend/types/errno"
|
||||
)
|
||||
|
||||
func InitService(oss storage.Storage, cache cache.Cmdable) {
|
||||
SVC.cache = cache
|
||||
SVC.oss = oss
|
||||
func InitService(components *UploadComponents) {
|
||||
SVC.cache = components.Cache
|
||||
SVC.oss = components.Oss
|
||||
SVC.svc = service.NewUploadSVC(components.DB, components.Idgen)
|
||||
}
|
||||
|
||||
type UploadComponents struct {
|
||||
Oss storage.Storage
|
||||
Cache cache.Cmdable
|
||||
DB *gorm.DB
|
||||
Idgen idgen.IDGenerator
|
||||
}
|
||||
|
||||
var SVC = &UploadService{}
|
||||
@ -71,6 +82,7 @@ var SVC = &UploadService{}
|
||||
type UploadService struct {
|
||||
oss storage.Storage
|
||||
cache cache.Cmdable
|
||||
svc service.UploadService
|
||||
}
|
||||
|
||||
const (
|
||||
@ -427,6 +439,23 @@ func (u *UploadService) UploadFileOpen(ctx context.Context, req *bot_open_api.Up
|
||||
}
|
||||
resp.File.CreatedAt = time.Now().Unix()
|
||||
resp.File.URL = url
|
||||
fileEntity := entity.File{
|
||||
Name: fileHeader.Filename,
|
||||
FileSize: fileHeader.Size,
|
||||
TosURI: objName,
|
||||
Status: entity.FileStatusValid,
|
||||
CreatorID: strconv.FormatInt(uid, 10),
|
||||
Source: entity.FileSourceAPI,
|
||||
CozeAccountID: uid,
|
||||
ContentType: fileHeader.Header.Get("Content-Type"),
|
||||
CreatedAt: time.Now().UnixMilli(),
|
||||
UpdatedAt: time.Now().UnixMilli(),
|
||||
}
|
||||
domainResp, err := u.svc.UploadFile(ctx, &service.UploadFileRequest{File: &fileEntity})
|
||||
if err != nil {
|
||||
return &resp, err
|
||||
}
|
||||
resp.File.ID = strconv.FormatInt(domainResp.File.ID, 10)
|
||||
return &resp, nil
|
||||
}
|
||||
|
||||
|
||||
@ -21,17 +21,31 @@ import (
|
||||
|
||||
"github.com/cloudwego/eino/schema"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/message"
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/agentrun"
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/singleagent"
|
||||
)
|
||||
|
||||
// Requests and responses must not reference domain entities and can only use models under api/model/crossdomain.
|
||||
type SingleAgent interface {
|
||||
StreamExecute(ctx context.Context, historyMsg []*message.Message, query *message.Message,
|
||||
agentRuntime *singleagent.AgentRuntime) (*schema.StreamReader[*singleagent.AgentEvent], error)
|
||||
StreamExecute(ctx context.Context,
|
||||
agentRuntime *AgentRuntime) (*schema.StreamReader[*singleagent.AgentEvent], error)
|
||||
ObtainAgentByIdentity(ctx context.Context, identity *singleagent.AgentIdentity) (*singleagent.SingleAgent, error)
|
||||
}
|
||||
|
||||
type AgentRuntime struct {
|
||||
AgentVersion string
|
||||
UserID string
|
||||
AgentID int64
|
||||
IsDraft bool
|
||||
SpaceID int64
|
||||
ConnectorID int64
|
||||
PreRetrieveTools []*agentrun.Tool
|
||||
|
||||
HistoryMsg []*schema.Message
|
||||
Input *schema.Message
|
||||
ResumeInfo *ResumeInfo
|
||||
}
|
||||
|
||||
type ResumeInfo = singleagent.InterruptInfo
|
||||
|
||||
type AgentEvent = singleagent.AgentEvent
|
||||
|
||||
@ -39,18 +39,32 @@ type Workflow interface {
|
||||
ReleaseApplicationWorkflows(ctx context.Context, appID int64, config *ReleaseWorkflowConfig) ([]*vo.ValidateIssue, error)
|
||||
GetWorkflowIDsByAppID(ctx context.Context, appID int64) ([]int64, error)
|
||||
SyncExecuteWorkflow(ctx context.Context, config vo.ExecuteConfig, input map[string]any) (*workflowEntity.WorkflowExecution, vo.TerminatePlan, error)
|
||||
StreamExecute(ctx context.Context, config vo.ExecuteConfig, input map[string]any) (*schema.StreamReader[*workflowEntity.Message], error)
|
||||
StreamResume(ctx context.Context, req *entity.ResumeRequest, config vo.ExecuteConfig) (*schema.StreamReader[*entity.Message], error)
|
||||
WithExecuteConfig(cfg vo.ExecuteConfig) einoCompose.Option
|
||||
WithMessagePipe() (compose.Option, *schema.StreamReader[*entity.Message])
|
||||
}
|
||||
|
||||
type ExecuteConfig = vo.ExecuteConfig
|
||||
type WorkflowMessage = workflowEntity.Message
|
||||
type StateMessage = workflowEntity.StateMessage
|
||||
type ExecuteMode = vo.ExecuteMode
|
||||
type NodeType = entity.NodeType
|
||||
type MessageType = entity.MessageType
|
||||
type InterruptEvent = workflowEntity.InterruptEvent
|
||||
type EventType = workflowEntity.InterruptEventType
|
||||
type ResumeRequest = entity.ResumeRequest
|
||||
|
||||
type WorkflowMessage = entity.Message
|
||||
const (
|
||||
Answer MessageType = "answer"
|
||||
FunctionCall MessageType = "function_call"
|
||||
ToolResponse MessageType = "tool_response"
|
||||
)
|
||||
|
||||
const (
|
||||
NodeTypeOutputEmitter NodeType = "OutputEmitter"
|
||||
NodeTypeInputReceiver NodeType = "InputReceiver"
|
||||
NodeTypeQuestion NodeType = "Question"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -59,6 +73,14 @@ const (
|
||||
ExecuteModeNodeDebug ExecuteMode = "node_debug"
|
||||
)
|
||||
|
||||
type SyncPattern = vo.SyncPattern
|
||||
|
||||
const (
|
||||
SyncPatternSync SyncPattern = "sync"
|
||||
SyncPatternAsync SyncPattern = "async"
|
||||
SyncPatternStream SyncPattern = "stream"
|
||||
)
|
||||
|
||||
type TaskType = vo.TaskType
|
||||
|
||||
const (
|
||||
|
||||
@ -18,17 +18,13 @@ package agent
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/cloudwego/eino/schema"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/agentrun"
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/message"
|
||||
model "github.com/coze-dev/coze-studio/backend/api/model/crossdomain/singleagent"
|
||||
"github.com/coze-dev/coze-studio/backend/crossdomain/contract/crossagent"
|
||||
singleagent "github.com/coze-dev/coze-studio/backend/domain/agent/singleagent/service"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/message/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/imagex"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/conv"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/slices"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/logs"
|
||||
@ -38,49 +34,34 @@ var defaultSVC crossagent.SingleAgent
|
||||
|
||||
type impl struct {
|
||||
DomainSVC singleagent.SingleAgent
|
||||
ImagexSVC imagex.ImageX
|
||||
}
|
||||
|
||||
func InitDomainService(c singleagent.SingleAgent, imagexClient imagex.ImageX) crossagent.SingleAgent {
|
||||
func InitDomainService(c singleagent.SingleAgent) crossagent.SingleAgent {
|
||||
defaultSVC = &impl{
|
||||
DomainSVC: c,
|
||||
ImagexSVC: imagexClient,
|
||||
}
|
||||
|
||||
return defaultSVC
|
||||
}
|
||||
|
||||
func (c *impl) StreamExecute(ctx context.Context, historyMsg []*message.Message,
|
||||
query *message.Message, agentRuntime *model.AgentRuntime,
|
||||
func (c *impl) StreamExecute(ctx context.Context, agentRuntime *crossagent.AgentRuntime,
|
||||
) (*schema.StreamReader[*model.AgentEvent], error) {
|
||||
|
||||
historyMsg = c.historyPairs(historyMsg)
|
||||
|
||||
singleAgentStreamExecReq := c.buildSingleAgentStreamExecuteReq(ctx, historyMsg, query, agentRuntime)
|
||||
singleAgentStreamExecReq := c.buildSingleAgentStreamExecuteReq(ctx, agentRuntime)
|
||||
|
||||
streamEvent, err := c.DomainSVC.StreamExecute(ctx, singleAgentStreamExecReq)
|
||||
logs.CtxInfof(ctx, "agent StreamExecute req:%v, streamEvent:%v, err:%v", conv.DebugJsonToStr(singleAgentStreamExecReq), streamEvent, err)
|
||||
return streamEvent, err
|
||||
}
|
||||
|
||||
func (c *impl) buildSingleAgentStreamExecuteReq(ctx context.Context, historyMsg []*message.Message,
|
||||
input *message.Message, agentRuntime *model.AgentRuntime,
|
||||
func (c *impl) buildSingleAgentStreamExecuteReq(ctx context.Context, agentRuntime *crossagent.AgentRuntime,
|
||||
) *model.ExecuteRequest {
|
||||
identity := c.buildIdentity(input, agentRuntime)
|
||||
inputBuild := c.buildSchemaMessage(ctx, []*message.Message{input})
|
||||
var inputSM *schema.Message
|
||||
if len(inputBuild) > 0 {
|
||||
inputSM = inputBuild[0]
|
||||
}
|
||||
history := c.buildSchemaMessage(ctx, historyMsg)
|
||||
|
||||
resumeInfo := c.checkResumeInfo(ctx, historyMsg)
|
||||
|
||||
return &model.ExecuteRequest{
|
||||
Identity: identity,
|
||||
Input: inputSM,
|
||||
History: history,
|
||||
UserID: input.UserID,
|
||||
Identity: c.buildIdentity(agentRuntime),
|
||||
Input: agentRuntime.Input,
|
||||
History: agentRuntime.HistoryMsg,
|
||||
UserID: agentRuntime.UserID,
|
||||
PreCallTools: slices.Transform(agentRuntime.PreRetrieveTools, func(tool *agentrun.Tool) *agentrun.ToolsRetriever {
|
||||
return &agentrun.ToolsRetriever{
|
||||
PluginID: tool.PluginID,
|
||||
@ -98,141 +79,19 @@ func (c *impl) buildSingleAgentStreamExecuteReq(ctx context.Context, historyMsg
|
||||
}(tool.Type),
|
||||
}
|
||||
}),
|
||||
|
||||
ResumeInfo: resumeInfo,
|
||||
ResumeInfo: agentRuntime.ResumeInfo,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *impl) historyPairs(historyMsg []*message.Message) []*message.Message {
|
||||
|
||||
fcMsgPairs := make(map[int64][]*message.Message)
|
||||
for _, one := range historyMsg {
|
||||
if one.MessageType != message.MessageTypeFunctionCall && one.MessageType != message.MessageTypeToolResponse {
|
||||
continue
|
||||
}
|
||||
if _, ok := fcMsgPairs[one.RunID]; !ok {
|
||||
fcMsgPairs[one.RunID] = []*message.Message{one}
|
||||
} else {
|
||||
fcMsgPairs[one.RunID] = append(fcMsgPairs[one.RunID], one)
|
||||
}
|
||||
}
|
||||
|
||||
var historyAfterPairs []*message.Message
|
||||
for _, value := range historyMsg {
|
||||
if value.MessageType == message.MessageTypeFunctionCall {
|
||||
if len(fcMsgPairs[value.RunID])%2 == 0 {
|
||||
historyAfterPairs = append(historyAfterPairs, value)
|
||||
}
|
||||
} else {
|
||||
historyAfterPairs = append(historyAfterPairs, value)
|
||||
}
|
||||
}
|
||||
return historyAfterPairs
|
||||
|
||||
}
|
||||
func (c *impl) checkResumeInfo(_ context.Context, historyMsg []*message.Message) *crossagent.ResumeInfo {
|
||||
|
||||
var resumeInfo *crossagent.ResumeInfo
|
||||
for i := len(historyMsg) - 1; i >= 0; i-- {
|
||||
if historyMsg[i].MessageType == message.MessageTypeQuestion {
|
||||
break
|
||||
}
|
||||
if historyMsg[i].MessageType == message.MessageTypeVerbose {
|
||||
if historyMsg[i].Ext[string(entity.ExtKeyResumeInfo)] != "" {
|
||||
err := json.Unmarshal([]byte(historyMsg[i].Ext[string(entity.ExtKeyResumeInfo)]), &resumeInfo)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return resumeInfo
|
||||
}
|
||||
|
||||
func (c *impl) buildSchemaMessage(ctx context.Context, msgs []*message.Message) []*schema.Message {
|
||||
schemaMessage := make([]*schema.Message, 0, len(msgs))
|
||||
|
||||
for _, msgOne := range msgs {
|
||||
if msgOne.ModelContent == "" {
|
||||
continue
|
||||
}
|
||||
if msgOne.MessageType == message.MessageTypeVerbose || msgOne.MessageType == message.MessageTypeFlowUp {
|
||||
continue
|
||||
}
|
||||
var sm *schema.Message
|
||||
err := json.Unmarshal([]byte(msgOne.ModelContent), &sm)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if len(sm.ReasoningContent) > 0 {
|
||||
sm.ReasoningContent = ""
|
||||
}
|
||||
|
||||
schemaMessage = append(schemaMessage, c.parseMessageURI(ctx, sm))
|
||||
}
|
||||
|
||||
return schemaMessage
|
||||
}
|
||||
|
||||
func (c *impl) parseMessageURI(ctx context.Context, mcMsg *schema.Message) *schema.Message {
|
||||
if mcMsg.MultiContent == nil {
|
||||
return mcMsg
|
||||
}
|
||||
for k, one := range mcMsg.MultiContent {
|
||||
switch one.Type {
|
||||
case schema.ChatMessagePartTypeImageURL:
|
||||
|
||||
if one.ImageURL.URI != "" {
|
||||
url, err := c.ImagexSVC.GetResourceURL(ctx, one.ImageURL.URI)
|
||||
if err == nil {
|
||||
mcMsg.MultiContent[k].ImageURL.URL = url.URL
|
||||
}
|
||||
}
|
||||
case schema.ChatMessagePartTypeFileURL:
|
||||
if one.FileURL.URI != "" {
|
||||
url, err := c.ImagexSVC.GetResourceURL(ctx, one.FileURL.URI)
|
||||
if err == nil {
|
||||
mcMsg.MultiContent[k].FileURL.URL = url.URL
|
||||
}
|
||||
}
|
||||
case schema.ChatMessagePartTypeAudioURL:
|
||||
if one.AudioURL.URI != "" {
|
||||
url, err := c.ImagexSVC.GetResourceURL(ctx, one.AudioURL.URI)
|
||||
if err == nil {
|
||||
mcMsg.MultiContent[k].AudioURL.URL = url.URL
|
||||
}
|
||||
}
|
||||
case schema.ChatMessagePartTypeVideoURL:
|
||||
if one.VideoURL.URI != "" {
|
||||
url, err := c.ImagexSVC.GetResourceURL(ctx, one.VideoURL.URI)
|
||||
if err == nil {
|
||||
mcMsg.MultiContent[k].VideoURL.URL = url.URL
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return mcMsg
|
||||
}
|
||||
|
||||
func (c *impl) buildIdentity(input *message.Message, agentRuntime *model.AgentRuntime) *model.AgentIdentity {
|
||||
func (c *impl) buildIdentity(agentRuntime *crossagent.AgentRuntime) *model.AgentIdentity {
|
||||
return &model.AgentIdentity{
|
||||
AgentID: input.AgentID,
|
||||
AgentID: agentRuntime.AgentID,
|
||||
Version: agentRuntime.AgentVersion,
|
||||
IsDraft: agentRuntime.IsDraft,
|
||||
ConnectorID: agentRuntime.ConnectorID,
|
||||
}
|
||||
}
|
||||
|
||||
func (c *impl) GetSingleAgent(ctx context.Context, agentID int64, version string) (agent *model.SingleAgent, err error) {
|
||||
agentInfo, err := c.DomainSVC.GetSingleAgent(ctx, agentID, version)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return agentInfo.SingleAgent, nil
|
||||
}
|
||||
|
||||
func (c *impl) ObtainAgentByIdentity(ctx context.Context, identity *model.AgentIdentity) (*model.SingleAgent, error) {
|
||||
agentInfo, err := c.DomainSVC.ObtainAgentByIdentity(ctx, identity)
|
||||
if err != nil {
|
||||
|
||||
@ -70,6 +70,12 @@ func (i *impl) WithResumeToolWorkflow(resumingEvent *workflowEntity.ToolInterrup
|
||||
func (i *impl) SyncExecuteWorkflow(ctx context.Context, config vo.ExecuteConfig, input map[string]any) (*workflowEntity.WorkflowExecution, vo.TerminatePlan, error) {
|
||||
return i.DomainSVC.SyncExecute(ctx, config, input)
|
||||
}
|
||||
func (i *impl) StreamExecute(ctx context.Context, config vo.ExecuteConfig, input map[string]any) (*schema.StreamReader[*workflowEntity.Message], error) {
|
||||
return i.DomainSVC.StreamExecute(ctx, config, input)
|
||||
}
|
||||
func (i *impl) StreamResume(ctx context.Context, req *entity.ResumeRequest, config vo.ExecuteConfig) (*schema.StreamReader[*entity.Message], error) {
|
||||
return i.DomainSVC.StreamResume(ctx, req, config)
|
||||
}
|
||||
|
||||
func (i *impl) WithExecuteConfig(cfg vo.ExecuteConfig) einoCompose.Option {
|
||||
return i.DomainSVC.WithExecuteConfig(cfg)
|
||||
|
||||
@ -50,7 +50,9 @@ type SingleAgentDraft struct {
|
||||
JumpConfig *bot_common.JumpConfig `gorm:"column:jump_config;comment:Jump Configuration;serializer:json" json:"jump_config"` // Jump Configuration
|
||||
BackgroundImageInfoList []*bot_common.BackgroundImageInfo `gorm:"column:background_image_info_list;comment:Background image;serializer:json" json:"background_image_info_list"` // Background image
|
||||
DatabaseConfig []*bot_common.Database `gorm:"column:database_config;comment:Agent Database Base Configuration;serializer:json" json:"database_config"` // Agent Database Base Configuration
|
||||
BotMode int32 `gorm:"column:bot_mode;not null;comment:mod,0:single mode 2:chatflow mode" json:"bot_mode"` // mod,0:single mode 2:chatflow mode
|
||||
ShortcutCommand []string `gorm:"column:shortcut_command;comment:shortcut command;serializer:json" json:"shortcut_command"` // shortcut command
|
||||
LayoutInfo *bot_common.LayoutInfo `gorm:"column:layout_info;comment:chatflow layout info;serializer:json" json:"layout_info"` // chatflow layout info
|
||||
}
|
||||
|
||||
// TableName SingleAgentDraft's table name
|
||||
|
||||
@ -52,7 +52,9 @@ type SingleAgentVersion struct {
|
||||
Version string `gorm:"column:version;not null;comment:Agent Version" json:"version"` // Agent Version
|
||||
BackgroundImageInfoList []*bot_common.BackgroundImageInfo `gorm:"column:background_image_info_list;comment:Background image;serializer:json" json:"background_image_info_list"` // Background image
|
||||
DatabaseConfig []*bot_common.Database `gorm:"column:database_config;comment:Agent Database Base Configuration;serializer:json" json:"database_config"` // Agent Database Base Configuration
|
||||
BotMode int32 `gorm:"column:bot_mode;not null;comment:mod,0:single mode 2:chatflow mode" json:"bot_mode"` // mod,0:single mode 2:chatflow mode
|
||||
ShortcutCommand []string `gorm:"column:shortcut_command;comment:shortcut command;serializer:json" json:"shortcut_command"` // shortcut command
|
||||
LayoutInfo *bot_common.LayoutInfo `gorm:"column:layout_info;comment:chatflow layout info;serializer:json" json:"layout_info"` // chatflow layout info
|
||||
}
|
||||
|
||||
// TableName SingleAgentVersion's table name
|
||||
|
||||
@ -1,3 +1,19 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
@ -48,7 +64,9 @@ func newSingleAgentDraft(db *gorm.DB, opts ...gen.DOOption) singleAgentDraft {
|
||||
_singleAgentDraft.JumpConfig = field.NewField(tableName, "jump_config")
|
||||
_singleAgentDraft.BackgroundImageInfoList = field.NewField(tableName, "background_image_info_list")
|
||||
_singleAgentDraft.DatabaseConfig = field.NewField(tableName, "database_config")
|
||||
_singleAgentDraft.BotMode = field.NewInt32(tableName, "bot_mode")
|
||||
_singleAgentDraft.ShortcutCommand = field.NewField(tableName, "shortcut_command")
|
||||
_singleAgentDraft.LayoutInfo = field.NewField(tableName, "layout_info")
|
||||
|
||||
_singleAgentDraft.fillFieldMap()
|
||||
|
||||
@ -81,7 +99,9 @@ type singleAgentDraft struct {
|
||||
JumpConfig field.Field // Jump Configuration
|
||||
BackgroundImageInfoList field.Field // Background image
|
||||
DatabaseConfig field.Field // Agent Database Base Configuration
|
||||
BotMode field.Int32 // mod,0:single mode 2:chatflow mode
|
||||
ShortcutCommand field.Field // shortcut command
|
||||
LayoutInfo field.Field // chatflow layout info
|
||||
|
||||
fieldMap map[string]field.Expr
|
||||
}
|
||||
@ -119,7 +139,9 @@ func (s *singleAgentDraft) updateTableName(table string) *singleAgentDraft {
|
||||
s.JumpConfig = field.NewField(table, "jump_config")
|
||||
s.BackgroundImageInfoList = field.NewField(table, "background_image_info_list")
|
||||
s.DatabaseConfig = field.NewField(table, "database_config")
|
||||
s.BotMode = field.NewInt32(table, "bot_mode")
|
||||
s.ShortcutCommand = field.NewField(table, "shortcut_command")
|
||||
s.LayoutInfo = field.NewField(table, "layout_info")
|
||||
|
||||
s.fillFieldMap()
|
||||
|
||||
@ -136,7 +158,7 @@ func (s *singleAgentDraft) GetFieldByName(fieldName string) (field.OrderExpr, bo
|
||||
}
|
||||
|
||||
func (s *singleAgentDraft) fillFieldMap() {
|
||||
s.fieldMap = make(map[string]field.Expr, 22)
|
||||
s.fieldMap = make(map[string]field.Expr, 24)
|
||||
s.fieldMap["id"] = s.ID
|
||||
s.fieldMap["agent_id"] = s.AgentID
|
||||
s.fieldMap["creator_id"] = s.CreatorID
|
||||
@ -158,7 +180,9 @@ func (s *singleAgentDraft) fillFieldMap() {
|
||||
s.fieldMap["jump_config"] = s.JumpConfig
|
||||
s.fieldMap["background_image_info_list"] = s.BackgroundImageInfoList
|
||||
s.fieldMap["database_config"] = s.DatabaseConfig
|
||||
s.fieldMap["bot_mode"] = s.BotMode
|
||||
s.fieldMap["shortcut_command"] = s.ShortcutCommand
|
||||
s.fieldMap["layout_info"] = s.LayoutInfo
|
||||
}
|
||||
|
||||
func (s singleAgentDraft) clone(db *gorm.DB) singleAgentDraft {
|
||||
|
||||
@ -1,3 +1,19 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
@ -50,7 +66,9 @@ func newSingleAgentVersion(db *gorm.DB, opts ...gen.DOOption) singleAgentVersion
|
||||
_singleAgentVersion.Version = field.NewString(tableName, "version")
|
||||
_singleAgentVersion.BackgroundImageInfoList = field.NewField(tableName, "background_image_info_list")
|
||||
_singleAgentVersion.DatabaseConfig = field.NewField(tableName, "database_config")
|
||||
_singleAgentVersion.BotMode = field.NewInt32(tableName, "bot_mode")
|
||||
_singleAgentVersion.ShortcutCommand = field.NewField(tableName, "shortcut_command")
|
||||
_singleAgentVersion.LayoutInfo = field.NewField(tableName, "layout_info")
|
||||
|
||||
_singleAgentVersion.fillFieldMap()
|
||||
|
||||
@ -85,7 +103,9 @@ type singleAgentVersion struct {
|
||||
Version field.String // Agent Version
|
||||
BackgroundImageInfoList field.Field // Background image
|
||||
DatabaseConfig field.Field // Agent Database Base Configuration
|
||||
BotMode field.Int32 // mod,0:single mode 2:chatflow mode
|
||||
ShortcutCommand field.Field // shortcut command
|
||||
LayoutInfo field.Field // chatflow layout info
|
||||
|
||||
fieldMap map[string]field.Expr
|
||||
}
|
||||
@ -125,7 +145,9 @@ func (s *singleAgentVersion) updateTableName(table string) *singleAgentVersion {
|
||||
s.Version = field.NewString(table, "version")
|
||||
s.BackgroundImageInfoList = field.NewField(table, "background_image_info_list")
|
||||
s.DatabaseConfig = field.NewField(table, "database_config")
|
||||
s.BotMode = field.NewInt32(table, "bot_mode")
|
||||
s.ShortcutCommand = field.NewField(table, "shortcut_command")
|
||||
s.LayoutInfo = field.NewField(table, "layout_info")
|
||||
|
||||
s.fillFieldMap()
|
||||
|
||||
@ -142,7 +164,7 @@ func (s *singleAgentVersion) GetFieldByName(fieldName string) (field.OrderExpr,
|
||||
}
|
||||
|
||||
func (s *singleAgentVersion) fillFieldMap() {
|
||||
s.fieldMap = make(map[string]field.Expr, 24)
|
||||
s.fieldMap = make(map[string]field.Expr, 26)
|
||||
s.fieldMap["id"] = s.ID
|
||||
s.fieldMap["agent_id"] = s.AgentID
|
||||
s.fieldMap["creator_id"] = s.CreatorID
|
||||
@ -166,7 +188,9 @@ func (s *singleAgentVersion) fillFieldMap() {
|
||||
s.fieldMap["version"] = s.Version
|
||||
s.fieldMap["background_image_info_list"] = s.BackgroundImageInfoList
|
||||
s.fieldMap["database_config"] = s.DatabaseConfig
|
||||
s.fieldMap["bot_mode"] = s.BotMode
|
||||
s.fieldMap["shortcut_command"] = s.ShortcutCommand
|
||||
s.fieldMap["layout_info"] = s.LayoutInfo
|
||||
}
|
||||
|
||||
func (s singleAgentVersion) clone(db *gorm.DB) singleAgentVersion {
|
||||
|
||||
@ -22,7 +22,9 @@ import (
|
||||
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/app/bot_common"
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/singleagent"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/domain/agent/singleagent/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/agent/singleagent/internal/dal/model"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/agent/singleagent/internal/dal/query"
|
||||
@ -144,6 +146,8 @@ func (sa *SingleAgentDraftDAO) singleAgentDraftPo2Do(po *model.SingleAgentDraft)
|
||||
BackgroundImageInfoList: po.BackgroundImageInfoList,
|
||||
Database: po.DatabaseConfig,
|
||||
ShortcutCommand: po.ShortcutCommand,
|
||||
BotMode: bot_common.BotMode(po.BotMode),
|
||||
LayoutInfo: po.LayoutInfo,
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -171,5 +175,7 @@ func (sa *SingleAgentDraftDAO) singleAgentDraftDo2Po(do *entity.SingleAgent) *mo
|
||||
BackgroundImageInfoList: do.BackgroundImageInfoList,
|
||||
DatabaseConfig: do.Database,
|
||||
ShortcutCommand: do.ShortcutCommand,
|
||||
BotMode: int32(do.BotMode),
|
||||
LayoutInfo: do.LayoutInfo,
|
||||
}
|
||||
}
|
||||
|
||||
@ -158,3 +158,13 @@ type ModelAnswerEvent struct {
|
||||
Message *schema.Message
|
||||
Err error
|
||||
}
|
||||
|
||||
type ListRunRecordMeta struct {
|
||||
ConversationID int64 `json:"conversation_id"`
|
||||
AgentID int64 `json:"agent_id"`
|
||||
SectionID int64 `json:"section_id"`
|
||||
Limit int32 `json:"limit"`
|
||||
OrderBy string `json:"order_by"` //desc asc
|
||||
BeforeID int64 `json:"before_id"`
|
||||
AfterID int64 `json:"after_id"`
|
||||
}
|
||||
|
||||
@ -0,0 +1,140 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package internal
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/cloudwego/eino/schema"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/message"
|
||||
"github.com/coze-dev/coze-studio/backend/crossdomain/contract/crossagent"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/message/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/imagex"
|
||||
)
|
||||
|
||||
func HistoryPairs(historyMsg []*message.Message) []*message.Message {
|
||||
|
||||
fcMsgPairs := make(map[int64][]*message.Message)
|
||||
for _, one := range historyMsg {
|
||||
if one.MessageType != message.MessageTypeFunctionCall && one.MessageType != message.MessageTypeToolResponse {
|
||||
continue
|
||||
}
|
||||
if _, ok := fcMsgPairs[one.RunID]; !ok {
|
||||
fcMsgPairs[one.RunID] = []*message.Message{one}
|
||||
} else {
|
||||
fcMsgPairs[one.RunID] = append(fcMsgPairs[one.RunID], one)
|
||||
}
|
||||
}
|
||||
|
||||
var historyAfterPairs []*message.Message
|
||||
for _, value := range historyMsg {
|
||||
if value.MessageType == message.MessageTypeFunctionCall {
|
||||
if len(fcMsgPairs[value.RunID])%2 == 0 {
|
||||
historyAfterPairs = append(historyAfterPairs, value)
|
||||
}
|
||||
} else {
|
||||
historyAfterPairs = append(historyAfterPairs, value)
|
||||
}
|
||||
}
|
||||
return historyAfterPairs
|
||||
|
||||
}
|
||||
|
||||
func TransMessageToSchemaMessage(ctx context.Context, msgs []*message.Message, imagexClient imagex.ImageX) []*schema.Message {
|
||||
schemaMessage := make([]*schema.Message, 0, len(msgs))
|
||||
|
||||
for _, msgOne := range msgs {
|
||||
if msgOne.ModelContent == "" {
|
||||
continue
|
||||
}
|
||||
if msgOne.MessageType == message.MessageTypeVerbose || msgOne.MessageType == message.MessageTypeFlowUp {
|
||||
continue
|
||||
}
|
||||
var sm *schema.Message
|
||||
err := json.Unmarshal([]byte(msgOne.ModelContent), &sm)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if len(sm.ReasoningContent) > 0 {
|
||||
sm.ReasoningContent = ""
|
||||
}
|
||||
schemaMessage = append(schemaMessage, parseMessageURI(ctx, sm, imagexClient))
|
||||
}
|
||||
|
||||
return schemaMessage
|
||||
}
|
||||
|
||||
func parseMessageURI(ctx context.Context, mcMsg *schema.Message, imagexClient imagex.ImageX) *schema.Message {
|
||||
if mcMsg.MultiContent == nil {
|
||||
return mcMsg
|
||||
}
|
||||
for k, one := range mcMsg.MultiContent {
|
||||
switch one.Type {
|
||||
case schema.ChatMessagePartTypeImageURL:
|
||||
|
||||
if one.ImageURL.URI != "" {
|
||||
url, err := imagexClient.GetResourceURL(ctx, one.ImageURL.URI)
|
||||
if err == nil {
|
||||
mcMsg.MultiContent[k].ImageURL.URL = url.URL
|
||||
}
|
||||
}
|
||||
case schema.ChatMessagePartTypeFileURL:
|
||||
if one.FileURL.URI != "" {
|
||||
url, err := imagexClient.GetResourceURL(ctx, one.FileURL.URI)
|
||||
if err == nil {
|
||||
mcMsg.MultiContent[k].FileURL.URL = url.URL
|
||||
}
|
||||
}
|
||||
case schema.ChatMessagePartTypeAudioURL:
|
||||
if one.AudioURL.URI != "" {
|
||||
url, err := imagexClient.GetResourceURL(ctx, one.AudioURL.URI)
|
||||
if err == nil {
|
||||
mcMsg.MultiContent[k].AudioURL.URL = url.URL
|
||||
}
|
||||
}
|
||||
case schema.ChatMessagePartTypeVideoURL:
|
||||
if one.VideoURL.URI != "" {
|
||||
url, err := imagexClient.GetResourceURL(ctx, one.VideoURL.URI)
|
||||
if err == nil {
|
||||
mcMsg.MultiContent[k].VideoURL.URL = url.URL
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return mcMsg
|
||||
}
|
||||
|
||||
func ParseResumeInfo(_ context.Context, historyMsg []*message.Message) *crossagent.ResumeInfo {
|
||||
|
||||
var resumeInfo *crossagent.ResumeInfo
|
||||
for i := len(historyMsg) - 1; i >= 0; i-- {
|
||||
if historyMsg[i].MessageType == message.MessageTypeQuestion {
|
||||
break
|
||||
}
|
||||
if historyMsg[i].MessageType == message.MessageTypeVerbose {
|
||||
if historyMsg[i].Ext[string(entity.ExtKeyResumeInfo)] != "" {
|
||||
err := json.Unmarshal([]byte(historyMsg[i].Ext[string(entity.ExtKeyResumeInfo)]), &resumeInfo)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return resumeInfo
|
||||
}
|
||||
@ -19,6 +19,7 @@ package dal
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"gorm.io/gorm"
|
||||
@ -27,6 +28,7 @@ import (
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/internal/dal/model"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/internal/dal/query"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/idgen"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/slices"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/logs"
|
||||
)
|
||||
|
||||
@ -106,20 +108,40 @@ func (dao *RunRecordDAO) Delete(ctx context.Context, id []int64) error {
|
||||
return err
|
||||
}
|
||||
|
||||
func (dao *RunRecordDAO) List(ctx context.Context, conversationID int64, sectionID int64, limit int32) ([]*model.RunRecord, error) {
|
||||
logs.CtxInfof(ctx, "list run record req:%v, sectionID:%v, limit:%v", conversationID, sectionID, limit)
|
||||
func (dao *RunRecordDAO) List(ctx context.Context, meta *entity.ListRunRecordMeta) ([]*entity.RunRecordMeta, error) {
|
||||
logs.CtxInfof(ctx, "list run record req:%v, sectionID:%v, limit:%v", meta.ConversationID, meta.SectionID, meta.Limit)
|
||||
m := dao.query.RunRecord
|
||||
do := m.WithContext(ctx).Where(m.ConversationID.Eq(conversationID)).Debug().Where(m.Status.NotIn(string(entity.RunStatusDeleted)))
|
||||
|
||||
if sectionID > 0 {
|
||||
do = do.Where(m.SectionID.Eq(sectionID))
|
||||
do := m.WithContext(ctx).Where(m.ConversationID.Eq(meta.ConversationID)).Debug().Where(m.Status.NotIn(string(entity.RunStatusDeleted)))
|
||||
if meta.BeforeID > 0 {
|
||||
runRecord, err := m.Where(m.ID.Eq(meta.BeforeID)).First()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
do = do.Where(m.CreatedAt.Lt(runRecord.CreatedAt))
|
||||
}
|
||||
if limit > 0 {
|
||||
do = do.Limit(int(limit))
|
||||
if meta.AfterID > 0 {
|
||||
runRecord, err := m.Where(m.ID.Eq(meta.AfterID)).First()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
do = do.Where(m.CreatedAt.Gt(runRecord.CreatedAt))
|
||||
}
|
||||
if meta.SectionID > 0 {
|
||||
do = do.Where(m.SectionID.Eq(meta.SectionID))
|
||||
}
|
||||
if meta.Limit > 0 {
|
||||
do = do.Limit(int(meta.Limit))
|
||||
}
|
||||
if strings.ToLower(meta.OrderBy) == "asc" {
|
||||
do = do.Order(m.CreatedAt.Asc())
|
||||
} else {
|
||||
do = do.Order(m.CreatedAt.Desc())
|
||||
}
|
||||
|
||||
runRecords, err := do.Order(m.CreatedAt.Desc()).Find()
|
||||
return runRecords, err
|
||||
runRecords, err := do.Find()
|
||||
return slices.Transform(runRecords, func(item *model.RunRecord) *entity.RunRecordMeta {
|
||||
return dao.buildPo2Do(item)
|
||||
}), err
|
||||
}
|
||||
|
||||
func (dao *RunRecordDAO) buildCreatePO(ctx context.Context, runMeta *entity.AgentRunMeta) (*model.RunRecord, error) {
|
||||
|
||||
@ -23,7 +23,6 @@ import (
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/internal/dal"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/internal/dal/model"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/idgen"
|
||||
)
|
||||
|
||||
@ -37,5 +36,5 @@ type RunRecordRepo interface {
|
||||
GetByID(ctx context.Context, id int64) (*entity.RunRecord, error)
|
||||
Delete(ctx context.Context, id []int64) error
|
||||
UpdateByID(ctx context.Context, id int64, update *entity.UpdateMeta) error
|
||||
List(ctx context.Context, conversationID int64, sectionID int64, limit int32) ([]*model.RunRecord, error)
|
||||
List(ctx context.Context, meta *entity.ListRunRecordMeta) ([]*entity.RunRecordMeta, error)
|
||||
}
|
||||
|
||||
@ -28,4 +28,6 @@ type Run interface {
|
||||
AgentRun(ctx context.Context, req *entity.AgentRunMeta) (*schema.StreamReader[*entity.AgentRunResponse], error)
|
||||
|
||||
Delete(ctx context.Context, runID []int64) error
|
||||
Create(ctx context.Context, runRecord *entity.AgentRunMeta) (*entity.RunRecordMeta, error)
|
||||
List(ctx context.Context, ListMeta *entity.ListRunRecordMeta) ([]*entity.RunRecordMeta, error)
|
||||
}
|
||||
|
||||
@ -33,17 +33,20 @@ import (
|
||||
"github.com/cloudwego/eino/schema"
|
||||
"github.com/mohae/deepcopy"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/app/bot_common"
|
||||
messageModel "github.com/coze-dev/coze-studio/backend/api/model/conversation/message"
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/agentrun"
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/message"
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/singleagent"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/crossdomain/contract/crossagent"
|
||||
"github.com/coze-dev/coze-studio/backend/crossdomain/contract/crossmessage"
|
||||
"github.com/coze-dev/coze-studio/backend/crossdomain/contract/crossworkflow"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/internal"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/internal/dal/model"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/repository"
|
||||
msgEntity "github.com/coze-dev/coze-studio/backend/domain/conversation/message/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/imagex"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/errorx"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/conv"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/ptr"
|
||||
@ -70,8 +73,40 @@ type runtimeDependence struct {
|
||||
usage *agentrun.Usage
|
||||
}
|
||||
|
||||
func (rd *runtimeDependence) SetRunID(runID int64) {
|
||||
rd.runID = runID
|
||||
}
|
||||
func (rd *runtimeDependence) GetRunID() int64 {
|
||||
return rd.runID
|
||||
}
|
||||
func (rd *runtimeDependence) SetRunMeta(arm *entity.AgentRunMeta) {
|
||||
rd.runMeta = arm
|
||||
}
|
||||
func (rd *runtimeDependence) GetRunMeta() *entity.AgentRunMeta {
|
||||
return rd.runMeta
|
||||
}
|
||||
func (rd *runtimeDependence) SetAgentInfo(agentInfo *singleagent.SingleAgent) {
|
||||
rd.agentInfo = agentInfo
|
||||
}
|
||||
func (rd *runtimeDependence) GetAgentInfo() *singleagent.SingleAgent {
|
||||
return rd.agentInfo
|
||||
}
|
||||
func (rd *runtimeDependence) SetQuestionMsgID(msgID int64) {
|
||||
rd.questionMsgID = msgID
|
||||
}
|
||||
func (rd *runtimeDependence) GetQuestionMsgID() int64 {
|
||||
return rd.questionMsgID
|
||||
}
|
||||
func (rd *runtimeDependence) SetStartTime(t time.Time) {
|
||||
rd.startTime = t
|
||||
}
|
||||
func (rd *runtimeDependence) GetStartTime() time.Time {
|
||||
return rd.startTime
|
||||
}
|
||||
|
||||
type Components struct {
|
||||
RunRecordRepo repository.RunRecordRepo
|
||||
ImagexSVC imagex.ImageX
|
||||
}
|
||||
|
||||
func NewService(c *Components) Run {
|
||||
@ -112,7 +147,7 @@ func (c *runImpl) run(ctx context.Context, sw *schema.StreamWriter[*entity.Agent
|
||||
return
|
||||
}
|
||||
|
||||
rtDependence.agentInfo = agentInfo
|
||||
rtDependence.SetAgentInfo(agentInfo)
|
||||
|
||||
history, err := c.handlerHistory(ctx, rtDependence)
|
||||
if err != nil {
|
||||
@ -124,7 +159,7 @@ func (c *runImpl) run(ctx context.Context, sw *schema.StreamWriter[*entity.Agent
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
rtDependence.runID = runRecord.ID
|
||||
rtDependence.SetRunID(runRecord.ID)
|
||||
defer func() {
|
||||
srRecord := c.buildSendRunRecord(ctx, runRecord, entity.RunStatusCompleted)
|
||||
if err != nil {
|
||||
@ -143,9 +178,13 @@ func (c *runImpl) run(ctx context.Context, sw *schema.StreamWriter[*entity.Agent
|
||||
return
|
||||
}
|
||||
|
||||
rtDependence.questionMsgID = input.ID
|
||||
rtDependence.SetQuestionMsgID(input.ID)
|
||||
|
||||
err = c.handlerStreamExecute(ctx, sw, history, input, rtDependence)
|
||||
if rtDependence.GetAgentInfo().BotMode == bot_common.BotMode_WorkflowMode {
|
||||
err = c.handlerWfAsAgentStreamExecute(ctx, sw, history, rtDependence)
|
||||
} else {
|
||||
err = c.handlerAgentStreamExecute(ctx, sw, history, input, rtDependence)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@ -161,18 +200,78 @@ func (c *runImpl) handlerAgent(ctx context.Context, rtDependence *runtimeDepende
|
||||
return agentInfo, nil
|
||||
}
|
||||
|
||||
func (c *runImpl) handlerStreamExecute(ctx context.Context, sw *schema.StreamWriter[*entity.AgentRunResponse], historyMsg []*msgEntity.Message, input *msgEntity.Message, rtDependence *runtimeDependence) (err error) {
|
||||
func (c *runImpl) handlerWfAsAgentStreamExecute(ctx context.Context, sw *schema.StreamWriter[*entity.AgentRunResponse], historyMsg []*msgEntity.Message, rtDependence *runtimeDependence) (err error) {
|
||||
|
||||
resumeInfo := internal.ParseResumeInfo(ctx, historyMsg)
|
||||
wfID, _ := strconv.ParseInt(rtDependence.agentInfo.LayoutInfo.WorkflowId, 10, 64)
|
||||
|
||||
var wfStreamer *schema.StreamReader[*crossworkflow.WorkflowMessage]
|
||||
|
||||
executeConfig := crossworkflow.ExecuteConfig{
|
||||
ID: wfID,
|
||||
ConnectorID: rtDependence.runMeta.ConnectorID,
|
||||
ConnectorUID: rtDependence.runMeta.UserID,
|
||||
AgentID: ptr.Of(rtDependence.runMeta.AgentID),
|
||||
Mode: crossworkflow.ExecuteModeRelease,
|
||||
BizType: crossworkflow.BizTypeAgent,
|
||||
SyncPattern: crossworkflow.SyncPatternStream,
|
||||
}
|
||||
|
||||
if resumeInfo != nil {
|
||||
wfStreamer, err = crossworkflow.DefaultSVC().StreamResume(ctx, &crossworkflow.ResumeRequest{
|
||||
ResumeData: concatWfInput(rtDependence),
|
||||
EventID: resumeInfo.ChatflowInterrupt.InterruptEvent.ID,
|
||||
ExecuteID: resumeInfo.ChatflowInterrupt.ExecuteID,
|
||||
}, executeConfig)
|
||||
} else {
|
||||
wfStreamer, err = crossworkflow.DefaultSVC().StreamExecute(ctx, executeConfig, map[string]any{
|
||||
"input": concatWfInput(rtDependence),
|
||||
})
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(1)
|
||||
safego.Go(ctx, func() {
|
||||
defer wg.Done()
|
||||
c.pullWfStream(ctx, wfStreamer, rtDependence, sw)
|
||||
})
|
||||
wg.Wait()
|
||||
return err
|
||||
}
|
||||
|
||||
func concatWfInput(rtDependence *runtimeDependence) string {
|
||||
var input string
|
||||
for _, content := range rtDependence.runMeta.Content {
|
||||
if content.Type == message.InputTypeText {
|
||||
input = content.Text + "," + input
|
||||
} else {
|
||||
for _, file := range content.FileData {
|
||||
input += file.Url + ","
|
||||
}
|
||||
}
|
||||
}
|
||||
return input
|
||||
}
|
||||
|
||||
func (c *runImpl) handlerAgentStreamExecute(ctx context.Context, sw *schema.StreamWriter[*entity.AgentRunResponse], historyMsg []*msgEntity.Message, input *msgEntity.Message, rtDependence *runtimeDependence) (err error) {
|
||||
mainChan := make(chan *entity.AgentRespEvent, 100)
|
||||
|
||||
ar := &singleagent.AgentRuntime{
|
||||
ar := &crossagent.AgentRuntime{
|
||||
AgentVersion: rtDependence.runMeta.Version,
|
||||
SpaceID: rtDependence.runMeta.SpaceID,
|
||||
AgentID: rtDependence.runMeta.AgentID,
|
||||
IsDraft: rtDependence.runMeta.IsDraft,
|
||||
ConnectorID: rtDependence.runMeta.ConnectorID,
|
||||
PreRetrieveTools: rtDependence.runMeta.PreRetrieveTools,
|
||||
Input: internal.TransMessageToSchemaMessage(ctx, []*msgEntity.Message{input}, c.ImagexSVC)[0],
|
||||
HistoryMsg: internal.TransMessageToSchemaMessage(ctx, internal.HistoryPairs(historyMsg), c.ImagexSVC),
|
||||
ResumeInfo: internal.ParseResumeInfo(ctx, historyMsg),
|
||||
}
|
||||
|
||||
streamer, err := crossagent.DefaultSVC().StreamExecute(ctx, historyMsg, input, ar)
|
||||
streamer, err := crossagent.DefaultSVC().StreamExecute(ctx, ar)
|
||||
if err != nil {
|
||||
return errors.New(errorx.ErrorWithoutStack(err))
|
||||
}
|
||||
@ -369,7 +468,11 @@ func (c *runImpl) handlerHistory(ctx context.Context, rtDependence *runtimeDepen
|
||||
conversationTurns = ptr.From(rtDependence.agentInfo.ModelInfo.ShortMemoryPolicy.HistoryRound)
|
||||
}
|
||||
|
||||
runRecordList, err := c.RunRecordRepo.List(ctx, rtDependence.runMeta.ConversationID, rtDependence.runMeta.SectionID, conversationTurns)
|
||||
runRecordList, err := c.RunRecordRepo.List(ctx, &entity.ListRunRecordMeta{
|
||||
ConversationID: rtDependence.runMeta.ConversationID,
|
||||
SectionID: rtDependence.runMeta.SectionID,
|
||||
Limit: conversationTurns,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -388,7 +491,7 @@ func (c *runImpl) handlerHistory(ctx context.Context, rtDependence *runtimeDepen
|
||||
return history, nil
|
||||
}
|
||||
|
||||
func (c *runImpl) getRunID(rr []*model.RunRecord) []int64 {
|
||||
func (c *runImpl) getRunID(rr []*entity.RunRecordMeta) []int64 {
|
||||
ids := make([]int64, 0, len(rr))
|
||||
for _, c := range rr {
|
||||
ids = append(ids, c.ID)
|
||||
@ -431,6 +534,201 @@ func (c *runImpl) handlerInput(ctx context.Context, sw *schema.StreamWriter[*ent
|
||||
}
|
||||
return cm, nil
|
||||
}
|
||||
func (c *runImpl) pullWfStream(ctx context.Context, events *schema.StreamReader[*crossworkflow.WorkflowMessage], rtDependence *runtimeDependence, sw *schema.StreamWriter[*entity.AgentRunResponse]) {
|
||||
|
||||
fullAnswerContent := bytes.NewBuffer([]byte{})
|
||||
var usage *msgEntity.UsageExt
|
||||
|
||||
preAnswerMsg, cErr := c.PreCreateAnswer(ctx, rtDependence)
|
||||
|
||||
if cErr != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var preMsgIsFinish = false
|
||||
|
||||
for {
|
||||
st, re := events.Recv()
|
||||
if re != nil {
|
||||
if errors.Is(re, io.EOF) {
|
||||
// update usage
|
||||
|
||||
finishErr := c.handlerFinalAnswerFinish(ctx, sw, rtDependence)
|
||||
if finishErr != nil {
|
||||
logs.CtxErrorf(ctx, "handlerFinalAnswerFinish error: %v", finishErr)
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
logs.CtxErrorf(ctx, "pullWfStream Recv error: %v", re)
|
||||
c.handlerErr(ctx, re, sw)
|
||||
return
|
||||
}
|
||||
if st == nil {
|
||||
continue
|
||||
}
|
||||
if st.StateMessage != nil && st.StateMessage.Usage != nil {
|
||||
usage = &msgEntity.UsageExt{
|
||||
InputTokens: st.StateMessage.Usage.InputTokens,
|
||||
OutputTokens: st.StateMessage.Usage.OutputTokens,
|
||||
TotalCount: st.StateMessage.Usage.InputTokens + st.StateMessage.Usage.OutputTokens,
|
||||
}
|
||||
}
|
||||
|
||||
if st.StateMessage != nil && st.StateMessage.InterruptEvent != nil { // interrupt
|
||||
c.handlerWfInterruptMsg(ctx, sw, st.StateMessage, rtDependence)
|
||||
continue
|
||||
}
|
||||
if st.DataMessage == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
switch st.DataMessage.Type {
|
||||
case crossworkflow.Answer:
|
||||
|
||||
// input node & question node skip
|
||||
if st.DataMessage != nil && (st.DataMessage.NodeType == crossworkflow.NodeTypeInputReceiver || st.DataMessage.NodeType == crossworkflow.NodeTypeQuestion) {
|
||||
break
|
||||
}
|
||||
|
||||
if preMsgIsFinish {
|
||||
preAnswerMsg, cErr = c.PreCreateAnswer(ctx, rtDependence)
|
||||
if cErr != nil {
|
||||
return
|
||||
}
|
||||
preMsgIsFinish = false
|
||||
}
|
||||
|
||||
if st.DataMessage.Last {
|
||||
preMsgIsFinish = true
|
||||
sendAnswerMsg := c.buildSendMsg(ctx, preAnswerMsg, false, rtDependence)
|
||||
sendAnswerMsg.Content = fullAnswerContent.String()
|
||||
fullAnswerContent.Reset()
|
||||
hfErr := c.handlerAnswer(ctx, sendAnswerMsg, sw, usage, rtDependence, preAnswerMsg)
|
||||
if hfErr != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
sendAnswerMsg := c.buildSendMsg(ctx, preAnswerMsg, false, rtDependence)
|
||||
sendAnswerMsg.Content = st.DataMessage.Content
|
||||
fullAnswerContent.WriteString(st.DataMessage.Content)
|
||||
c.runEvent.SendMsgEvent(entity.RunEventMessageDelta, sendAnswerMsg, sw)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (c *runImpl) handlerWfInterruptMsg(ctx context.Context, sw *schema.StreamWriter[*entity.AgentRunResponse], stateMsg *crossworkflow.StateMessage, rtDependence *runtimeDependence) {
|
||||
interruptData, cType, err := c.handlerWfInterruptEvent(ctx, stateMsg.InterruptEvent)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
preMsg, err := c.PreCreateAnswer(ctx, rtDependence)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
deltaAnswer := &entity.ChunkMessageItem{
|
||||
ID: preMsg.ID,
|
||||
ConversationID: preMsg.ConversationID,
|
||||
SectionID: preMsg.SectionID,
|
||||
RunID: preMsg.RunID,
|
||||
AgentID: preMsg.AgentID,
|
||||
Role: entity.RoleType(preMsg.Role),
|
||||
Content: interruptData,
|
||||
MessageType: preMsg.MessageType,
|
||||
ContentType: cType,
|
||||
ReplyID: preMsg.RunID,
|
||||
Ext: preMsg.Ext,
|
||||
IsFinish: false,
|
||||
}
|
||||
|
||||
c.runEvent.SendMsgEvent(entity.RunEventMessageDelta, deltaAnswer, sw)
|
||||
finalAnswer := deepcopy.Copy(deltaAnswer).(*entity.ChunkMessageItem)
|
||||
|
||||
err = c.handlerAnswer(ctx, finalAnswer, sw, nil, rtDependence, preMsg)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
err = c.handlerInterruptVerbose(ctx, &entity.AgentRespEvent{
|
||||
EventType: message.MessageTypeInterrupt,
|
||||
Interrupt: &singleagent.InterruptInfo{
|
||||
|
||||
InterruptType: singleagent.InterruptEventType(stateMsg.InterruptEvent.EventType),
|
||||
InterruptID: strconv.FormatInt(stateMsg.InterruptEvent.ID, 10),
|
||||
ChatflowInterrupt: stateMsg,
|
||||
},
|
||||
}, sw, rtDependence)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
func (c *runImpl) handlerWfInterruptEvent(_ context.Context, interruptEventData *crossworkflow.InterruptEvent) (string, message.ContentType, error) {
|
||||
|
||||
type msg struct {
|
||||
Type string `json:"type,omitempty"`
|
||||
ContentType string `json:"content_type"`
|
||||
Content any `json:"content"` // either optionContent or string
|
||||
ID string `json:"id,omitempty"`
|
||||
}
|
||||
|
||||
defaultContentType := message.ContentTypeText
|
||||
switch singleagent.InterruptEventType(interruptEventData.EventType) {
|
||||
case singleagent.InterruptEventType_OauthPlugin:
|
||||
|
||||
case singleagent.InterruptEventType_Question:
|
||||
var iData map[string][]*msg
|
||||
err := json.Unmarshal([]byte(interruptEventData.InterruptData), &iData)
|
||||
if err != nil {
|
||||
return "", defaultContentType, err
|
||||
}
|
||||
if len(iData["messages"]) == 0 {
|
||||
return "", defaultContentType, errorx.New(errno.ErrInterruptDataEmpty)
|
||||
}
|
||||
interruptMsg := iData["messages"][0]
|
||||
|
||||
if interruptMsg.ContentType == "text" {
|
||||
return interruptMsg.Content.(string), defaultContentType, nil
|
||||
} else if interruptMsg.ContentType == "option" || interruptMsg.ContentType == "form_schema" {
|
||||
iMarshalData, err := json.Marshal(interruptMsg)
|
||||
if err != nil {
|
||||
return "", defaultContentType, err
|
||||
}
|
||||
return string(iMarshalData), message.ContentTypeCard, nil
|
||||
}
|
||||
case singleagent.InterruptEventType_InputNode:
|
||||
data := interruptEventData.InterruptData
|
||||
return data, message.ContentTypeCard, nil
|
||||
case singleagent.InterruptEventType_WorkflowLLM:
|
||||
data := interruptEventData.ToolInterruptEvent.InterruptData
|
||||
if singleagent.InterruptEventType(interruptEventData.EventType) == singleagent.InterruptEventType_InputNode {
|
||||
return data, message.ContentTypeCard, nil
|
||||
}
|
||||
if singleagent.InterruptEventType(interruptEventData.EventType) == singleagent.InterruptEventType_Question {
|
||||
var iData map[string][]*msg
|
||||
err := json.Unmarshal([]byte(data), &iData)
|
||||
if err != nil {
|
||||
return "", defaultContentType, err
|
||||
}
|
||||
if len(iData["messages"]) == 0 {
|
||||
return "", defaultContentType, errorx.New(errno.ErrInterruptDataEmpty)
|
||||
}
|
||||
interruptMsg := iData["messages"][0]
|
||||
|
||||
if interruptMsg.ContentType == "text" {
|
||||
return interruptMsg.Content.(string), defaultContentType, nil
|
||||
} else if interruptMsg.ContentType == "option" || interruptMsg.ContentType == "form_schema" {
|
||||
iMarshalData, err := json.Marshal(interruptMsg)
|
||||
if err != nil {
|
||||
return "", defaultContentType, err
|
||||
}
|
||||
return string(iMarshalData), message.ContentTypeCard, nil
|
||||
}
|
||||
}
|
||||
return "", defaultContentType, errorx.New(errno.ErrUnknowInterruptType)
|
||||
|
||||
}
|
||||
return "", defaultContentType, errorx.New(errno.ErrUnknowInterruptType)
|
||||
}
|
||||
|
||||
func (c *runImpl) pull(_ context.Context, mainChan chan *entity.AgentRespEvent, events *schema.StreamReader[*crossagent.AgentEvent]) {
|
||||
defer func() {
|
||||
@ -766,7 +1064,7 @@ func (c *runImpl) saveReasoningContent(ctx context.Context, firstAnswerMsg *msgE
|
||||
}
|
||||
}
|
||||
|
||||
func (c *runImpl) handlerInterrupt(ctx context.Context, chunk *entity.AgentRespEvent, sw *schema.StreamWriter[*entity.AgentRunResponse], rtDependence *runtimeDependence, firstAnswerMsg *msgEntity.Message, reasoningCOntent string) error {
|
||||
func (c *runImpl) handlerInterrupt(ctx context.Context, chunk *entity.AgentRespEvent, sw *schema.StreamWriter[*entity.AgentRunResponse], rtDependence *runtimeDependence, firstAnswerMsg *msgEntity.Message, reasoningContent string) error {
|
||||
interruptData, cType, err := c.parseInterruptData(ctx, chunk.Interrupt)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -792,8 +1090,8 @@ func (c *runImpl) handlerInterrupt(ctx context.Context, chunk *entity.AgentRespE
|
||||
|
||||
c.runEvent.SendMsgEvent(entity.RunEventMessageDelta, deltaAnswer, sw)
|
||||
finalAnswer := deepcopy.Copy(deltaAnswer).(*entity.ChunkMessageItem)
|
||||
if len(reasoningCOntent) > 0 && firstAnswerMsg == nil {
|
||||
finalAnswer.ReasoningContent = ptr.Of(reasoningCOntent)
|
||||
if len(reasoningContent) > 0 && firstAnswerMsg == nil {
|
||||
finalAnswer.ReasoningContent = ptr.Of(reasoningContent)
|
||||
}
|
||||
err = c.handlerAnswer(ctx, finalAnswer, sw, nil, rtDependence, preMsg)
|
||||
if err != nil {
|
||||
@ -1194,3 +1492,11 @@ func (c *runImpl) buildSendRunRecord(_ context.Context, runRecord *entity.RunRec
|
||||
func (c *runImpl) Delete(ctx context.Context, runID []int64) error {
|
||||
return c.RunRecordRepo.Delete(ctx, runID)
|
||||
}
|
||||
|
||||
func (c *runImpl) List(ctx context.Context, meta *entity.ListRunRecordMeta) ([]*entity.RunRecordMeta, error) {
|
||||
return c.RunRecordRepo.List(ctx, meta)
|
||||
}
|
||||
|
||||
func (c *runImpl) Create(ctx context.Context, runRecord *entity.AgentRunMeta) (*entity.RunRecordMeta, error) {
|
||||
return c.RunRecordRepo.Create(ctx, runRecord)
|
||||
}
|
||||
|
||||
@ -17,7 +17,18 @@
|
||||
package agentrun
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"go.uber.org/mock/gomock"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/internal/dal/model"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/agentrun/repository"
|
||||
mock "github.com/coze-dev/coze-studio/backend/internal/mock/infra/contract/idgen"
|
||||
"github.com/coze-dev/coze-studio/backend/internal/mock/infra/contract/orm"
|
||||
)
|
||||
|
||||
func TestAgentRun(t *testing.T) {
|
||||
@ -97,3 +108,158 @@ func TestAgentRun(t *testing.T) {
|
||||
// assert.NoError(t, err)
|
||||
|
||||
}
|
||||
|
||||
func TestRunImpl_List(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
mockDBGen := orm.NewMockDB()
|
||||
mockDBGen.AddTable(&model.RunRecord{}).AddRows(
|
||||
&model.RunRecord{
|
||||
ID: 1,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix(),
|
||||
},
|
||||
&model.RunRecord{
|
||||
ID: 2,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix() + 1,
|
||||
}, &model.RunRecord{
|
||||
ID: 3,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix() + 2,
|
||||
}, &model.RunRecord{
|
||||
ID: 4,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix() + 3,
|
||||
}, &model.RunRecord{
|
||||
ID: 5,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix() + 4,
|
||||
},
|
||||
&model.RunRecord{
|
||||
ID: 6,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix() + 5,
|
||||
}, &model.RunRecord{
|
||||
ID: 7,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix() + 6,
|
||||
}, &model.RunRecord{
|
||||
ID: 8,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix() + 7,
|
||||
}, &model.RunRecord{
|
||||
ID: 9,
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
UserID: "123456",
|
||||
CreatedAt: time.Now().Unix() + 8,
|
||||
},
|
||||
)
|
||||
mockDB, err := mockDBGen.DB()
|
||||
assert.NoError(t, err)
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
mockIDGen := mock.NewMockIDGenerator(ctrl)
|
||||
|
||||
runRecordRepo := repository.NewRunRecordRepo(mockDB, mockIDGen)
|
||||
|
||||
service := &runImpl{
|
||||
Components: Components{
|
||||
RunRecordRepo: runRecordRepo,
|
||||
},
|
||||
}
|
||||
|
||||
t.Run("list success", func(t *testing.T) {
|
||||
|
||||
meta := &entity.ListRunRecordMeta{
|
||||
ConversationID: 123,
|
||||
AgentID: 456,
|
||||
SectionID: 789,
|
||||
Limit: 10,
|
||||
OrderBy: "desc",
|
||||
}
|
||||
|
||||
result, err := service.List(ctx, meta)
|
||||
// check result
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, result, 9)
|
||||
assert.Equal(t, int64(123), result[0].ConversationID)
|
||||
assert.Equal(t, int64(456), result[0].AgentID)
|
||||
})
|
||||
|
||||
t.Run("empty list", func(t *testing.T) {
|
||||
meta := &entity.ListRunRecordMeta{
|
||||
ConversationID: 999, //
|
||||
Limit: 10,
|
||||
OrderBy: "desc",
|
||||
}
|
||||
|
||||
// check result
|
||||
result, err := service.List(ctx, meta)
|
||||
assert.NoError(t, err)
|
||||
assert.Empty(t, result)
|
||||
})
|
||||
|
||||
t.Run("search with before id", func(t *testing.T) {
|
||||
|
||||
meta := &entity.ListRunRecordMeta{
|
||||
ConversationID: 123,
|
||||
SectionID: 789,
|
||||
AgentID: 456,
|
||||
BeforeID: 5,
|
||||
Limit: 3,
|
||||
OrderBy: "desc",
|
||||
}
|
||||
|
||||
result, err := service.List(ctx, meta)
|
||||
|
||||
// check result
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, result, 3)
|
||||
assert.Equal(t, int64(4), result[0].ID)
|
||||
})
|
||||
t.Run("search with after id and limit", func(t *testing.T) {
|
||||
|
||||
meta := &entity.ListRunRecordMeta{
|
||||
ConversationID: 123,
|
||||
SectionID: 789,
|
||||
AgentID: 456,
|
||||
AfterID: 5,
|
||||
Limit: 3,
|
||||
OrderBy: "desc",
|
||||
}
|
||||
|
||||
result, err := service.List(ctx, meta)
|
||||
|
||||
// check result
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, result, 3)
|
||||
assert.Equal(t, int64(9), result[0].ID)
|
||||
|
||||
})
|
||||
}
|
||||
|
||||
@ -84,14 +84,21 @@ func (dao *MessageDAO) List(ctx context.Context, conversationID int64, limit int
|
||||
}
|
||||
|
||||
if cursor > 0 {
|
||||
if direction == entity.ScrollPageDirectionPrev {
|
||||
do = do.Where(m.CreatedAt.Lt(cursor))
|
||||
} else {
|
||||
do = do.Where(m.CreatedAt.Gt(cursor))
|
||||
msg, err := m.Where(m.ID.Eq(cursor)).First()
|
||||
if err != nil {
|
||||
return nil, false, err
|
||||
}
|
||||
if direction == entity.ScrollPageDirectionPrev {
|
||||
do = do.Where(m.CreatedAt.Lt(msg.CreatedAt))
|
||||
do = do.Order(m.CreatedAt.Desc())
|
||||
} else {
|
||||
do = do.Where(m.CreatedAt.Gt(msg.CreatedAt))
|
||||
do = do.Order(m.CreatedAt.Asc())
|
||||
}
|
||||
} else {
|
||||
do = do.Order(m.CreatedAt.Desc())
|
||||
}
|
||||
|
||||
do = do.Order(m.CreatedAt.Desc())
|
||||
messageList, err := do.Find()
|
||||
|
||||
var hasMore bool
|
||||
@ -138,6 +145,10 @@ func (dao *MessageDAO) Edit(ctx context.Context, msgID int64, msg *message.Messa
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
if do.RowsAffected == 0 {
|
||||
return 0, errorx.New(errno.ErrRecordNotFound)
|
||||
}
|
||||
|
||||
return do.RowsAffected, nil
|
||||
}
|
||||
|
||||
|
||||
@ -18,6 +18,7 @@ package message
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sort"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/api/model/crossdomain/message"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/conversation/message/entity"
|
||||
@ -62,8 +63,11 @@ func (m *messageImpl) List(ctx context.Context, req *entity.ListMeta) (*entity.L
|
||||
resp.HasMore = hasMore
|
||||
|
||||
if len(messageList) > 0 {
|
||||
resp.PrevCursor = messageList[len(messageList)-1].CreatedAt
|
||||
resp.NextCursor = messageList[0].CreatedAt
|
||||
sort.Slice(messageList, func(i, j int) bool {
|
||||
return messageList[i].CreatedAt > messageList[j].CreatedAt
|
||||
})
|
||||
resp.PrevCursor = messageList[len(messageList)-1].ID
|
||||
resp.NextCursor = messageList[0].ID
|
||||
|
||||
var runIDs []int64
|
||||
for _, m := range messageList {
|
||||
|
||||
@ -32,6 +32,7 @@ type CreateApiKey struct {
|
||||
Name string `json:"name"`
|
||||
Expire int64 `json:"expire"`
|
||||
UserID int64 `json:"user_id"`
|
||||
AkType AkType `json:"ak_type"`
|
||||
}
|
||||
|
||||
type DeleteApiKey struct {
|
||||
|
||||
24
backend/domain/openauth/openapiauth/entity/consts.go
Normal file
24
backend/domain/openauth/openapiauth/entity/consts.go
Normal file
@ -0,0 +1,24 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package entity
|
||||
|
||||
type AkType int32
|
||||
|
||||
const (
|
||||
AkTypeCustomer AkType = 0
|
||||
AkTypeTemporary AkType = 1
|
||||
)
|
||||
@ -72,6 +72,7 @@ func (a *ApiKeyDAO) doToPo(ctx context.Context, do *entity.CreateApiKey) (*model
|
||||
Name: do.Name,
|
||||
ExpiredAt: do.Expire,
|
||||
UserID: do.UserID,
|
||||
AkType: int32(do.AkType),
|
||||
CreatedAt: time.Now().Unix(),
|
||||
}
|
||||
return po, nil
|
||||
@ -119,7 +120,7 @@ func (a *ApiKeyDAO) FindByKey(ctx context.Context, key string) (*model.APIKey, e
|
||||
|
||||
func (a *ApiKeyDAO) List(ctx context.Context, userID int64, limit int, page int) ([]*model.APIKey, bool, error) {
|
||||
do := a.dbQuery.APIKey.WithContext(ctx).Where(a.dbQuery.APIKey.UserID.Eq(userID))
|
||||
|
||||
do = do.Where(a.dbQuery.APIKey.AkType.Eq(int32(entity.AkTypeCustomer)))
|
||||
do = do.Offset((page - 1) * limit).Limit(limit + 1)
|
||||
|
||||
list, err := do.Order(a.dbQuery.APIKey.CreatedAt.Desc()).Find()
|
||||
|
||||
@ -17,6 +17,7 @@ type APIKey struct {
|
||||
CreatedAt int64 `gorm:"column:created_at;not null;autoCreateTime:milli;comment:Create Time in Milliseconds" json:"created_at"` // Create Time in Milliseconds
|
||||
UpdatedAt int64 `gorm:"column:updated_at;not null;autoUpdateTime:milli;comment:Update Time in Milliseconds" json:"updated_at"` // Update Time in Milliseconds
|
||||
LastUsedAt int64 `gorm:"column:last_used_at;not null;comment:Used Time in Milliseconds" json:"last_used_at"` // Used Time in Milliseconds
|
||||
AkType int32 `gorm:"column:ak_type;not null;comment:api key type" json:"ak_type"` // api key type
|
||||
}
|
||||
|
||||
// TableName APIKey's table name
|
||||
|
||||
@ -36,6 +36,7 @@ func newAPIKey(db *gorm.DB, opts ...gen.DOOption) aPIKey {
|
||||
_aPIKey.CreatedAt = field.NewInt64(tableName, "created_at")
|
||||
_aPIKey.UpdatedAt = field.NewInt64(tableName, "updated_at")
|
||||
_aPIKey.LastUsedAt = field.NewInt64(tableName, "last_used_at")
|
||||
_aPIKey.AkType = field.NewInt32(tableName, "ak_type")
|
||||
|
||||
_aPIKey.fillFieldMap()
|
||||
|
||||
@ -56,6 +57,7 @@ type aPIKey struct {
|
||||
CreatedAt field.Int64 // Create Time in Milliseconds
|
||||
UpdatedAt field.Int64 // Update Time in Milliseconds
|
||||
LastUsedAt field.Int64 // Used Time in Milliseconds
|
||||
AkType field.Int32 // api key type
|
||||
|
||||
fieldMap map[string]field.Expr
|
||||
}
|
||||
@ -81,6 +83,7 @@ func (a *aPIKey) updateTableName(table string) *aPIKey {
|
||||
a.CreatedAt = field.NewInt64(table, "created_at")
|
||||
a.UpdatedAt = field.NewInt64(table, "updated_at")
|
||||
a.LastUsedAt = field.NewInt64(table, "last_used_at")
|
||||
a.AkType = field.NewInt32(table, "ak_type")
|
||||
|
||||
a.fillFieldMap()
|
||||
|
||||
@ -97,7 +100,7 @@ func (a *aPIKey) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
|
||||
}
|
||||
|
||||
func (a *aPIKey) fillFieldMap() {
|
||||
a.fieldMap = make(map[string]field.Expr, 9)
|
||||
a.fieldMap = make(map[string]field.Expr, 10)
|
||||
a.fieldMap["id"] = a.ID
|
||||
a.fieldMap["api_key"] = a.APIKey
|
||||
a.fieldMap["name"] = a.Name
|
||||
@ -107,6 +110,7 @@ func (a *aPIKey) fillFieldMap() {
|
||||
a.fieldMap["created_at"] = a.CreatedAt
|
||||
a.fieldMap["updated_at"] = a.UpdatedAt
|
||||
a.fieldMap["last_used_at"] = a.LastUsedAt
|
||||
a.fieldMap["ak_type"] = a.AkType
|
||||
}
|
||||
|
||||
func (a aPIKey) clone(db *gorm.DB) aPIKey {
|
||||
|
||||
29
backend/domain/upload/entity/file.go
Normal file
29
backend/domain/upload/entity/file.go
Normal file
@ -0,0 +1,29 @@
|
||||
package entity
|
||||
|
||||
type File struct {
|
||||
ID int64 `json:"id"`
|
||||
Name string `json:"name"`
|
||||
FileSize int64 `json:"file_size"`
|
||||
TosURI string `json:"tos_uri"`
|
||||
Status FileStatus `json:"status"`
|
||||
Comment string `json:"comment"`
|
||||
Source FileSource `json:"source"`
|
||||
CreatorID string `json:"creator_id"`
|
||||
CozeAccountID int64 `json:"coze_account_id"`
|
||||
ContentType string `json:"content_type"`
|
||||
CreatedAt int64 `json:"created_at"`
|
||||
UpdatedAt int64 `json:"updated_at"`
|
||||
}
|
||||
|
||||
type FileStatus int32
|
||||
|
||||
const (
|
||||
FileStatusInvalid FileStatus = 0
|
||||
FileStatusValid FileStatus = 1
|
||||
)
|
||||
|
||||
type FileSource int32
|
||||
|
||||
const (
|
||||
FileSourceAPI FileSource = 1
|
||||
)
|
||||
96
backend/domain/upload/internal/dal/dao/files.go
Normal file
96
backend/domain/upload/internal/dal/dao/files.go
Normal file
@ -0,0 +1,96 @@
|
||||
package dao
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/internal/dal/model"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/internal/dal/query"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/lang/slices"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
type FilesDAO struct {
|
||||
DB *gorm.DB
|
||||
Query *query.Query
|
||||
}
|
||||
|
||||
func NewFilesDAO(db *gorm.DB) *FilesDAO {
|
||||
return &FilesDAO{
|
||||
DB: db,
|
||||
Query: query.Use(db),
|
||||
}
|
||||
}
|
||||
|
||||
func (dao *FilesDAO) Create(ctx context.Context, file *entity.File) error {
|
||||
f := dao.fromEntityToModel(file)
|
||||
return dao.Query.Files.WithContext(ctx).Create(f)
|
||||
}
|
||||
|
||||
func (dao *FilesDAO) BatchCreate(ctx context.Context, files []*entity.File) error {
|
||||
if len(files) == 0 {
|
||||
return nil
|
||||
}
|
||||
return dao.Query.Files.WithContext(ctx).CreateInBatches(slices.Transform(files, dao.fromEntityToModel), len(files))
|
||||
}
|
||||
|
||||
func (dao *FilesDAO) Delete(ctx context.Context, id int64) error {
|
||||
_, err := dao.Query.Files.WithContext(ctx).Where(dao.Query.Files.ID.Eq(id)).Delete()
|
||||
return err
|
||||
}
|
||||
|
||||
func (dao *FilesDAO) GetByID(ctx context.Context, id int64) (*entity.File, error) {
|
||||
file, err := dao.Query.Files.WithContext(ctx).Where(dao.Query.Files.ID.Eq(id)).First()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dao.fromModelToEntity(file), nil
|
||||
}
|
||||
|
||||
func (dao *FilesDAO) MGetByIDs(ctx context.Context, ids []int64) ([]*entity.File, error) {
|
||||
if len(ids) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
files, err := dao.Query.Files.WithContext(ctx).Where(dao.Query.Files.ID.In(ids...)).Find()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return slices.Transform(files, dao.fromModelToEntity), nil
|
||||
}
|
||||
|
||||
func (dao *FilesDAO) fromModelToEntity(model *model.Files) *entity.File {
|
||||
if model == nil {
|
||||
return nil
|
||||
}
|
||||
return &entity.File{
|
||||
ID: model.ID,
|
||||
Name: model.Name,
|
||||
FileSize: model.FileSize,
|
||||
TosURI: model.TosURI,
|
||||
Status: entity.FileStatus(model.Status),
|
||||
Comment: model.Comment,
|
||||
Source: entity.FileSource(model.Source),
|
||||
CreatorID: model.CreatorID,
|
||||
CozeAccountID: model.CozeAccountID,
|
||||
ContentType: model.ContentType,
|
||||
CreatedAt: model.CreatedAt,
|
||||
UpdatedAt: model.UpdatedAt,
|
||||
}
|
||||
}
|
||||
|
||||
func (dao *FilesDAO) fromEntityToModel(entity *entity.File) *model.Files {
|
||||
return &model.Files{
|
||||
ID: entity.ID,
|
||||
Name: entity.Name,
|
||||
FileSize: entity.FileSize,
|
||||
TosURI: entity.TosURI,
|
||||
Status: int32(entity.Status),
|
||||
Comment: entity.Comment,
|
||||
Source: int32(entity.Source),
|
||||
CreatorID: entity.CreatorID,
|
||||
CozeAccountID: entity.CozeAccountID,
|
||||
ContentType: entity.ContentType,
|
||||
CreatedAt: entity.CreatedAt,
|
||||
UpdatedAt: entity.UpdatedAt,
|
||||
}
|
||||
}
|
||||
33
backend/domain/upload/internal/dal/model/files.gen.go
Normal file
33
backend/domain/upload/internal/dal/model/files.gen.go
Normal file
@ -0,0 +1,33 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package model
|
||||
|
||||
import (
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
const TableNameFiles = "files"
|
||||
|
||||
// Files file resource table
|
||||
type Files struct {
|
||||
ID int64 `gorm:"column:id;primaryKey;comment:id" json:"id"` // id
|
||||
Name string `gorm:"column:name;not null;comment:file name" json:"name"` // file name
|
||||
FileSize int64 `gorm:"column:file_size;not null;comment:file size" json:"file_size"` // file size
|
||||
TosURI string `gorm:"column:tos_uri;not null;comment:TOS URI" json:"tos_uri"` // TOS URI
|
||||
Status int32 `gorm:"column:status;not null;comment:status,0invalid,1valid" json:"status"` // status,0invalid,1valid
|
||||
Comment string `gorm:"column:comment;not null;comment:file comment" json:"comment"` // file comment
|
||||
Source int32 `gorm:"column:source;not null;comment:source:1 from API," json:"source"` // source:1 from API,
|
||||
CreatorID string `gorm:"column:creator_id;not null;comment:creator id" json:"creator_id"` // creator id
|
||||
ContentType string `gorm:"column:content_type;not null;comment:content type" json:"content_type"` // content type
|
||||
CozeAccountID int64 `gorm:"column:coze_account_id;not null;comment:coze account id" json:"coze_account_id"` // coze account id
|
||||
CreatedAt int64 `gorm:"column:created_at;not null;autoCreateTime:milli;comment:Create Time in Milliseconds" json:"created_at"` // Create Time in Milliseconds
|
||||
UpdatedAt int64 `gorm:"column:updated_at;not null;autoUpdateTime:milli;comment:Update Time in Milliseconds" json:"updated_at"` // Update Time in Milliseconds
|
||||
DeletedAt gorm.DeletedAt `gorm:"column:deleted_at;comment:Delete Time" json:"deleted_at"` // Delete Time
|
||||
}
|
||||
|
||||
// TableName Files's table name
|
||||
func (*Files) TableName() string {
|
||||
return TableNameFiles
|
||||
}
|
||||
429
backend/domain/upload/internal/dal/query/files.gen.go
Normal file
429
backend/domain/upload/internal/dal/query/files.gen.go
Normal file
@ -0,0 +1,429 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"gorm.io/gorm"
|
||||
"gorm.io/gorm/clause"
|
||||
"gorm.io/gorm/schema"
|
||||
|
||||
"gorm.io/gen"
|
||||
"gorm.io/gen/field"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/internal/dal/model"
|
||||
)
|
||||
|
||||
func newFiles(db *gorm.DB, opts ...gen.DOOption) files {
|
||||
_files := files{}
|
||||
|
||||
_files.filesDo.UseDB(db, opts...)
|
||||
_files.filesDo.UseModel(&model.Files{})
|
||||
|
||||
tableName := _files.filesDo.TableName()
|
||||
_files.ALL = field.NewAsterisk(tableName)
|
||||
_files.ID = field.NewInt64(tableName, "id")
|
||||
_files.Name = field.NewString(tableName, "name")
|
||||
_files.FileSize = field.NewInt64(tableName, "file_size")
|
||||
_files.TosURI = field.NewString(tableName, "tos_uri")
|
||||
_files.Status = field.NewInt32(tableName, "status")
|
||||
_files.Comment = field.NewString(tableName, "comment")
|
||||
_files.Source = field.NewInt32(tableName, "source")
|
||||
_files.CreatorID = field.NewString(tableName, "creator_id")
|
||||
_files.ContentType = field.NewString(tableName, "content_type")
|
||||
_files.CozeAccountID = field.NewInt64(tableName, "coze_account_id")
|
||||
_files.CreatedAt = field.NewInt64(tableName, "created_at")
|
||||
_files.UpdatedAt = field.NewInt64(tableName, "updated_at")
|
||||
_files.DeletedAt = field.NewField(tableName, "deleted_at")
|
||||
|
||||
_files.fillFieldMap()
|
||||
|
||||
return _files
|
||||
}
|
||||
|
||||
// files file resource table
|
||||
type files struct {
|
||||
filesDo
|
||||
|
||||
ALL field.Asterisk
|
||||
ID field.Int64 // id
|
||||
Name field.String // file name
|
||||
FileSize field.Int64 // file size
|
||||
TosURI field.String // TOS URI
|
||||
Status field.Int32 // status,0invalid,1valid
|
||||
Comment field.String // file comment
|
||||
Source field.Int32 // source:1 from API,
|
||||
CreatorID field.String // creator id
|
||||
ContentType field.String // content type
|
||||
CozeAccountID field.Int64 // coze account id
|
||||
CreatedAt field.Int64 // Create Time in Milliseconds
|
||||
UpdatedAt field.Int64 // Update Time in Milliseconds
|
||||
DeletedAt field.Field // Delete Time
|
||||
|
||||
fieldMap map[string]field.Expr
|
||||
}
|
||||
|
||||
func (f files) Table(newTableName string) *files {
|
||||
f.filesDo.UseTable(newTableName)
|
||||
return f.updateTableName(newTableName)
|
||||
}
|
||||
|
||||
func (f files) As(alias string) *files {
|
||||
f.filesDo.DO = *(f.filesDo.As(alias).(*gen.DO))
|
||||
return f.updateTableName(alias)
|
||||
}
|
||||
|
||||
func (f *files) updateTableName(table string) *files {
|
||||
f.ALL = field.NewAsterisk(table)
|
||||
f.ID = field.NewInt64(table, "id")
|
||||
f.Name = field.NewString(table, "name")
|
||||
f.FileSize = field.NewInt64(table, "file_size")
|
||||
f.TosURI = field.NewString(table, "tos_uri")
|
||||
f.Status = field.NewInt32(table, "status")
|
||||
f.Comment = field.NewString(table, "comment")
|
||||
f.Source = field.NewInt32(table, "source")
|
||||
f.CreatorID = field.NewString(table, "creator_id")
|
||||
f.ContentType = field.NewString(table, "content_type")
|
||||
f.CozeAccountID = field.NewInt64(table, "coze_account_id")
|
||||
f.CreatedAt = field.NewInt64(table, "created_at")
|
||||
f.UpdatedAt = field.NewInt64(table, "updated_at")
|
||||
f.DeletedAt = field.NewField(table, "deleted_at")
|
||||
|
||||
f.fillFieldMap()
|
||||
|
||||
return f
|
||||
}
|
||||
|
||||
func (f *files) GetFieldByName(fieldName string) (field.OrderExpr, bool) {
|
||||
_f, ok := f.fieldMap[fieldName]
|
||||
if !ok || _f == nil {
|
||||
return nil, false
|
||||
}
|
||||
_oe, ok := _f.(field.OrderExpr)
|
||||
return _oe, ok
|
||||
}
|
||||
|
||||
func (f *files) fillFieldMap() {
|
||||
f.fieldMap = make(map[string]field.Expr, 13)
|
||||
f.fieldMap["id"] = f.ID
|
||||
f.fieldMap["name"] = f.Name
|
||||
f.fieldMap["file_size"] = f.FileSize
|
||||
f.fieldMap["tos_uri"] = f.TosURI
|
||||
f.fieldMap["status"] = f.Status
|
||||
f.fieldMap["comment"] = f.Comment
|
||||
f.fieldMap["source"] = f.Source
|
||||
f.fieldMap["creator_id"] = f.CreatorID
|
||||
f.fieldMap["content_type"] = f.ContentType
|
||||
f.fieldMap["coze_account_id"] = f.CozeAccountID
|
||||
f.fieldMap["created_at"] = f.CreatedAt
|
||||
f.fieldMap["updated_at"] = f.UpdatedAt
|
||||
f.fieldMap["deleted_at"] = f.DeletedAt
|
||||
}
|
||||
|
||||
func (f files) clone(db *gorm.DB) files {
|
||||
f.filesDo.ReplaceConnPool(db.Statement.ConnPool)
|
||||
return f
|
||||
}
|
||||
|
||||
func (f files) replaceDB(db *gorm.DB) files {
|
||||
f.filesDo.ReplaceDB(db)
|
||||
return f
|
||||
}
|
||||
|
||||
type filesDo struct{ gen.DO }
|
||||
|
||||
type IFilesDo interface {
|
||||
gen.SubQuery
|
||||
Debug() IFilesDo
|
||||
WithContext(ctx context.Context) IFilesDo
|
||||
WithResult(fc func(tx gen.Dao)) gen.ResultInfo
|
||||
ReplaceDB(db *gorm.DB)
|
||||
ReadDB() IFilesDo
|
||||
WriteDB() IFilesDo
|
||||
As(alias string) gen.Dao
|
||||
Session(config *gorm.Session) IFilesDo
|
||||
Columns(cols ...field.Expr) gen.Columns
|
||||
Clauses(conds ...clause.Expression) IFilesDo
|
||||
Not(conds ...gen.Condition) IFilesDo
|
||||
Or(conds ...gen.Condition) IFilesDo
|
||||
Select(conds ...field.Expr) IFilesDo
|
||||
Where(conds ...gen.Condition) IFilesDo
|
||||
Order(conds ...field.Expr) IFilesDo
|
||||
Distinct(cols ...field.Expr) IFilesDo
|
||||
Omit(cols ...field.Expr) IFilesDo
|
||||
Join(table schema.Tabler, on ...field.Expr) IFilesDo
|
||||
LeftJoin(table schema.Tabler, on ...field.Expr) IFilesDo
|
||||
RightJoin(table schema.Tabler, on ...field.Expr) IFilesDo
|
||||
Group(cols ...field.Expr) IFilesDo
|
||||
Having(conds ...gen.Condition) IFilesDo
|
||||
Limit(limit int) IFilesDo
|
||||
Offset(offset int) IFilesDo
|
||||
Count() (count int64, err error)
|
||||
Scopes(funcs ...func(gen.Dao) gen.Dao) IFilesDo
|
||||
Unscoped() IFilesDo
|
||||
Create(values ...*model.Files) error
|
||||
CreateInBatches(values []*model.Files, batchSize int) error
|
||||
Save(values ...*model.Files) error
|
||||
First() (*model.Files, error)
|
||||
Take() (*model.Files, error)
|
||||
Last() (*model.Files, error)
|
||||
Find() ([]*model.Files, error)
|
||||
FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Files, err error)
|
||||
FindInBatches(result *[]*model.Files, batchSize int, fc func(tx gen.Dao, batch int) error) error
|
||||
Pluck(column field.Expr, dest interface{}) error
|
||||
Delete(...*model.Files) (info gen.ResultInfo, err error)
|
||||
Update(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
Updates(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumn(column field.Expr, value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateColumnSimple(columns ...field.AssignExpr) (info gen.ResultInfo, err error)
|
||||
UpdateColumns(value interface{}) (info gen.ResultInfo, err error)
|
||||
UpdateFrom(q gen.SubQuery) gen.Dao
|
||||
Attrs(attrs ...field.AssignExpr) IFilesDo
|
||||
Assign(attrs ...field.AssignExpr) IFilesDo
|
||||
Joins(fields ...field.RelationField) IFilesDo
|
||||
Preload(fields ...field.RelationField) IFilesDo
|
||||
FirstOrInit() (*model.Files, error)
|
||||
FirstOrCreate() (*model.Files, error)
|
||||
FindByPage(offset int, limit int) (result []*model.Files, count int64, err error)
|
||||
ScanByPage(result interface{}, offset int, limit int) (count int64, err error)
|
||||
Scan(result interface{}) (err error)
|
||||
Returning(value interface{}, columns ...string) IFilesDo
|
||||
UnderlyingDB() *gorm.DB
|
||||
schema.Tabler
|
||||
}
|
||||
|
||||
func (f filesDo) Debug() IFilesDo {
|
||||
return f.withDO(f.DO.Debug())
|
||||
}
|
||||
|
||||
func (f filesDo) WithContext(ctx context.Context) IFilesDo {
|
||||
return f.withDO(f.DO.WithContext(ctx))
|
||||
}
|
||||
|
||||
func (f filesDo) ReadDB() IFilesDo {
|
||||
return f.Clauses(dbresolver.Read)
|
||||
}
|
||||
|
||||
func (f filesDo) WriteDB() IFilesDo {
|
||||
return f.Clauses(dbresolver.Write)
|
||||
}
|
||||
|
||||
func (f filesDo) Session(config *gorm.Session) IFilesDo {
|
||||
return f.withDO(f.DO.Session(config))
|
||||
}
|
||||
|
||||
func (f filesDo) Clauses(conds ...clause.Expression) IFilesDo {
|
||||
return f.withDO(f.DO.Clauses(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Returning(value interface{}, columns ...string) IFilesDo {
|
||||
return f.withDO(f.DO.Returning(value, columns...))
|
||||
}
|
||||
|
||||
func (f filesDo) Not(conds ...gen.Condition) IFilesDo {
|
||||
return f.withDO(f.DO.Not(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Or(conds ...gen.Condition) IFilesDo {
|
||||
return f.withDO(f.DO.Or(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Select(conds ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Select(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Where(conds ...gen.Condition) IFilesDo {
|
||||
return f.withDO(f.DO.Where(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Order(conds ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Order(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Distinct(cols ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Distinct(cols...))
|
||||
}
|
||||
|
||||
func (f filesDo) Omit(cols ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Omit(cols...))
|
||||
}
|
||||
|
||||
func (f filesDo) Join(table schema.Tabler, on ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Join(table, on...))
|
||||
}
|
||||
|
||||
func (f filesDo) LeftJoin(table schema.Tabler, on ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.LeftJoin(table, on...))
|
||||
}
|
||||
|
||||
func (f filesDo) RightJoin(table schema.Tabler, on ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.RightJoin(table, on...))
|
||||
}
|
||||
|
||||
func (f filesDo) Group(cols ...field.Expr) IFilesDo {
|
||||
return f.withDO(f.DO.Group(cols...))
|
||||
}
|
||||
|
||||
func (f filesDo) Having(conds ...gen.Condition) IFilesDo {
|
||||
return f.withDO(f.DO.Having(conds...))
|
||||
}
|
||||
|
||||
func (f filesDo) Limit(limit int) IFilesDo {
|
||||
return f.withDO(f.DO.Limit(limit))
|
||||
}
|
||||
|
||||
func (f filesDo) Offset(offset int) IFilesDo {
|
||||
return f.withDO(f.DO.Offset(offset))
|
||||
}
|
||||
|
||||
func (f filesDo) Scopes(funcs ...func(gen.Dao) gen.Dao) IFilesDo {
|
||||
return f.withDO(f.DO.Scopes(funcs...))
|
||||
}
|
||||
|
||||
func (f filesDo) Unscoped() IFilesDo {
|
||||
return f.withDO(f.DO.Unscoped())
|
||||
}
|
||||
|
||||
func (f filesDo) Create(values ...*model.Files) error {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
return f.DO.Create(values)
|
||||
}
|
||||
|
||||
func (f filesDo) CreateInBatches(values []*model.Files, batchSize int) error {
|
||||
return f.DO.CreateInBatches(values, batchSize)
|
||||
}
|
||||
|
||||
// Save : !!! underlying implementation is different with GORM
|
||||
// The method is equivalent to executing the statement: db.Clauses(clause.OnConflict{UpdateAll: true}).Create(values)
|
||||
func (f filesDo) Save(values ...*model.Files) error {
|
||||
if len(values) == 0 {
|
||||
return nil
|
||||
}
|
||||
return f.DO.Save(values)
|
||||
}
|
||||
|
||||
func (f filesDo) First() (*model.Files, error) {
|
||||
if result, err := f.DO.First(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*model.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) Take() (*model.Files, error) {
|
||||
if result, err := f.DO.Take(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*model.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) Last() (*model.Files, error) {
|
||||
if result, err := f.DO.Last(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*model.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) Find() ([]*model.Files, error) {
|
||||
result, err := f.DO.Find()
|
||||
return result.([]*model.Files), err
|
||||
}
|
||||
|
||||
func (f filesDo) FindInBatch(batchSize int, fc func(tx gen.Dao, batch int) error) (results []*model.Files, err error) {
|
||||
buf := make([]*model.Files, 0, batchSize)
|
||||
err = f.DO.FindInBatches(&buf, batchSize, func(tx gen.Dao, batch int) error {
|
||||
defer func() { results = append(results, buf...) }()
|
||||
return fc(tx, batch)
|
||||
})
|
||||
return results, err
|
||||
}
|
||||
|
||||
func (f filesDo) FindInBatches(result *[]*model.Files, batchSize int, fc func(tx gen.Dao, batch int) error) error {
|
||||
return f.DO.FindInBatches(result, batchSize, fc)
|
||||
}
|
||||
|
||||
func (f filesDo) Attrs(attrs ...field.AssignExpr) IFilesDo {
|
||||
return f.withDO(f.DO.Attrs(attrs...))
|
||||
}
|
||||
|
||||
func (f filesDo) Assign(attrs ...field.AssignExpr) IFilesDo {
|
||||
return f.withDO(f.DO.Assign(attrs...))
|
||||
}
|
||||
|
||||
func (f filesDo) Joins(fields ...field.RelationField) IFilesDo {
|
||||
for _, _f := range fields {
|
||||
f = *f.withDO(f.DO.Joins(_f))
|
||||
}
|
||||
return &f
|
||||
}
|
||||
|
||||
func (f filesDo) Preload(fields ...field.RelationField) IFilesDo {
|
||||
for _, _f := range fields {
|
||||
f = *f.withDO(f.DO.Preload(_f))
|
||||
}
|
||||
return &f
|
||||
}
|
||||
|
||||
func (f filesDo) FirstOrInit() (*model.Files, error) {
|
||||
if result, err := f.DO.FirstOrInit(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*model.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) FirstOrCreate() (*model.Files, error) {
|
||||
if result, err := f.DO.FirstOrCreate(); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
return result.(*model.Files), nil
|
||||
}
|
||||
}
|
||||
|
||||
func (f filesDo) FindByPage(offset int, limit int) (result []*model.Files, count int64, err error) {
|
||||
result, err = f.Offset(offset).Limit(limit).Find()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
if size := len(result); 0 < limit && 0 < size && size < limit {
|
||||
count = int64(size + offset)
|
||||
return
|
||||
}
|
||||
|
||||
count, err = f.Offset(-1).Limit(-1).Count()
|
||||
return
|
||||
}
|
||||
|
||||
func (f filesDo) ScanByPage(result interface{}, offset int, limit int) (count int64, err error) {
|
||||
count, err = f.Count()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
err = f.Offset(offset).Limit(limit).Scan(result)
|
||||
return
|
||||
}
|
||||
|
||||
func (f filesDo) Scan(result interface{}) (err error) {
|
||||
return f.DO.Scan(result)
|
||||
}
|
||||
|
||||
func (f filesDo) Delete(models ...*model.Files) (result gen.ResultInfo, err error) {
|
||||
return f.DO.Delete(models)
|
||||
}
|
||||
|
||||
func (f *filesDo) withDO(do gen.Dao) *filesDo {
|
||||
f.DO = *do.(*gen.DO)
|
||||
return f
|
||||
}
|
||||
103
backend/domain/upload/internal/dal/query/gen.go
Normal file
103
backend/domain/upload/internal/dal/query/gen.go
Normal file
@ -0,0 +1,103 @@
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
// Code generated by gorm.io/gen. DO NOT EDIT.
|
||||
|
||||
package query
|
||||
|
||||
import (
|
||||
"context"
|
||||
"database/sql"
|
||||
|
||||
"gorm.io/gorm"
|
||||
|
||||
"gorm.io/gen"
|
||||
|
||||
"gorm.io/plugin/dbresolver"
|
||||
)
|
||||
|
||||
var (
|
||||
Q = new(Query)
|
||||
Files *files
|
||||
)
|
||||
|
||||
func SetDefault(db *gorm.DB, opts ...gen.DOOption) {
|
||||
*Q = *Use(db, opts...)
|
||||
Files = &Q.Files
|
||||
}
|
||||
|
||||
func Use(db *gorm.DB, opts ...gen.DOOption) *Query {
|
||||
return &Query{
|
||||
db: db,
|
||||
Files: newFiles(db, opts...),
|
||||
}
|
||||
}
|
||||
|
||||
type Query struct {
|
||||
db *gorm.DB
|
||||
|
||||
Files files
|
||||
}
|
||||
|
||||
func (q *Query) Available() bool { return q.db != nil }
|
||||
|
||||
func (q *Query) clone(db *gorm.DB) *Query {
|
||||
return &Query{
|
||||
db: db,
|
||||
Files: q.Files.clone(db),
|
||||
}
|
||||
}
|
||||
|
||||
func (q *Query) ReadDB() *Query {
|
||||
return q.ReplaceDB(q.db.Clauses(dbresolver.Read))
|
||||
}
|
||||
|
||||
func (q *Query) WriteDB() *Query {
|
||||
return q.ReplaceDB(q.db.Clauses(dbresolver.Write))
|
||||
}
|
||||
|
||||
func (q *Query) ReplaceDB(db *gorm.DB) *Query {
|
||||
return &Query{
|
||||
db: db,
|
||||
Files: q.Files.replaceDB(db),
|
||||
}
|
||||
}
|
||||
|
||||
type queryCtx struct {
|
||||
Files IFilesDo
|
||||
}
|
||||
|
||||
func (q *Query) WithContext(ctx context.Context) *queryCtx {
|
||||
return &queryCtx{
|
||||
Files: q.Files.WithContext(ctx),
|
||||
}
|
||||
}
|
||||
|
||||
func (q *Query) Transaction(fc func(tx *Query) error, opts ...*sql.TxOptions) error {
|
||||
return q.db.Transaction(func(tx *gorm.DB) error { return fc(q.clone(tx)) }, opts...)
|
||||
}
|
||||
|
||||
func (q *Query) Begin(opts ...*sql.TxOptions) *QueryTx {
|
||||
tx := q.db.Begin(opts...)
|
||||
return &QueryTx{Query: q.clone(tx), Error: tx.Error}
|
||||
}
|
||||
|
||||
type QueryTx struct {
|
||||
*Query
|
||||
Error error
|
||||
}
|
||||
|
||||
func (q *QueryTx) Commit() error {
|
||||
return q.db.Commit().Error
|
||||
}
|
||||
|
||||
func (q *QueryTx) Rollback() error {
|
||||
return q.db.Rollback().Error
|
||||
}
|
||||
|
||||
func (q *QueryTx) SavePoint(name string) error {
|
||||
return q.db.SavePoint(name).Error
|
||||
}
|
||||
|
||||
func (q *QueryTx) RollbackTo(name string) error {
|
||||
return q.db.RollbackTo(name).Error
|
||||
}
|
||||
38
backend/domain/upload/repository/repository.go
Normal file
38
backend/domain/upload/repository/repository.go
Normal file
@ -0,0 +1,38 @@
|
||||
/*
|
||||
* Copyright 2025 coze-dev Authors
|
||||
*
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package repository
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/entity"
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/internal/dal/dao"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
func NewFilesRepo(db *gorm.DB) FilesRepo {
|
||||
return dao.NewFilesDAO(db)
|
||||
}
|
||||
|
||||
//go:generate mockgen -destination ../internal/mock/dal/dao/knowledge_document.go --package dao -source knowledge_document.go
|
||||
type FilesRepo interface {
|
||||
Create(ctx context.Context, file *entity.File) error
|
||||
BatchCreate(ctx context.Context, files []*entity.File) error
|
||||
Delete(ctx context.Context, id int64) error
|
||||
GetByID(ctx context.Context, id int64) (*entity.File, error)
|
||||
MGetByIDs(ctx context.Context, ids []int64) ([]*entity.File, error)
|
||||
}
|
||||
44
backend/domain/upload/service/interface.go
Normal file
44
backend/domain/upload/service/interface.go
Normal file
@ -0,0 +1,44 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/entity"
|
||||
)
|
||||
|
||||
type UploadService interface {
|
||||
UploadFile(ctx context.Context, req *UploadFileRequest) (resp *UploadFileResponse, err error)
|
||||
UploadFiles(ctx context.Context, req *UploadFilesRequest) (resp *UploadFilesResponse, err error)
|
||||
GetFiles(ctx context.Context, req *GetFilesRequest) (resp *GetFilesResponse, err error)
|
||||
GetFile(ctx context.Context, req *GetFileRequest) (resp *GetFileResponse, err error)
|
||||
}
|
||||
|
||||
type UploadFileRequest struct {
|
||||
File *entity.File `json:"file"`
|
||||
}
|
||||
type UploadFileResponse struct {
|
||||
File *entity.File `json:"file"`
|
||||
}
|
||||
type UploadFilesRequest struct {
|
||||
Files []*entity.File `json:"files"`
|
||||
}
|
||||
|
||||
type UploadFilesResponse struct {
|
||||
Files []*entity.File `json:"files"`
|
||||
}
|
||||
|
||||
type GetFilesRequest struct {
|
||||
IDs []int64 `json:"ids"`
|
||||
}
|
||||
|
||||
type GetFilesResponse struct {
|
||||
Files []*entity.File `json:"files"`
|
||||
}
|
||||
|
||||
type GetFileRequest struct {
|
||||
ID int64 `json:"id"`
|
||||
}
|
||||
|
||||
type GetFileResponse struct {
|
||||
File *entity.File `json:"file"`
|
||||
}
|
||||
72
backend/domain/upload/service/service.go
Normal file
72
backend/domain/upload/service/service.go
Normal file
@ -0,0 +1,72 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/coze-dev/coze-studio/backend/domain/upload/repository"
|
||||
"github.com/coze-dev/coze-studio/backend/infra/contract/idgen"
|
||||
"github.com/coze-dev/coze-studio/backend/pkg/errorx"
|
||||
"github.com/coze-dev/coze-studio/backend/types/errno"
|
||||
"gorm.io/gorm"
|
||||
)
|
||||
|
||||
type uploadSVC struct {
|
||||
fileRepo repository.FilesRepo
|
||||
idgen idgen.IDGenerator
|
||||
}
|
||||
|
||||
func NewUploadSVC(db *gorm.DB, idgen idgen.IDGenerator) UploadService {
|
||||
return &uploadSVC{fileRepo: repository.NewFilesRepo(db), idgen: idgen}
|
||||
}
|
||||
|
||||
func (u *uploadSVC) UploadFile(ctx context.Context, req *UploadFileRequest) (resp *UploadFileResponse, err error) {
|
||||
resp = &UploadFileResponse{}
|
||||
if req.File.ID == 0 {
|
||||
req.File.ID, err = u.idgen.GenID(ctx)
|
||||
if err != nil {
|
||||
return nil, errorx.New(errno.ErrIDGenError)
|
||||
}
|
||||
}
|
||||
err = u.fileRepo.Create(ctx, req.File)
|
||||
if err != nil {
|
||||
return nil, errorx.WrapByCode(err, errno.ErrUploadSystemErrorCode)
|
||||
}
|
||||
resp.File = req.File
|
||||
return
|
||||
}
|
||||
|
||||
func (u *uploadSVC) UploadFiles(ctx context.Context, req *UploadFilesRequest) (resp *UploadFilesResponse, err error) {
|
||||
resp = &UploadFilesResponse{}
|
||||
for _, file := range req.Files {
|
||||
if file.ID == 0 {
|
||||
file.ID, err = u.idgen.GenID(ctx)
|
||||
if err != nil {
|
||||
return nil, errorx.New(errno.ErrIDGenError)
|
||||
}
|
||||
}
|
||||
}
|
||||
err = u.fileRepo.BatchCreate(ctx, req.Files)
|
||||
if err != nil {
|
||||
return nil, errorx.WrapByCode(err, errno.ErrUploadSystemErrorCode)
|
||||
}
|
||||
resp.Files = req.Files
|
||||
return
|
||||
}
|
||||
|
||||
func (u *uploadSVC) GetFiles(ctx context.Context, req *GetFilesRequest) (resp *GetFilesResponse, err error) {
|
||||
resp = &GetFilesResponse{}
|
||||
resp.Files, err = u.fileRepo.MGetByIDs(ctx, req.IDs)
|
||||
if err != nil {
|
||||
return nil, errorx.WrapByCode(err, errno.ErrUploadSystemErrorCode)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func (u *uploadSVC) GetFile(ctx context.Context, req *GetFileRequest) (resp *GetFileResponse, err error) {
|
||||
resp = &GetFileResponse{}
|
||||
resp.File, err = u.fileRepo.GetByID(ctx, req.ID)
|
||||
if err != nil {
|
||||
return nil, errorx.WrapByCode(err, errno.ErrUploadSystemErrorCode)
|
||||
}
|
||||
return
|
||||
}
|
||||
@ -57,6 +57,7 @@ var path2Table2Columns2Model = map[string]map[string]map[string]any{
|
||||
"background_image_info_list": []*bot_common.BackgroundImageInfo{},
|
||||
"database_config": []*bot_common.Database{},
|
||||
"shortcut_command": []string{},
|
||||
"layout_info": &bot_common.LayoutInfo{},
|
||||
},
|
||||
"single_agent_version": {
|
||||
// "variable": []*bot_common.Variable{},
|
||||
@ -71,6 +72,7 @@ var path2Table2Columns2Model = map[string]map[string]map[string]any{
|
||||
"background_image_info_list": []*bot_common.BackgroundImageInfo{},
|
||||
"database_config": []*bot_common.Database{},
|
||||
"shortcut_command": []string{},
|
||||
"layout_info": &bot_common.LayoutInfo{},
|
||||
},
|
||||
"single_agent_publish": {
|
||||
"connector_ids": []int64{},
|
||||
@ -191,6 +193,9 @@ var path2Table2Columns2Model = map[string]map[string]map[string]any{
|
||||
"publish_config": appEntity.PublishConfig{},
|
||||
},
|
||||
},
|
||||
"domain/upload/internal/dal/query": {
|
||||
"files": {},
|
||||
},
|
||||
}
|
||||
|
||||
var fieldNullablePath = map[string]bool{
|
||||
|
||||
@ -35,9 +35,17 @@ const (
|
||||
ErrConversationMessageNotFound = 103200001
|
||||
|
||||
ErrAgentRun = 103200002
|
||||
|
||||
ErrRecordNotFound = 103200003
|
||||
)
|
||||
|
||||
func init() {
|
||||
code.Register(
|
||||
ErrRecordNotFound,
|
||||
"record not found or nothing to update",
|
||||
code.WithAffectStability(false),
|
||||
)
|
||||
|
||||
code.Register(
|
||||
ErrAgentRun,
|
||||
"Interal Server Error",
|
||||
|
||||
2
docker/atlas/migrations/20250812093734_update.sql
Normal file
2
docker/atlas/migrations/20250812093734_update.sql
Normal file
@ -0,0 +1,2 @@
|
||||
-- Create "files" table
|
||||
CREATE TABLE `opencoze`.`files` (`id` bigint unsigned NOT NULL COMMENT "id", `name` varchar(255) NOT NULL DEFAULT "" COMMENT "file name", `file_size` bigint unsigned NOT NULL DEFAULT 0 COMMENT "file size", `tos_uri` varchar(1024) NOT NULL DEFAULT "" COMMENT "TOS URI", `status` tinyint unsigned NOT NULL DEFAULT 0 COMMENT "status,0invalid,1valid", `comment` varchar(1024) NOT NULL DEFAULT "" COMMENT "file comment", `source` tinyint unsigned NOT NULL DEFAULT 0 COMMENT "source:1 from API,", `creator_id` varchar(512) NOT NULL DEFAULT "" COMMENT "creator id", `content_type` varchar(255) NOT NULL DEFAULT "" COMMENT "content type", `coze_account_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT "coze account id", `created_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT "Create Time in Milliseconds", `updated_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT "Update Time in Milliseconds", `deleted_at` datetime(3) NULL COMMENT "Delete Time", PRIMARY KEY (`id`), INDEX `idx_creator_id` (`creator_id`)) CHARSET utf8mb4 COLLATE utf8mb4_general_ci COMMENT "file resource table";
|
||||
7
docker/atlas/migrations/20250813081543_update.sql
Normal file
7
docker/atlas/migrations/20250813081543_update.sql
Normal file
@ -0,0 +1,7 @@
|
||||
-- Modify "api_key" table
|
||||
ALTER TABLE `opencoze`.`api_key` ADD COLUMN `ak_type` tinyint NOT NULL DEFAULT 0 COMMENT "api key type ";
|
||||
-- Modify "single_agent_draft" table
|
||||
ALTER TABLE `opencoze`.`single_agent_draft` ADD COLUMN `bot_mode` tinyint NOT NULL DEFAULT 0 COMMENT "bot mode,0:single mode 2:chatflow mode" AFTER `database_config`, ADD COLUMN `layout_info` text NULL COMMENT "chatflow layout info";
|
||||
-- Modify "single_agent_version" table
|
||||
ALTER TABLE `opencoze`.`single_agent_version` ADD COLUMN `bot_mode` tinyint NOT NULL DEFAULT 0 COMMENT "bot mode,0:single mode 2:chatflow mode" AFTER `database_config`, ADD COLUMN `layout_info` text NULL COMMENT "chatflow layout info";
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
h1:3ar6fnSw3e4ni74BcE2N9cIentO27OcfSt465WTv2Po=
|
||||
h1:0fORsjIF9mQvStz3ASawpFr3Nr3B5pqjDOgwMa7E1L4=
|
||||
20250703095335_initial.sql h1:/joaeUTMhXqAEc0KwsSve5+bYM0qPOp+9OizJtsRc+U=
|
||||
20250703115304_update.sql h1:cbYo6Q6Lh96hB4hu5KW2Nn/Mr0VDpg7a1WPgpIb1SOc=
|
||||
20250704040445_update.sql h1:QWmoPY//oQ+GFZwET9w/oAWa8mM0KVaD5G8Yiu9bMqY=
|
||||
@ -8,3 +8,5 @@ h1:3ar6fnSw3e4ni74BcE2N9cIentO27OcfSt465WTv2Po=
|
||||
20250717125913_update.sql h1:WtPR99RlWZn0rXZsB19qp1hq0FwO5qmFhcTcV6EnFYs=
|
||||
20250730131847_update.sql h1:qIutMrXtuOA98jeucTFxXck+sQNjNTtIF2apbCYt3IY=
|
||||
20250802115105_update.sql h1:irreQaMAL0LtXcDlkdHP86C7/0e2HzEVsa1hP/FkZ2M=
|
||||
20250812093734_update.sql h1:uEPpf0UlG97yiOlUvuo9X2NNh2r86JYPuHk6F0Ka7Ic=
|
||||
20250813081543_update.sql h1:z8sHxQrR8z+ajfBatitYpCMiTrRkd339b6dP0iz8y9Y=
|
||||
|
||||
@ -850,6 +850,100 @@ table "draft_database_info" {
|
||||
columns = [column.space_id, column.app_id, column.creator_id, column.deleted_at]
|
||||
}
|
||||
}
|
||||
table "files" {
|
||||
schema = schema.opencoze
|
||||
comment = "file resource table"
|
||||
collate = "utf8mb4_general_ci"
|
||||
column "id" {
|
||||
null = false
|
||||
type = bigint
|
||||
unsigned = true
|
||||
comment = "id"
|
||||
}
|
||||
column "name" {
|
||||
null = false
|
||||
type = varchar(255)
|
||||
default = ""
|
||||
comment = "file name"
|
||||
}
|
||||
column "file_size" {
|
||||
null = false
|
||||
type = bigint
|
||||
default = 0
|
||||
unsigned = true
|
||||
comment = "file size"
|
||||
}
|
||||
column "tos_uri" {
|
||||
null = false
|
||||
type = varchar(1024)
|
||||
default = ""
|
||||
comment = "TOS URI"
|
||||
}
|
||||
column "status" {
|
||||
null = false
|
||||
type = tinyint
|
||||
default = 0
|
||||
unsigned = true
|
||||
comment = "status,0invalid,1valid"
|
||||
}
|
||||
column "comment" {
|
||||
null = false
|
||||
type = varchar(1024)
|
||||
default = ""
|
||||
comment = "file comment"
|
||||
}
|
||||
column "source" {
|
||||
null = false
|
||||
type = tinyint
|
||||
default = 0
|
||||
unsigned = true
|
||||
comment = "source:1 from API,"
|
||||
}
|
||||
column "creator_id" {
|
||||
null = false
|
||||
type = varchar(512)
|
||||
default = ""
|
||||
comment = "creator id"
|
||||
}
|
||||
column "content_type" {
|
||||
null = false
|
||||
type = varchar(255)
|
||||
default = ""
|
||||
comment = "content type"
|
||||
}
|
||||
column "coze_account_id" {
|
||||
null = false
|
||||
type = bigint
|
||||
default = 0
|
||||
unsigned = true
|
||||
comment = "coze account id"
|
||||
}
|
||||
column "created_at" {
|
||||
null = false
|
||||
type = bigint
|
||||
default = 0
|
||||
unsigned = true
|
||||
comment = "Create Time in Milliseconds"
|
||||
}
|
||||
column "updated_at" {
|
||||
null = false
|
||||
type = bigint
|
||||
default = 0
|
||||
unsigned = true
|
||||
comment = "Update Time in Milliseconds"
|
||||
}
|
||||
column "deleted_at" {
|
||||
null = true
|
||||
type = datetime(3)
|
||||
comment = "Delete Time"
|
||||
}
|
||||
primary_key {
|
||||
columns = [column.id]
|
||||
}
|
||||
index "idx_creator_id" {
|
||||
columns = [column.creator_id]
|
||||
}
|
||||
}
|
||||
table "knowledge" {
|
||||
schema = schema.opencoze
|
||||
comment = "knowledge tabke"
|
||||
|
||||
@ -22,6 +22,8 @@ CREATE TABLE IF NOT EXISTS `conversation` (`id` bigint unsigned NOT NULL AUTO_IN
|
||||
CREATE TABLE IF NOT EXISTS `data_copy_task` (`master_task_id` varchar(128) NULL DEFAULT '' COMMENT 'task id', `origin_data_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'origin data id', `target_data_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'target data id', `origin_space_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'origin space id', `target_space_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'target space id', `origin_user_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'origin user id', `target_user_id` bigint unsigned NULL DEFAULT 0 COMMENT 'target user id', `origin_app_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'origin app id', `target_app_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'target app id', `data_type` tinyint unsigned NOT NULL DEFAULT 0 COMMENT 'data type 1:knowledge, 2:database', `ext_info` varchar(255) NOT NULL DEFAULT '' COMMENT 'ext', `start_time` bigint NULL DEFAULT 0 COMMENT 'task start time', `finish_time` bigint NULL COMMENT 'task finish time', `status` tinyint NOT NULL DEFAULT 1 COMMENT '1: Create 2: Running 3: Success 4: Failure', `error_msg` varchar(128) NULL COMMENT 'error msg', `id` bigint unsigned NOT NULL AUTO_INCREMENT COMMENT 'ID', PRIMARY KEY (`id`), UNIQUE INDEX `uniq_master_task_id_origin_data_id_data_type` (`master_task_id`, `origin_data_id`, `data_type`)) ENGINE=InnoDB CHARSET utf8mb4 COLLATE utf8mb4_general_ci COMMENT 'data copy task record';
|
||||
-- Create 'draft_database_info' table
|
||||
CREATE TABLE IF NOT EXISTS `draft_database_info` (`id` bigint unsigned NOT NULL COMMENT 'ID', `app_id` bigint unsigned NULL COMMENT 'App ID', `space_id` bigint unsigned NOT NULL COMMENT 'Space ID', `related_online_id` bigint unsigned NOT NULL COMMENT 'The primary key ID of online_database_info table', `is_visible` tinyint NOT NULL DEFAULT 1 COMMENT 'Visibility: 0 invisible, 1 visible', `prompt_disabled` tinyint NOT NULL DEFAULT 0 COMMENT 'Support prompt calls: 1 not supported, 0 supported', `table_name` varchar(255) NOT NULL COMMENT 'Table name', `table_desc` varchar(256) NULL COMMENT 'Table description', `table_field` text NULL COMMENT 'Table field info', `creator_id` bigint NOT NULL DEFAULT 0 COMMENT 'Creator ID', `icon_uri` varchar(255) NOT NULL COMMENT 'Icon Uri', `physical_table_name` varchar(255) NULL COMMENT 'The name of the real physical table', `rw_mode` bigint NOT NULL DEFAULT 1 COMMENT 'Read and write permission modes: 1. Limited read and write mode 2. Read-only mode 3. Full read and write mode', `created_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Create Time in Milliseconds', `updated_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Update Time in Milliseconds', `deleted_at` datetime NULL COMMENT 'Delete Time', PRIMARY KEY (`id`), INDEX `idx_space_app_creator_deleted` (`space_id`, `app_id`, `creator_id`, `deleted_at`)) ENGINE=InnoDB CHARSET utf8mb4 COLLATE utf8mb4_general_ci COMMENT 'draft database info';
|
||||
-- Create 'files' table
|
||||
CREATE TABLE IF NOT EXISTS `files` (`id` bigint unsigned NOT NULL COMMENT 'id', `name` varchar(255) NOT NULL DEFAULT '' COMMENT 'file name', `file_size` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'file size', `tos_uri` varchar(1024) NOT NULL DEFAULT '' COMMENT 'TOS URI', `status` tinyint unsigned NOT NULL DEFAULT 0 COMMENT 'status,0invalid,1valid', `comment` varchar(1024) NOT NULL DEFAULT '' COMMENT 'file comment', `source` tinyint unsigned NOT NULL DEFAULT 0 COMMENT 'source:1 from API,', `creator_id` varchar(512) NOT NULL DEFAULT '' COMMENT 'creator id', `content_type` varchar(255) NOT NULL DEFAULT '' COMMENT 'content type', `coze_account_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'coze account id', `created_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Create Time in Milliseconds', `updated_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Update Time in Milliseconds', `deleted_at` datetime(3) NULL COMMENT 'Delete Time', PRIMARY KEY (`id`), INDEX `idx_creator_id` (`creator_id`)) ENGINE=InnoDB CHARSET utf8mb4 COLLATE utf8mb4_general_ci COMMENT 'file resource table';
|
||||
-- Create 'knowledge' table
|
||||
CREATE TABLE IF NOT EXISTS `knowledge` (`id` bigint unsigned NOT NULL COMMENT 'id', `name` varchar(150) NOT NULL DEFAULT '' COMMENT 'knowledge_s name', `app_id` bigint NOT NULL DEFAULT 0 COMMENT 'app id', `creator_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'creator id', `space_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'space id', `created_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Create Time in Milliseconds', `updated_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Update Time in Milliseconds', `deleted_at` datetime(3) NULL COMMENT 'Delete Time', `status` tinyint NOT NULL DEFAULT 1 COMMENT '0 initialization, 1 effective, 2 invalid', `description` text NULL COMMENT 'description', `icon_uri` varchar(150) NULL COMMENT 'icon uri', `format_type` tinyint NOT NULL DEFAULT 0 COMMENT '0: Text 1: Table 2: Images', PRIMARY KEY (`id`), INDEX `idx_app_id` (`app_id`), INDEX `idx_creator_id` (`creator_id`), INDEX `idx_space_id_deleted_at_updated_at` (`space_id`, `deleted_at`, `updated_at`)) ENGINE=InnoDB CHARSET utf8mb4 COLLATE utf8mb4_unicode_ci COMMENT 'knowledge tabke';
|
||||
-- Create 'knowledge_document' table
|
||||
|
||||
@ -22,6 +22,8 @@ CREATE TABLE IF NOT EXISTS `conversation` (`id` bigint unsigned NOT NULL AUTO_IN
|
||||
-- Data-copy task ledger: tracks copying one data item (knowledge or database,
-- per `data_type`) between spaces/users/apps. Uniqueness is enforced per
-- (master task, origin item, type) so a task cannot duplicate a copy.
-- NOTE(review): `start_time`/`finish_time` units are not stated in the DDL —
-- presumably milliseconds like sibling tables; confirm with writers.
CREATE TABLE IF NOT EXISTS `data_copy_task` (`master_task_id` varchar(128) NULL DEFAULT '' COMMENT 'task id', `origin_data_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'origin data id', `target_data_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'target data id', `origin_space_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'origin space id', `target_space_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'target space id', `origin_user_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'origin user id', `target_user_id` bigint unsigned NULL DEFAULT 0 COMMENT 'target user id', `origin_app_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'origin app id', `target_app_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'target app id', `data_type` tinyint unsigned NOT NULL DEFAULT 0 COMMENT 'data type 1:knowledge, 2:database', `ext_info` varchar(255) NOT NULL DEFAULT '' COMMENT 'ext', `start_time` bigint NULL DEFAULT 0 COMMENT 'task start time', `finish_time` bigint NULL COMMENT 'task finish time', `status` tinyint NOT NULL DEFAULT 1 COMMENT '1: Create 2: Running 3: Success 4: Failure', `error_msg` varchar(128) NULL COMMENT 'error msg', `id` bigint unsigned NOT NULL AUTO_INCREMENT COMMENT 'ID', PRIMARY KEY (`id`), UNIQUE INDEX `uniq_master_task_id_origin_data_id_data_type` (`master_task_id`, `origin_data_id`, `data_type`)) ENGINE=InnoDB CHARSET utf8mb4 COLLATE utf8mb4_general_ci COMMENT 'data copy task record';
|
||||
-- Create 'draft_database_info' table
|
||||
-- Draft (editable) database metadata; `related_online_id` links the draft to
-- its published row in online_database_info. `physical_table_name` names the
-- real backing table that stores user data. Soft delete via `deleted_at`
-- (plain datetime here, unlike datetime(3) elsewhere — confirm intentional).
CREATE TABLE IF NOT EXISTS `draft_database_info` (`id` bigint unsigned NOT NULL COMMENT 'ID', `app_id` bigint unsigned NULL COMMENT 'App ID', `space_id` bigint unsigned NOT NULL COMMENT 'Space ID', `related_online_id` bigint unsigned NOT NULL COMMENT 'The primary key ID of online_database_info table', `is_visible` tinyint NOT NULL DEFAULT 1 COMMENT 'Visibility: 0 invisible, 1 visible', `prompt_disabled` tinyint NOT NULL DEFAULT 0 COMMENT 'Support prompt calls: 1 not supported, 0 supported', `table_name` varchar(255) NOT NULL COMMENT 'Table name', `table_desc` varchar(256) NULL COMMENT 'Table description', `table_field` text NULL COMMENT 'Table field info', `creator_id` bigint NOT NULL DEFAULT 0 COMMENT 'Creator ID', `icon_uri` varchar(255) NOT NULL COMMENT 'Icon Uri', `physical_table_name` varchar(255) NULL COMMENT 'The name of the real physical table', `rw_mode` bigint NOT NULL DEFAULT 1 COMMENT 'Read and write permission modes: 1. Limited read and write mode 2. Read-only mode 3. Full read and write mode', `created_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Create Time in Milliseconds', `updated_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Update Time in Milliseconds', `deleted_at` datetime NULL COMMENT 'Delete Time', PRIMARY KEY (`id`), INDEX `idx_space_app_creator_deleted` (`space_id`, `app_id`, `creator_id`, `deleted_at`)) ENGINE=InnoDB CHARSET utf8mb4 COLLATE utf8mb4_general_ci COMMENT 'draft database info';
|
||||
-- Create 'files' table
|
||||
-- File resource metadata (duplicate occurrence of this DDL in the scraped
-- diff). Timestamps are epoch milliseconds; soft delete via `deleted_at`.
-- NOTE(review): `creator_id` is varchar(512) here but bigint in sibling
-- tables — confirm the mismatch is intentional.
CREATE TABLE IF NOT EXISTS `files` (`id` bigint unsigned NOT NULL COMMENT 'id', `name` varchar(255) NOT NULL DEFAULT '' COMMENT 'file name', `file_size` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'file size', `tos_uri` varchar(1024) NOT NULL DEFAULT '' COMMENT 'TOS URI', `status` tinyint unsigned NOT NULL DEFAULT 0 COMMENT 'status,0invalid,1valid', `comment` varchar(1024) NOT NULL DEFAULT '' COMMENT 'file comment', `source` tinyint unsigned NOT NULL DEFAULT 0 COMMENT 'source:1 from API,', `creator_id` varchar(512) NOT NULL DEFAULT '' COMMENT 'creator id', `content_type` varchar(255) NOT NULL DEFAULT '' COMMENT 'content type', `coze_account_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'coze account id', `created_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Create Time in Milliseconds', `updated_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Update Time in Milliseconds', `deleted_at` datetime(3) NULL COMMENT 'Delete Time', PRIMARY KEY (`id`), INDEX `idx_creator_id` (`creator_id`)) ENGINE=InnoDB CHARSET utf8mb4 COLLATE utf8mb4_general_ci COMMENT 'file resource table';
|
||||
-- Create 'knowledge' table
|
||||
-- Knowledge base registry (duplicate occurrence of this DDL in the scraped
-- diff). Fix: table COMMENT typo 'knowledge tabke' -> 'knowledge table'.
CREATE TABLE IF NOT EXISTS `knowledge` (`id` bigint unsigned NOT NULL COMMENT 'id', `name` varchar(150) NOT NULL DEFAULT '' COMMENT 'knowledge_s name', `app_id` bigint NOT NULL DEFAULT 0 COMMENT 'app id', `creator_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'creator id', `space_id` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'space id', `created_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Create Time in Milliseconds', `updated_at` bigint unsigned NOT NULL DEFAULT 0 COMMENT 'Update Time in Milliseconds', `deleted_at` datetime(3) NULL COMMENT 'Delete Time', `status` tinyint NOT NULL DEFAULT 1 COMMENT '0 initialization, 1 effective, 2 invalid', `description` text NULL COMMENT 'description', `icon_uri` varchar(150) NULL COMMENT 'icon uri', `format_type` tinyint NOT NULL DEFAULT 0 COMMENT '0: Text 1: Table 2: Images', PRIMARY KEY (`id`), INDEX `idx_app_id` (`app_id`), INDEX `idx_creator_id` (`creator_id`), INDEX `idx_space_id_deleted_at_updated_at` (`space_id`, `deleted_at`, `updated_at`)) ENGINE=InnoDB CHARSET utf8mb4 COLLATE utf8mb4_unicode_ci COMMENT 'knowledge table';
|
||||
-- Create 'knowledge_document' table
|
||||
|
||||
@ -32,11 +32,12 @@ struct UploadFileOpenResponse {
|
||||
}
|
||||
|
||||
// File describes an uploaded file as returned by the file OpenAPI.
// Reconstructed post-diff definition: the scraped span contained both the
// pre-change field lines (Chinese comments, fields 1-4) and the post-change
// lines (English comments, fields 1-6); only the latter are kept here.
struct File {
    1: string URI (api.body = "uri"),              // URI
    2: i64 Bytes (api.body = "bytes"),             // bytes
    3: i64 CreatedAt (agw.key = "created_at"),     // create at
    4: string FileName (api.body = "file_name"),   // file name
    5: string URL (api.body = "url"),
    6: string ID (api.body = "id"),
}
|
||||
|
||||
// resp
|
||||
@ -46,9 +47,42 @@ struct GetBotOnlineInfoResp {
|
||||
3: required bot_common.OpenAPIBotInfo data
|
||||
}
|
||||
|
||||
// WorkspacePermission grants a permission set scoped to specific workspaces.
// (Scrape residue lines removed; field definitions unchanged.)
struct WorkspacePermission {
    1: list<string> workspace_id_list,   // workspace IDs the token may access
    2: list<string> permission_list,     // permissions granted within those workspaces
}
|
||||
|
||||
// AccountPermission grants a permission set at the account level.
// (Scrape residue lines removed; field definition unchanged.)
struct AccountPermission {
    1: list<string> permission_list,   // account-wide permissions granted
}
|
||||
|
||||
// Scope bundles the workspace- and account-level permissions requested for
// an impersonation token. (Scrape residue lines removed; fields unchanged.)
struct Scope {
    1: WorkspacePermission workspace_permission,   // workspace-scoped grants
    2: AccountPermission account_permission,       // account-scoped grants
}
|
||||
|
||||
// ImpersonateCozeUserRequest is the body of
// POST /api/permission_api/coze_web_app/impersonate_coze_user.
// (Scrape residue lines removed; fields unchanged.)
struct ImpersonateCozeUserRequest {
    1: i64 duration_seconds,   // requested token lifetime in seconds
    2: Scope scope,            // permissions the issued token should carry
}
|
||||
|
||||
// ImpersonateCozeUserResponse is the standard code/msg/data envelope for the
// impersonation endpoint. (Scrape residue lines removed; fields unchanged.)
struct ImpersonateCozeUserResponse {
    1: required i32 code,                          // status code; presumably 0 on success — confirm with handler
    2: required string msg,                        // human-readable status message
    3: ImpersonateCozeUserResponseData data,       // token payload (absent on error)
}
|
||||
|
||||
// ImpersonateCozeUserResponseData carries the issued access token.
// (Scrape residue lines removed; fields unchanged.)
struct ImpersonateCozeUserResponseData {
    1: required string access_token,   // bearer token value
    2: required i64 expires_in,        // lifetime; units not stated in IDL — presumably seconds, confirm
    3: required string token_type,     // token type label (e.g. "Bearer") — confirm with issuer
}
|
||||
|
||||
service BotOpenApiService {
|
||||
OauthAuthorizationCodeResp OauthAuthorizationCode(1: OauthAuthorizationCodeReq request)(api.get='/api/oauth/authorization_code', api.category="oauth", api.gen_path="oauth")
|
||||
|
||||
ImpersonateCozeUserResponse ImpersonateCozeUser (1: ImpersonateCozeUserRequest request) (api.post="/api/permission_api/coze_web_app/impersonate_coze_user")
|
||||
|
||||
//openapi
|
||||
GetBotOnlineInfoResp GetBotOnlineInfo(1: GetBotOnlineInfoReq request)(api.get='/v1/bot/get_online_info', api.category="bot", api.tag="openapi", api.gen_path="personal_api")
|
||||
// File related OpenAPI
|
||||
|
||||
@ -447,13 +447,6 @@ struct GetFileUrlsResponse {
|
||||
255: base.BaseResp BaseResp
|
||||
}
|
||||
|
||||
// File describes an uploaded file for PlaygroundService responses.
// (Scrape residue lines removed; fields unchanged. Note: a sibling File
// struct elsewhere additionally carries an `id` field — confirm whether
// these two definitions should be unified.)
struct File {
    1: string URI (api.body = "uri"),              // File URI
    2: i64 Bytes (api.body = "bytes"),             // file bytes
    3: i64 CreatedAt (agw.key = "created_at"),     // Upload timestamp in s
    4: string FileName (api.body = "file_name"),   // file name
    5: string URL (api.body = "url"),
}
|
||||
|
||||
|
||||
service PlaygroundService {
|
||||
|
||||
Reference in New Issue
Block a user