Compare commits

..

13 Commits

29 changed files with 3595 additions and 159 deletions

View File

@ -5,6 +5,7 @@ on:
branches:
- "main"
- "deploy/dev"
- "fix/redis-slow-in-gevent"
release:
types: [published]

View File

@ -154,7 +154,7 @@ Dify 是一个开源的 LLM 应用开发平台。其直观的界面结合了 AI
我们提供[ Dify 云服务](https://dify.ai),任何人都可以零设置尝试。它提供了自部署版本的所有功能,并在沙盒计划中包含 200 次免费的 GPT-4 调用。
- **自托管 Dify 社区版<br/>**
使用这个[入门指南](#quick-start)快速在您的环境中运行 Dify。
使用这个[入门指南](#快速启动)快速在您的环境中运行 Dify。
使用我们的[文档](https://docs.dify.ai)进行进一步的参考和更深入的说明。
- **面向企业/组织的 Dify<br/>**

View File

@ -34,6 +34,11 @@ class RedisConfig(BaseSettings):
default=0,
)
REDIS_MAX_CONNECTIONS: PositiveInt = Field(
description="Maximum number of connections to Redis",
default=200,
)
REDIS_USE_SSL: bool = Field(
description="Enable SSL/TLS for the Redis connection",
default=False,

View File

@ -105,6 +105,8 @@ class ChatMessageListApi(Resource):
if rest_count > 0:
has_more = True
history_messages = list(reversed(history_messages))
return InfiniteScrollPagination(data=history_messages, limit=args["limit"], has_more=has_more)

View File

@ -226,13 +226,11 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
is_first_conversation = True
# init generate records
(conversation, message) = self._init_generate_records(application_generate_entity, conversation)
if is_first_conversation:
# update conversation features
conversation.override_model_configs = workflow.features
db.session.commit()
db.session.refresh(conversation)
(conversation, message) = self._init_generate_records(
application_generate_entity=application_generate_entity,
conversation=conversation,
override_model_configs=workflow.features_dict if is_first_conversation else None,
)
# init queue manager
queue_manager = MessageBasedAppQueueManager(

View File

@ -1,4 +1,5 @@
import logging
import time
from collections.abc import Mapping
from typing import Any, cast
@ -101,6 +102,9 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
):
return
# trace start time
start_time = time.perf_counter()
# Init conversation variables
stmt = select(ConversationVariable).where(
ConversationVariable.app_id == self.conversation.app_id,
@ -128,6 +132,13 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
conversation_dialogue_count = self.conversation.dialogue_count
db.session.commit()
# trace end time
end_time = time.perf_counter()
print(f"conversation_dialogue_count time: {end_time - start_time}")
# trace start time
start_time = time.perf_counter()
# Create a variable pool.
system_inputs = {
SystemVariableKey.QUERY: query,
@ -151,6 +162,10 @@ class AdvancedChatAppRunner(WorkflowBasedAppRunner):
# init graph
graph = self._init_graph(graph_config=workflow.graph_dict)
# trace end time
end_time = time.perf_counter()
print(f"init graph time: {end_time - start_time}")
db.session.close()
# RUN WORKFLOW

View File

@ -15,7 +15,6 @@ from core.app.entities.queue_entities import (
QueuePingEvent,
QueueStopEvent,
)
from extensions.ext_redis import redis_client
class PublishFrom(Enum):
@ -32,10 +31,10 @@ class AppQueueManager:
self._user_id = user_id
self._invoke_from = invoke_from
user_prefix = "account" if self._invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end-user"
redis_client.setex(
AppQueueManager._generate_task_belong_cache_key(self._task_id), 1800, f"{user_prefix}-{self._user_id}"
)
# user_prefix = "account" if self._invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end-user"
# redis_client.setex(
# AppQueueManager._generate_task_belong_cache_key(self._task_id), 1800, f"{user_prefix}-{self._user_id}"
# )
q = queue.Queue()
@ -114,26 +113,27 @@ class AppQueueManager:
Set task stop flag
:return:
"""
result = redis_client.get(cls._generate_task_belong_cache_key(task_id))
if result is None:
return
return
# result = redis_client.get(cls._generate_task_belong_cache_key(task_id))
# if result is None:
# return
user_prefix = "account" if invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end-user"
if result.decode("utf-8") != f"{user_prefix}-{user_id}":
return
# user_prefix = "account" if invoke_from in {InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER} else "end-user"
# if result.decode("utf-8") != f"{user_prefix}-{user_id}":
# return
stopped_cache_key = cls._generate_stopped_cache_key(task_id)
redis_client.setex(stopped_cache_key, 600, 1)
# stopped_cache_key = cls._generate_stopped_cache_key(task_id)
# redis_client.setex(stopped_cache_key, 600, 1)
def _is_stopped(self) -> bool:
"""
Check if task is stopped
:return:
"""
stopped_cache_key = AppQueueManager._generate_stopped_cache_key(self._task_id)
result = redis_client.get(stopped_cache_key)
if result is not None:
return True
# stopped_cache_key = AppQueueManager._generate_stopped_cache_key(self._task_id)
# result = redis_client.get(stopped_cache_key)
# if result is not None:
# return True
return False

View File

@ -1,8 +1,9 @@
import json
import logging
from collections.abc import Generator
import uuid
from collections.abc import Generator, Mapping
from datetime import datetime, timezone
from typing import Optional, Union
from typing import Any, Optional, Union
from sqlalchemy import and_
@ -137,6 +138,7 @@ class MessageBasedAppGenerator(BaseAppGenerator):
AdvancedChatAppGenerateEntity,
],
conversation: Optional[Conversation] = None,
override_model_configs: Optional[Mapping[str, Any]] = None,
) -> tuple[Conversation, Message]:
"""
Initialize generate records
@ -158,14 +160,12 @@ class MessageBasedAppGenerator(BaseAppGenerator):
if isinstance(application_generate_entity, AdvancedChatAppGenerateEntity):
app_model_config_id = None
override_model_configs = None
model_provider = None
model_id = None
else:
app_model_config_id = app_config.app_model_config_id
model_provider = application_generate_entity.model_conf.provider
model_id = application_generate_entity.model_conf.model
override_model_configs = None
if app_config.app_model_config_from == EasyUIBasedAppModelConfigFrom.ARGS and app_config.app_mode in {
AppMode.AGENT_CHAT,
AppMode.CHAT,
@ -177,61 +177,63 @@ class MessageBasedAppGenerator(BaseAppGenerator):
introduction = self._get_conversation_introduction(application_generate_entity)
if not conversation:
conversation = Conversation(
app_id=app_config.app_id,
app_model_config_id=app_model_config_id,
model_provider=model_provider,
model_id=model_id,
override_model_configs=json.dumps(override_model_configs) if override_model_configs else None,
mode=app_config.app_mode.value,
name="New conversation",
inputs=application_generate_entity.inputs,
introduction=introduction,
system_instruction="",
system_instruction_tokens=0,
status="normal",
invoke_from=application_generate_entity.invoke_from.value,
from_source=from_source,
from_end_user_id=end_user_id,
from_account_id=account_id,
)
with db.Session(bind=db.engine, expire_on_commit=False) as session:
conversation = Conversation()
conversation.id = str(uuid.uuid4())
conversation.app_id = app_config.app_id
conversation.app_model_config_id = app_model_config_id
conversation.model_provider = model_provider
conversation.model_id = model_id
conversation.override_model_configs = (
json.dumps(override_model_configs) if override_model_configs else None
)
conversation.mode = app_config.app_mode.value
conversation.name = "New conversation"
conversation.inputs = application_generate_entity.inputs
conversation.introduction = introduction
conversation.system_instruction = ""
conversation.system_instruction_tokens = 0
conversation.status = "normal"
conversation.invoke_from = application_generate_entity.invoke_from.value
conversation.from_source = from_source
conversation.from_end_user_id = end_user_id
conversation.from_account_id = account_id
db.session.add(conversation)
db.session.commit()
db.session.refresh(conversation)
session.add(conversation)
session.commit()
session.refresh(conversation)
else:
conversation.updated_at = datetime.now(timezone.utc).replace(tzinfo=None)
db.session.commit()
message = Message(
app_id=app_config.app_id,
model_provider=model_provider,
model_id=model_id,
override_model_configs=json.dumps(override_model_configs) if override_model_configs else None,
conversation_id=conversation.id,
inputs=application_generate_entity.inputs,
query=application_generate_entity.query or "",
message="",
message_tokens=0,
message_unit_price=0,
message_price_unit=0,
answer="",
answer_tokens=0,
answer_unit_price=0,
answer_price_unit=0,
parent_message_id=getattr(application_generate_entity, "parent_message_id", None),
provider_response_latency=0,
total_price=0,
currency="USD",
invoke_from=application_generate_entity.invoke_from.value,
from_source=from_source,
from_end_user_id=end_user_id,
from_account_id=account_id,
)
with db.Session(bind=db.engine, expire_on_commit=False) as session:
message = Message()
message.app_id = app_config.app_id
message.model_provider = model_provider
message.model_id = model_id
message.override_model_configs = json.dumps(override_model_configs) if override_model_configs else None
message.conversation_id = conversation.id
message.inputs = application_generate_entity.inputs
message.query = application_generate_entity.query or ""
message.message = ""
message.message_tokens = 0
message.message_unit_price = 0
message.answer = ""
message.answer_tokens = 0
message.answer_unit_price = 0
message.answer_price_unit = 0
message.parent_message_id = getattr(application_generate_entity, "parent_message_id", None)
message.provider_response_latency = 0
message.total_price = 0
message.currency = "USD"
message.invoke_from = application_generate_entity.invoke_from.value
message.from_source = from_source
message.from_end_user_id = end_user_id
message.from_account_id = account_id
db.session.add(message)
db.session.commit()
db.session.refresh(message)
session.add(message)
session.commit()
session.refresh(message)
for file in application_generate_entity.files:
message_file = MessageFile(

View File

@ -977,6 +977,9 @@ class Message(db.Model):
config=FileExtraConfig(),
)
elif message_file.transfer_method == "tool_file":
if message_file.upload_file_id is None:
assert message_file.url is not None
message_file.upload_file_id = message_file.url.split("/")[-1].split(".")[0]
mapping = {
"id": message_file.id,
"type": message_file.type,
@ -1001,6 +1004,7 @@ class Message(db.Model):
for (file, message_file) in zip(files, message_files)
]
db.session.commit()
return result
@property

View File

@ -16,7 +16,7 @@ services:
-c 'maintenance_work_mem=${POSTGRES_MAINTENANCE_WORK_MEM:-64MB}'
-c 'effective_cache_size=${POSTGRES_EFFECTIVE_CACHE_SIZE:-4096MB}'
volumes:
- ./volumes/db/data:/var/lib/postgresql/data
- ${PGDATA_HOST_VOLUME:-./volumes/db/data}:/var/lib/postgresql/data
ports:
- "${EXPOSE_POSTGRES_PORT:-5432}:5432"
healthcheck:
@ -31,7 +31,7 @@ services:
restart: always
volumes:
# Mount the redis data directory to the container.
- ./volumes/redis/data:/data
- ${REDIS_HOST_VOLUME:-./volumes/redis/data}:/data
# Set the redis password when startup redis server.
command: redis-server --requirepass difyai123456
ports:
@ -94,7 +94,7 @@ services:
restart: always
volumes:
# Mount the Weaviate data directory to the container.
- ./volumes/weaviate:/var/lib/weaviate
- ${WEAVIATE_HOST_VOLUME:-./volumes/weaviate}:/var/lib/weaviate
env_file:
- ./middleware.env
environment:

View File

@ -8,6 +8,7 @@ POSTGRES_PASSWORD=difyai123456
POSTGRES_DB=dify
# postgres data directory
PGDATA=/var/lib/postgresql/data/pgdata
PGDATA_HOST_VOLUME=./volumes/db/data
# Maximum number of connections to the database
# Default is 100
@ -39,6 +40,11 @@ POSTGRES_MAINTENANCE_WORK_MEM=64MB
# Reference: https://www.postgresql.org/docs/current/runtime-config-query.html#GUC-EFFECTIVE-CACHE-SIZE
POSTGRES_EFFECTIVE_CACHE_SIZE=4096MB
# ------------------------------
# Environment Variables for redis Service
REDIS_HOST_VOLUME=./volumes/redis/data
# ------------------------------
# Environment Variables for sandbox Service
SANDBOX_API_KEY=dify-sandbox
@ -70,6 +76,7 @@ WEAVIATE_AUTHENTICATION_APIKEY_ALLOWED_KEYS=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
WEAVIATE_AUTHENTICATION_APIKEY_USERS=hello@dify.ai
WEAVIATE_AUTHORIZATION_ADMINLIST_ENABLED=true
WEAVIATE_AUTHORIZATION_ADMINLIST_USERS=hello@dify.ai
WEAVIATE_HOST_VOLUME=./volumes/weaviate
# ------------------------------
# Docker Compose Service Expose Host Port Configurations

0
web/__mocks__/mime.js Normal file
View File

View File

@ -7,8 +7,7 @@ import ConfigPrompt from '../../config-prompt'
import { languageMap } from '../../../../workflow/nodes/_base/components/editor/code-editor/index'
import { generateRuleCode } from '@/service/debug'
import type { CodeGenRes } from '@/service/debug'
import { ModelModeType } from '@/types/app'
import type { AppType, Model } from '@/types/app'
import { type AppType, type Model, ModelModeType } from '@/types/app'
import Modal from '@/app/components/base/modal'
import Button from '@/app/components/base/button'
import { Generator } from '@/app/components/base/icons/src/vender/other'
@ -16,6 +15,10 @@ import Toast from '@/app/components/base/toast'
import Loading from '@/app/components/base/loading'
import Confirm from '@/app/components/base/confirm'
import type { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import ModelIcon from '@/app/components/header/account-setting/model-provider-page/model-icon'
import ModelName from '@/app/components/header/account-setting/model-provider-page/model-name'
export type IGetCodeGeneratorResProps = {
mode: AppType
isShow: boolean
@ -31,9 +34,12 @@ export const GetCodeGeneratorResModal: FC<IGetCodeGeneratorResProps> = (
codeLanguages,
onClose,
onFinished,
},
) => {
const {
currentProvider,
currentModel,
} = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.textGeneration)
const { t } = useTranslation()
const [instruction, setInstruction] = React.useState<string>('')
const [isLoading, { setTrue: setLoadingTrue, setFalse: setLoadingFalse }] = useBoolean(false)
@ -51,9 +57,10 @@ export const GetCodeGeneratorResModal: FC<IGetCodeGeneratorResProps> = (
return true
}
const model: Model = {
provider: 'openai',
name: 'gpt-4o-mini',
provider: currentProvider?.provider || '',
name: currentModel?.model || '',
mode: ModelModeType.chat,
// This is a fixed parameter
completion_params: {
temperature: 0.7,
max_tokens: 0,
@ -112,6 +119,19 @@ export const GetCodeGeneratorResModal: FC<IGetCodeGeneratorResProps> = (
<div className={'leading-[28px] text-lg font-bold'}>{t('appDebug.codegen.title')}</div>
<div className='mt-1 text-[13px] font-normal text-gray-500'>{t('appDebug.codegen.description')}</div>
</div>
<div className='flex items-center'>
<ModelIcon
className='shrink-0 mr-1.5'
provider={currentProvider}
modelName={currentModel?.model}
/>
<ModelName
className='grow'
modelItem={currentModel!}
showMode
showFeatures
/>
</div>
<div className='mt-6'>
<div className='text-[0px]'>
<div className='mb-2 leading-5 text-sm font-medium text-gray-900'>{t('appDebug.codegen.instruction')}</div>

View File

@ -17,6 +17,7 @@ import { createContext, useContext } from 'use-context-selector'
import { useShallow } from 'zustand/react/shallow'
import { useTranslation } from 'react-i18next'
import { UUID_NIL } from '../../base/chat/constants'
import type { ChatItemInTree } from '../../base/chat/types'
import VarPanel from './var-panel'
import cn from '@/utils/classnames'
import type { FeedbackFunc, FeedbackType, IChatItem, SubmitAnnotationFunc } from '@/app/components/base/chat/chat/type'
@ -41,6 +42,7 @@ import { useAppContext } from '@/context/app-context'
import useTimestamp from '@/hooks/use-timestamp'
import Tooltip from '@/app/components/base/tooltip'
import { CopyIcon } from '@/app/components/base/copy-icon'
import { buildChatItemTree, getThreadMessages } from '@/app/components/base/chat/utils'
import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
dayjs.extend(utc)
@ -139,6 +141,7 @@ function appendQAToChatList(newChatList: IChatItem[], item: any, conversationId:
})(),
parentMessageId: `question-${item.id}`,
})
const questionFiles = item.message_files?.filter((file: any) => file.belongs_to === 'user') || []
newChatList.push({
id: `question-${item.id}`,
@ -193,50 +196,66 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
currentLogModalActiveTab: state.currentLogModalActiveTab,
})))
const { t } = useTranslation()
const [items, setItems] = React.useState<IChatItem[]>([])
const fetchedMessages = useRef<ChatMessage[]>([])
const [hasMore, setHasMore] = useState(true)
const [varValues, setVarValues] = useState<Record<string, string>>({})
const fetchData = async () => {
const [allChatItems, setAllChatItems] = useState<IChatItem[]>([])
const [chatItemTree, setChatItemTree] = useState<ChatItemInTree[]>([])
const [threadChatItems, setThreadChatItems] = useState<IChatItem[]>([])
const fetchData = useCallback(async () => {
try {
if (!hasMore)
return
const params: ChatMessagesRequest = {
conversation_id: detail.id,
limit: 10,
}
if (items?.[0]?.id)
params.first_id = items?.[0]?.id.replace('question-', '')
if (allChatItems.at(-1)?.id)
params.first_id = allChatItems.at(-1)?.id.replace('question-', '')
const messageRes = await fetchChatMessages({
url: `/apps/${appDetail?.id}/chat-messages`,
params,
})
if (messageRes.data.length > 0) {
const varValues = messageRes.data[0].inputs
const varValues = messageRes.data.at(-1)!.inputs
setVarValues(varValues)
}
fetchedMessages.current = [...fetchedMessages.current, ...messageRes.data]
const newItems = getFormattedChatList(fetchedMessages.current, detail.id, timezone!, t('appLog.dateTimeFormat') as string)
setHasMore(messageRes.has_more)
const newAllChatItems = [
...getFormattedChatList(messageRes.data, detail.id, timezone!, t('appLog.dateTimeFormat') as string),
...allChatItems,
]
setAllChatItems(newAllChatItems)
let tree = buildChatItemTree(newAllChatItems)
if (messageRes.has_more === false && detail?.model_config?.configs?.introduction) {
newItems.unshift({
tree = [{
id: 'introduction',
isAnswer: true,
isOpeningStatement: true,
content: detail?.model_config?.configs?.introduction ?? 'hello',
feedbackDisabled: true,
})
children: tree,
}]
}
setItems(newItems)
setHasMore(messageRes.has_more)
setChatItemTree(tree)
setThreadChatItems(getThreadMessages(tree, newAllChatItems.at(-1)?.id))
}
catch (err) {
console.error(err)
}
}
}, [allChatItems, detail.id, hasMore, timezone, t, appDetail, detail?.model_config?.configs?.introduction])
const switchSibling = useCallback((siblingMessageId: string) => {
setThreadChatItems(getThreadMessages(chatItemTree, siblingMessageId))
}, [chatItemTree])
const handleAnnotationEdited = useCallback((query: string, answer: string, index: number) => {
setItems(items.map((item, i) => {
setAllChatItems(allChatItems.map((item, i) => {
if (i === index - 1) {
return {
...item,
@ -257,9 +276,9 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
}
return item
}))
}, [items])
}, [allChatItems])
const handleAnnotationAdded = useCallback((annotationId: string, authorName: string, query: string, answer: string, index: number) => {
setItems(items.map((item, i) => {
setAllChatItems(allChatItems.map((item, i) => {
if (i === index - 1) {
return {
...item,
@ -287,9 +306,9 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
}
return item
}))
}, [items])
}, [allChatItems])
const handleAnnotationRemoved = useCallback((index: number) => {
setItems(items.map((item, i) => {
setAllChatItems(allChatItems.map((item, i) => {
if (i === index) {
return {
...item,
@ -299,7 +318,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
}
return item
}))
}, [items])
}, [allChatItems])
const fetchInitiated = useRef(false)
@ -464,7 +483,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
siteInfo={null}
/>
</div>
: (items.length < 8 && !hasMore)
: threadChatItems.length < 8
? <div className="pt-4 mb-4">
<Chat
config={{
@ -478,7 +497,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
},
supportFeedback: true,
} as any}
chatList={items}
chatList={threadChatItems}
onAnnotationAdded={handleAnnotationAdded}
onAnnotationEdited={handleAnnotationEdited}
onAnnotationRemoved={handleAnnotationRemoved}
@ -487,6 +506,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
showPromptLog
hideProcessDetail
chatContainerInnerClassName='px-6'
switchSibling={switchSibling}
/>
</div>
: <div
@ -501,7 +521,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
{/* Put the scroll bar always on the bottom */}
<InfiniteScroll
scrollableTarget="scrollableDiv"
dataLength={items.length}
dataLength={threadChatItems.length}
next={fetchData}
hasMore={hasMore}
loader={<div className='text-center text-gray-400 text-xs'>{t('appLog.detail.loading')}...</div>}
@ -532,7 +552,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
},
supportFeedback: true,
} as any}
chatList={items}
chatList={threadChatItems}
onAnnotationAdded={handleAnnotationAdded}
onAnnotationEdited={handleAnnotationEdited}
onAnnotationRemoved={handleAnnotationRemoved}
@ -541,6 +561,7 @@ function DetailPanel({ detail, onFeedback }: IDetailPanel) {
showPromptLog
hideProcessDetail
chatContainerInnerClassName='px-6'
switchSibling={switchSibling}
/>
</InfiniteScroll>
</div>

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,42 @@
[
{
"id": "question-1",
"isAnswer": false,
"parentMessageId": null
},
{
"id": "1",
"isAnswer": true,
"parentMessageId": "question-1"
},
{
"id": "question-2",
"isAnswer": false,
"parentMessageId": "1"
},
{
"id": "2",
"isAnswer": true,
"parentMessageId": "question-2"
},
{
"id": "question-3",
"isAnswer": false,
"parentMessageId": "2"
},
{
"id": "3",
"isAnswer": true,
"parentMessageId": "question-3"
},
{
"id": "question-4",
"isAnswer": false,
"parentMessageId": "1"
},
{
"id": "4",
"isAnswer": true,
"parentMessageId": "question-4"
}
]

View File

@ -0,0 +1,42 @@
[
{
"id": "question-1",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "1",
"isAnswer": true,
"parentMessageId": "question-1"
},
{
"id": "question-2",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "2",
"isAnswer": true,
"parentMessageId": "question-2"
},
{
"id": "question-3",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "3",
"isAnswer": true,
"parentMessageId": "question-3"
},
{
"id": "question-4",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "4",
"isAnswer": true,
"parentMessageId": "question-4"
}
]

View File

@ -0,0 +1,42 @@
[
{
"id": "question-1",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "1",
"isAnswer": true,
"parentMessageId": "question-1"
},
{
"id": "question-2",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "2",
"isAnswer": true,
"parentMessageId": "question-2"
},
{
"id": "question-3",
"isAnswer": false,
"parentMessageId": "2"
},
{
"id": "3",
"isAnswer": true,
"parentMessageId": "question-3"
},
{
"id": "question-4",
"isAnswer": false,
"parentMessageId": "1"
},
{
"id": "4",
"isAnswer": true,
"parentMessageId": "question-4"
}
]

View File

@ -0,0 +1,52 @@
[
{
"id": "question-1",
"isAnswer": false,
"parentMessageId": null
},
{
"id": "1",
"isAnswer": true,
"parentMessageId": "question-1"
},
{
"id": "question-2",
"isAnswer": false,
"parentMessageId": "1"
},
{
"id": "2",
"isAnswer": true,
"parentMessageId": "question-2"
},
{
"id": "question-3",
"isAnswer": false,
"parentMessageId": "2"
},
{
"id": "3",
"isAnswer": true,
"parentMessageId": "question-3"
},
{
"id": "question-4",
"isAnswer": false,
"parentMessageId": "1"
},
{
"id": "4",
"isAnswer": true,
"parentMessageId": "question-4"
},
{
"id": "question-5",
"isAnswer": false,
"parentMessageId": null
},
{
"id": "5",
"isAnswer": true,
"parentMessageId": "question-5"
}
]

View File

@ -0,0 +1,52 @@
[
{
"id": "question-1",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "1",
"isAnswer": true,
"parentMessageId": "question-1"
},
{
"id": "question-2",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "2",
"isAnswer": true,
"parentMessageId": "question-2"
},
{
"id": "question-3",
"isAnswer": false,
"parentMessageId": "00000000-0000-0000-0000-000000000000"
},
{
"id": "3",
"isAnswer": true,
"parentMessageId": "question-3"
},
{
"id": "question-4",
"isAnswer": false,
"parentMessageId": "1"
},
{
"id": "4",
"isAnswer": true,
"parentMessageId": "question-4"
},
{
"id": "question-5",
"isAnswer": false,
"parentMessageId": null
},
{
"id": "5",
"isAnswer": true,
"parentMessageId": "question-5"
}
]

View File

@ -0,0 +1,441 @@
[
{
"id": "question-ff4c2b43-48a5-47ad-9dc5-08b34ddba61b",
"content": "Let's play a game, I say a number , and you response me with another bigger, yet random-looking number. I'll start first, 38",
"isAnswer": false,
"message_files": []
},
{
"id": "ff4c2b43-48a5-47ad-9dc5-08b34ddba61b",
"content": "Sure, I'll play! My number is 57. Your turn!",
"agent_thoughts": [
{
"id": "f9d7ff7c-3a3b-4d9a-a289-657817f4caff",
"chain_id": null,
"message_id": "ff4c2b43-48a5-47ad-9dc5-08b34ddba61b",
"position": 1,
"thought": "Sure, I'll play! My number is 57. Your turn!",
"tool": "",
"tool_labels": {},
"tool_input": "",
"created_at": 1726105791,
"observation": "",
"files": []
}
],
"feedbackDisabled": false,
"isAnswer": true,
"message_files": [],
"log": [
{
"role": "user",
"text": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38",
"files": []
},
{
"role": "assistant",
"text": "Sure, I'll play! My number is 57. Your turn!",
"files": []
}
],
"workflow_run_id": null,
"conversationId": "dd6c9cfd-2656-48ec-bd51-2139c1790d80",
"input": {
"inputs": {},
"query": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38"
},
"more": {
"time": "09/11/2024 09:49 PM",
"tokens": 49,
"latency": "1.56"
},
"parentMessageId": "question-ff4c2b43-48a5-47ad-9dc5-08b34ddba61b"
},
{
"id": "question-73bbad14-d915-499d-87bf-0df14d40779d",
"content": "58",
"isAnswer": false,
"message_files": [],
"parentMessageId": "ff4c2b43-48a5-47ad-9dc5-08b34ddba61b"
},
{
"id": "73bbad14-d915-499d-87bf-0df14d40779d",
"content": "I choose 83. What's your next number?",
"agent_thoughts": [
{
"id": "f61a3fce-37ac-4f9d-9935-95f97e598dfe",
"chain_id": null,
"message_id": "73bbad14-d915-499d-87bf-0df14d40779d",
"position": 1,
"thought": "I choose 83. What's your next number?",
"tool": "",
"tool_labels": {},
"tool_input": "",
"created_at": 1726105795,
"observation": "",
"files": []
}
],
"feedbackDisabled": false,
"isAnswer": true,
"message_files": [],
"log": [
{
"role": "user",
"text": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38",
"files": []
},
{
"role": "assistant",
"text": "Sure, I'll play! My number is 57. Your turn!",
"files": []
},
{
"role": "user",
"text": "58",
"files": []
},
{
"role": "assistant",
"text": "I choose 83. What's your next number?",
"files": []
}
],
"workflow_run_id": null,
"conversationId": "dd6c9cfd-2656-48ec-bd51-2139c1790d80",
"input": {
"inputs": {},
"query": "58"
},
"more": {
"time": "09/11/2024 09:49 PM",
"tokens": 68,
"latency": "1.33"
},
"parentMessageId": "question-73bbad14-d915-499d-87bf-0df14d40779d"
},
{
"id": "question-4c5d0841-1206-463e-95d8-71f812877658",
"content": "99",
"isAnswer": false,
"message_files": [],
"parentMessageId": "73bbad14-d915-499d-87bf-0df14d40779d"
},
{
"id": "4c5d0841-1206-463e-95d8-71f812877658",
"content": "I'll go with 112. Your turn!",
"agent_thoughts": [
{
"id": "9730d587-9268-4683-9dd9-91a1cab9510b",
"chain_id": null,
"message_id": "4c5d0841-1206-463e-95d8-71f812877658",
"position": 1,
"thought": "I'll go with 112. Your turn!",
"tool": "",
"tool_labels": {},
"tool_input": "",
"created_at": 1726105799,
"observation": "",
"files": []
}
],
"feedbackDisabled": false,
"isAnswer": true,
"message_files": [],
"log": [
{
"role": "user",
"text": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38",
"files": []
},
{
"role": "assistant",
"text": "Sure, I'll play! My number is 57. Your turn!",
"files": []
},
{
"role": "user",
"text": "58",
"files": []
},
{
"role": "assistant",
"text": "I choose 83. What's your next number?",
"files": []
},
{
"role": "user",
"text": "99",
"files": []
},
{
"role": "assistant",
"text": "I'll go with 112. Your turn!",
"files": []
}
],
"workflow_run_id": null,
"conversationId": "dd6c9cfd-2656-48ec-bd51-2139c1790d80",
"input": {
"inputs": {},
"query": "99"
},
"more": {
"time": "09/11/2024 09:50 PM",
"tokens": 86,
"latency": "1.49"
},
"parentMessageId": "question-4c5d0841-1206-463e-95d8-71f812877658"
},
{
"id": "question-cd5affb0-7bc2-4a6f-be7e-25e74595c9dd",
"content": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38",
"isAnswer": false,
"message_files": []
},
{
"id": "cd5affb0-7bc2-4a6f-be7e-25e74595c9dd",
"content": "Sure! My number is 54. Your turn!",
"agent_thoughts": [
{
"id": "1019cd79-d141-4f9f-880a-fc1441cfd802",
"chain_id": null,
"message_id": "cd5affb0-7bc2-4a6f-be7e-25e74595c9dd",
"position": 1,
"thought": "Sure! My number is 54. Your turn!",
"tool": "",
"tool_labels": {},
"tool_input": "",
"created_at": 1726105809,
"observation": "",
"files": []
}
],
"feedbackDisabled": false,
"isAnswer": true,
"message_files": [],
"log": [
{
"role": "user",
"text": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38",
"files": []
},
{
"role": "assistant",
"text": "Sure! My number is 54. Your turn!",
"files": []
}
],
"workflow_run_id": null,
"conversationId": "dd6c9cfd-2656-48ec-bd51-2139c1790d80",
"input": {
"inputs": {},
"query": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38"
},
"more": {
"time": "09/11/2024 09:50 PM",
"tokens": 46,
"latency": "1.52"
},
"parentMessageId": "question-cd5affb0-7bc2-4a6f-be7e-25e74595c9dd"
},
{
"id": "question-324bce32-c98c-435d-a66b-bac974ebb5ed",
"content": "3306",
"isAnswer": false,
"message_files": [],
"parentMessageId": "cd5affb0-7bc2-4a6f-be7e-25e74595c9dd"
},
{
"id": "324bce32-c98c-435d-a66b-bac974ebb5ed",
"content": "My number is 4729. Your turn!",
"agent_thoughts": [
{
"id": "0773bec7-b992-4a53-92b2-20ebaeae8798",
"chain_id": null,
"message_id": "324bce32-c98c-435d-a66b-bac974ebb5ed",
"position": 1,
"thought": "My number is 4729. Your turn!",
"tool": "",
"tool_labels": {},
"tool_input": "",
"created_at": 1726105822,
"observation": "",
"files": []
}
],
"feedbackDisabled": false,
"isAnswer": true,
"message_files": [],
"log": [
{
"role": "user",
"text": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38",
"files": []
},
{
"role": "assistant",
"text": "Sure! My number is 54. Your turn!",
"files": []
},
{
"role": "user",
"text": "3306",
"files": []
},
{
"role": "assistant",
"text": "My number is 4729. Your turn!",
"files": []
}
],
"workflow_run_id": null,
"conversationId": "dd6c9cfd-2656-48ec-bd51-2139c1790d80",
"input": {
"inputs": {},
"query": "3306"
},
"more": {
"time": "09/11/2024 09:50 PM",
"tokens": 66,
"latency": "1.30"
},
"parentMessageId": "question-324bce32-c98c-435d-a66b-bac974ebb5ed"
},
{
"id": "question-684b5396-4e91-4043-88e9-aabe48b21acc",
"content": "3306",
"isAnswer": false,
"message_files": [],
"parentMessageId": "cd5affb0-7bc2-4a6f-be7e-25e74595c9dd"
},
{
"id": "684b5396-4e91-4043-88e9-aabe48b21acc",
"content": "My number is 4821. Your turn!",
"agent_thoughts": [
{
"id": "5ca650f3-982c-4399-8b95-9ea241c76707",
"chain_id": null,
"message_id": "684b5396-4e91-4043-88e9-aabe48b21acc",
"position": 1,
"thought": "My number is 4821. Your turn!",
"tool": "",
"tool_labels": {},
"tool_input": "",
"created_at": 1726107812,
"observation": "",
"files": []
}
],
"feedbackDisabled": false,
"isAnswer": true,
"message_files": [],
"log": [
{
"role": "user",
"text": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38",
"files": []
},
{
"role": "assistant",
"text": "Sure! My number is 54. Your turn!",
"files": []
},
{
"role": "user",
"text": "3306",
"files": []
},
{
"role": "assistant",
"text": "My number is 4821. Your turn!",
"files": []
}
],
"workflow_run_id": null,
"conversationId": "dd6c9cfd-2656-48ec-bd51-2139c1790d80",
"input": {
"inputs": {},
"query": "3306"
},
"more": {
"time": "09/11/2024 10:23 PM",
"tokens": 66,
"latency": "1.48"
},
"parentMessageId": "question-684b5396-4e91-4043-88e9-aabe48b21acc"
},
{
"id": "question-19904a7b-7494-4ed8-b72c-1d18668cea8c",
"content": "1003",
"isAnswer": false,
"message_files": [],
"parentMessageId": "684b5396-4e91-4043-88e9-aabe48b21acc"
},
{
"id": "19904a7b-7494-4ed8-b72c-1d18668cea8c",
"content": "My number is 1456. Your turn!",
"agent_thoughts": [
{
"id": "095cacab-afad-4387-a41d-1662578b8b13",
"chain_id": null,
"message_id": "19904a7b-7494-4ed8-b72c-1d18668cea8c",
"position": 1,
"thought": "My number is 1456. Your turn!",
"tool": "",
"tool_labels": {},
"tool_input": "",
"created_at": 1726111024,
"observation": "",
"files": []
}
],
"feedbackDisabled": false,
"isAnswer": true,
"message_files": [],
"log": [
{
"role": "user",
"text": "Let's play a game, I say a number , and you response me with another bigger, yet randomly number. I'll start first, 38",
"files": []
},
{
"role": "assistant",
"text": "Sure! My number is 54. Your turn!",
"files": []
},
{
"role": "user",
"text": "3306",
"files": []
},
{
"role": "assistant",
"text": "My number is 4821. Your turn!",
"files": []
},
{
"role": "user",
"text": "1003",
"files": []
},
{
"role": "assistant",
"text": "My number is 1456. Your turn!",
"files": []
}
],
"workflow_run_id": null,
"conversationId": "dd6c9cfd-2656-48ec-bd51-2139c1790d80",
"input": {
"inputs": {},
"query": "1003"
},
"more": {
"time": "09/11/2024 11:17 PM",
"tokens": 86,
"latency": "1.38"
},
"parentMessageId": "question-19904a7b-7494-4ed8-b72c-1d18668cea8c"
}
]

View File

@ -0,0 +1,258 @@
import { get } from 'lodash'
import { buildChatItemTree, getThreadMessages } from '../utils'
import type { ChatItemInTree } from '../types'
import branchedTestMessages from './branchedTestMessages.json'
import legacyTestMessages from './legacyTestMessages.json'
import mixedTestMessages from './mixedTestMessages.json'
import multiRootNodesMessages from './multiRootNodesMessages.json'
import multiRootNodesWithLegacyTestMessages from './multiRootNodesWithLegacyTestMessages.json'
import realWorldMessages from './realWorldMessages.json'
/**
 * Resolve a node of a chat-item tree (or forest) via a lodash-style
 * path string, e.g. '0.children.1' or 'children.0.children.0'.
 * Thin wrapper kept for test readability.
 */
function visitNode(tree: ChatItemInTree | ChatItemInTree[], path: string): ChatItemInTree {
  const resolved = get(tree, path)
  return resolved
}
// Suite for buildChatItemTree / getThreadMessages (message-thread reconstruction).
// Fixtures cover: branched (regenerated) answers, legacy messages (no real
// parentMessageId), mixed legacy+branched, multiple root nodes, multiple roots
// with legacy items, and a real-world conversation export (snapshot-tested).
describe('build chat item tree and get thread messages', () => {
  // tree1: branched messages — answer '1' has two question children ('2' and '4').
  const tree1 = buildChatItemTree(branchedTestMessages as ChatItemInTree[])
  it('should build chat item tree1', () => {
    const a1 = visitNode(tree1, '0.children.0')
    expect(a1.id).toBe('1')
    expect(a1.children).toHaveLength(2)
    const a2 = visitNode(a1, 'children.0.children.0')
    expect(a2.id).toBe('2')
    expect(a2.siblingIndex).toBe(0)
    const a3 = visitNode(a2, 'children.0.children.0')
    expect(a3.id).toBe('3')
    const a4 = visitNode(a1, 'children.1.children.0')
    expect(a4.id).toBe('4')
    expect(a4.siblingIndex).toBe(1)
  })
  it('should get thread messages from tree1, using the last message as the target', () => {
    // With no target id, the deepest last message ('4') defines the thread.
    const threadChatItems1_1 = getThreadMessages(tree1)
    expect(threadChatItems1_1).toHaveLength(4)
    const q1 = visitNode(threadChatItems1_1, '0')
    const a1 = visitNode(threadChatItems1_1, '1')
    const q4 = visitNode(threadChatItems1_1, '2')
    const a4 = visitNode(threadChatItems1_1, '3')
    expect(q1.id).toBe('question-1')
    expect(a1.id).toBe('1')
    expect(q4.id).toBe('question-4')
    expect(a4.id).toBe('4')
    expect(a4.siblingCount).toBe(2)
    expect(a4.siblingIndex).toBe(1)
  })
  it('should get thread messages from tree1, using the message with id 3 as the target', () => {
    // Targeting '3' selects the other branch: 1 -> 2 -> 3.
    const threadChatItems1_2 = getThreadMessages(tree1, '3')
    expect(threadChatItems1_2).toHaveLength(6)
    const q1 = visitNode(threadChatItems1_2, '0')
    const a1 = visitNode(threadChatItems1_2, '1')
    const q2 = visitNode(threadChatItems1_2, '2')
    const a2 = visitNode(threadChatItems1_2, '3')
    const q3 = visitNode(threadChatItems1_2, '4')
    const a3 = visitNode(threadChatItems1_2, '5')
    expect(q1.id).toBe('question-1')
    expect(a1.id).toBe('1')
    expect(q2.id).toBe('question-2')
    expect(a2.id).toBe('2')
    expect(q3.id).toBe('question-3')
    expect(a3.id).toBe('3')
    expect(a2.siblingCount).toBe(2)
    expect(a2.siblingIndex).toBe(0)
  })
  // tree2: legacy messages chain linearly — a single path 1 -> 2 -> 3 -> 4.
  const tree2 = buildChatItemTree(legacyTestMessages as ChatItemInTree[])
  it('should work with legacy chat items', () => {
    expect(tree2).toHaveLength(1)
    const q1 = visitNode(tree2, '0')
    const a1 = visitNode(q1, 'children.0')
    const q2 = visitNode(a1, 'children.0')
    const a2 = visitNode(q2, 'children.0')
    const q3 = visitNode(a2, 'children.0')
    const a3 = visitNode(q3, 'children.0')
    const q4 = visitNode(a3, 'children.0')
    const a4 = visitNode(q4, 'children.0')
    expect(q1.id).toBe('question-1')
    expect(a1.id).toBe('1')
    expect(q2.id).toBe('question-2')
    expect(a2.id).toBe('2')
    expect(q3.id).toBe('question-3')
    expect(a3.id).toBe('3')
    expect(q4.id).toBe('question-4')
    expect(a4.id).toBe('4')
  })
  it('should get thread messages from tree2, using the last message as the target', () => {
    const threadMessages2 = getThreadMessages(tree2)
    expect(threadMessages2).toHaveLength(8)
    const q1 = visitNode(threadMessages2, '0')
    const a1 = visitNode(threadMessages2, '1')
    const q2 = visitNode(threadMessages2, '2')
    const a2 = visitNode(threadMessages2, '3')
    const q3 = visitNode(threadMessages2, '4')
    const a3 = visitNode(threadMessages2, '5')
    const q4 = visitNode(threadMessages2, '6')
    const a4 = visitNode(threadMessages2, '7')
    expect(q1.id).toBe('question-1')
    expect(a1.id).toBe('1')
    expect(q2.id).toBe('question-2')
    expect(a2.id).toBe('2')
    expect(q3.id).toBe('question-3')
    expect(a3.id).toBe('3')
    expect(q4.id).toBe('question-4')
    expect(a4.id).toBe('4')
    // Legacy answers have no alternatives: every node is its own only sibling.
    expect(a1.siblingCount).toBe(1)
    expect(a1.siblingIndex).toBe(0)
    expect(a2.siblingCount).toBe(1)
    expect(a2.siblingIndex).toBe(0)
    expect(a3.siblingCount).toBe(1)
    expect(a3.siblingIndex).toBe(0)
    expect(a4.siblingCount).toBe(1)
    expect(a4.siblingIndex).toBe(0)
  })
  // tree3: mixed legacy + branched items; expected shape matches tree1.
  const tree3 = buildChatItemTree(mixedTestMessages as ChatItemInTree[])
  it('should build mixed chat items tree', () => {
    expect(tree3).toHaveLength(1)
    const a1 = visitNode(tree3, '0.children.0')
    expect(a1.id).toBe('1')
    expect(a1.children).toHaveLength(2)
    const a2 = visitNode(a1, 'children.0.children.0')
    expect(a2.id).toBe('2')
    expect(a2.siblingIndex).toBe(0)
    const a3 = visitNode(a2, 'children.0.children.0')
    expect(a3.id).toBe('3')
    const a4 = visitNode(a1, 'children.1.children.0')
    expect(a4.id).toBe('4')
    expect(a4.siblingIndex).toBe(1)
  })
  it('should get thread messages from tree3, using the last message as the target', () => {
    const threadMessages3_1 = getThreadMessages(tree3)
    expect(threadMessages3_1).toHaveLength(4)
    const q1 = visitNode(threadMessages3_1, '0')
    const a1 = visitNode(threadMessages3_1, '1')
    const q4 = visitNode(threadMessages3_1, '2')
    const a4 = visitNode(threadMessages3_1, '3')
    expect(q1.id).toBe('question-1')
    expect(a1.id).toBe('1')
    expect(q4.id).toBe('question-4')
    expect(a4.id).toBe('4')
    expect(a4.siblingCount).toBe(2)
    expect(a4.siblingIndex).toBe(1)
  })
  it('should get thread messages from tree3, using the message with id 3 as the target', () => {
    const threadMessages3_2 = getThreadMessages(tree3, '3')
    expect(threadMessages3_2).toHaveLength(6)
    const q1 = visitNode(threadMessages3_2, '0')
    const a1 = visitNode(threadMessages3_2, '1')
    const q2 = visitNode(threadMessages3_2, '2')
    const a2 = visitNode(threadMessages3_2, '3')
    const q3 = visitNode(threadMessages3_2, '4')
    const a3 = visitNode(threadMessages3_2, '5')
    expect(q1.id).toBe('question-1')
    expect(a1.id).toBe('1')
    expect(q2.id).toBe('question-2')
    expect(a2.id).toBe('2')
    expect(q3.id).toBe('question-3')
    expect(a3.id).toBe('3')
    expect(a2.siblingCount).toBe(2)
    expect(a2.siblingIndex).toBe(0)
  })
  // tree4: two independent root nodes (e.g. a regenerated first question).
  const tree4 = buildChatItemTree(multiRootNodesMessages as ChatItemInTree[])
  it('should build multi root nodes chat items tree', () => {
    expect(tree4).toHaveLength(2)
    const a5 = visitNode(tree4, '1.children.0')
    expect(a5.id).toBe('5')
    expect(a5.siblingIndex).toBe(1)
  })
  it('should get thread messages from tree4, using the last message as the target', () => {
    const threadMessages4 = getThreadMessages(tree4)
    expect(threadMessages4).toHaveLength(2)
    const a1 = visitNode(threadMessages4, '0.children.0')
    expect(a1.id).toBe('5')
  })
  it('should get thread messages from tree4, using the message with id 2 as the target', () => {
    const threadMessages4_1 = getThreadMessages(tree4, '2')
    expect(threadMessages4_1).toHaveLength(6)
    const a1 = visitNode(threadMessages4_1, '1')
    expect(a1.id).toBe('1')
    const a2 = visitNode(threadMessages4_1, '3')
    expect(a2.id).toBe('2')
    const a3 = visitNode(threadMessages4_1, '5')
    expect(a3.id).toBe('3')
  })
  // tree5: multiple roots where the second root is a legacy-style item.
  const tree5 = buildChatItemTree(multiRootNodesWithLegacyTestMessages as ChatItemInTree[])
  it('should work with multi root nodes chat items with legacy chat items', () => {
    expect(tree5).toHaveLength(2)
    const q5 = visitNode(tree5, '1')
    expect(q5.id).toBe('question-5')
    expect(q5.parentMessageId).toBe(null)
    const a5 = visitNode(q5, 'children.0')
    expect(a5.id).toBe('5')
    expect(a5.children).toHaveLength(0)
  })
  it('should get thread messages from tree5, using the last message as the target', () => {
    const threadMessages5 = getThreadMessages(tree5)
    expect(threadMessages5).toHaveLength(2)
    const q5 = visitNode(threadMessages5, '0')
    const a5 = visitNode(threadMessages5, '1')
    expect(q5.id).toBe('question-5')
    expect(a5.id).toBe('5')
    expect(a5.siblingCount).toBe(2)
    expect(a5.siblingIndex).toBe(1)
  })
  // tree6: real exported conversation — structure is pinned via snapshots.
  const tree6 = buildChatItemTree(realWorldMessages as ChatItemInTree[])
  it('should work with real world messages', () => {
    expect(tree6).toMatchSnapshot()
  })
  it ('should get thread messages from tree6, using the last message as target', () => {
    const threadMessages6_1 = getThreadMessages(tree6)
    expect(threadMessages6_1).toMatchSnapshot()
  })
  it ('should get thread messages from tree6, using specified message as target', () => {
    const threadMessages6_2 = getThreadMessages(tree6, 'ff4c2b43-48a5-47ad-9dc5-08b34ddba61b')
    expect(threadMessages6_2).toMatchSnapshot()
  })
})

View File

@ -19,6 +19,7 @@ import Citation from '@/app/components/base/chat/chat/citation'
import { EditTitle } from '@/app/components/app/annotation/edit-annotation-modal/edit-item'
import type { AppData } from '@/models/share'
import AnswerIcon from '@/app/components/base/answer-icon'
import { ChevronRight } from '@/app/components/base/icons/src/vender/line/arrows'
import cn from '@/utils/classnames'
import { FileList } from '@/app/components/base/file-uploader'
@ -34,6 +35,7 @@ type AnswerProps = {
hideProcessDetail?: boolean
appData?: AppData
noChatInput?: boolean
switchSibling?: (siblingMessageId: string) => void
}
const Answer: FC<AnswerProps> = ({
item,
@ -47,6 +49,7 @@ const Answer: FC<AnswerProps> = ({
hideProcessDetail,
appData,
noChatInput,
switchSibling,
}) => {
const { t } = useTranslation()
const {
@ -203,6 +206,23 @@ const Answer: FC<AnswerProps> = ({
<Citation data={citation} showHitInfo={config?.supportCitationHitInfo} />
)
}
{item.siblingCount && item.siblingCount > 1 && item.siblingIndex !== undefined && <div className="pt-3.5 flex justify-center items-center text-sm">
<button
className={`${item.prevSibling ? 'opacity-100' : 'opacity-65'}`}
disabled={!item.prevSibling}
onClick={() => item.prevSibling && switchSibling?.(item.prevSibling)}
>
<ChevronRight className="w-[14px] h-[14px] rotate-180 text-gray-500" />
</button>
<span className="px-2 text-xs text-gray-700">{item.siblingIndex + 1} / {item.siblingCount}</span>
<button
className={`${item.nextSibling ? 'opacity-100' : 'opacity-65'}`}
disabled={!item.nextSibling}
onClick={() => item.nextSibling && switchSibling?.(item.nextSibling)}
>
<ChevronRight className="w-[14px] h-[14px] text-gray-500" />
</button>
</div>}
</div>
</div>
<More more={more} />

View File

@ -65,6 +65,7 @@ export type ChatProps = {
hideProcessDetail?: boolean
hideLogModal?: boolean
themeBuilder?: ThemeBuilder
switchSibling?: (siblingMessageId: string) => void
showFeatureBar?: boolean
showFileUpload?: boolean
onFeatureBarClick?: (state: boolean) => void
@ -100,6 +101,7 @@ const Chat: FC<ChatProps> = ({
hideProcessDetail,
hideLogModal,
themeBuilder,
switchSibling,
showFeatureBar,
showFileUpload,
onFeatureBarClick,
@ -232,6 +234,7 @@ const Chat: FC<ChatProps> = ({
chatAnswerContainerInner={chatAnswerContainerInner}
hideProcessDetail={hideProcessDetail}
noChatInput={noChatInput}
switchSibling={switchSibling}
/>
)
}

View File

@ -97,7 +97,11 @@ export type IChatItem = {
// for agent log
conversationId?: string
input?: any
parentMessageId?: string
parentMessageId?: string | null
siblingCount?: number
siblingIndex?: number
prevSibling?: string
nextSibling?: string
}
export type Metadata = {

View File

@ -65,6 +65,10 @@ export type ChatItem = IChatItem & {
allFiles?: FileEntity[]
}
export type ChatItemInTree = {
children?: ChatItemInTree[]
} & IChatItem
export type OnSend = (message: string, files?: FileEntity[], last_answer?: ChatItem | null) => void
export type OnRegenerate = (chatItem: ChatItem) => void

View File

@ -1,6 +1,7 @@
import { addFileInfos, sortAgentSorts } from '../../tools/utils'
import { UUID_NIL } from './constants'
import type { ChatItem } from './types'
import type { IChatItem } from './chat/type'
import type { ChatItem, ChatItemInTree } from './types'
import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
async function decodeBase64AndDecompress(base64String: string) {
@ -81,8 +82,131 @@ function getPrevChatList(fetchedMessages: any[]) {
return ret.reverse()
}
/**
 * Build a conversation tree (forest) from a flat, ordered list of chat items.
 *
 * The input alternates question/answer pairs: [q1, a1, q2, a2, ...].
 * Branching happens when several questions share the same parent answer
 * (message regeneration). Legacy items carry parentMessageId === UUID_NIL and
 * are chained linearly after the most recently appended legacy answer.
 * Returns the list of root question nodes.
 */
function buildChatItemTree(allMessages: IChatItem[]): ChatItemInTree[] {
  const map: Record<string, ChatItemInTree> = {} // message id -> its tree node
  const rootNodes: ChatItemInTree[] = []
  const childrenCount: Record<string, number> = {} // parent id -> question children seen so far
  let lastAppendedLegacyAnswer: ChatItemInTree | null = null
  // NOTE(review): assumes an even-length list of strict Q/A pairs — with an odd
  // length, `answer` is undefined and `map[answer.id]` would throw. Confirm upstream.
  for (let i = 0; i < allMessages.length; i += 2) {
    const question = allMessages[i]!
    const answer = allMessages[i + 1]!
    const isLegacy = question.parentMessageId === UUID_NIL
    // Legacy items are re-parented onto the previous legacy answer; '' means root.
    const parentMessageId = isLegacy
      ? (lastAppendedLegacyAnswer?.id || '')
      : (question.parentMessageId || '')
    // Process question
    childrenCount[parentMessageId] = (childrenCount[parentMessageId] || 0) + 1
    const questionNode: ChatItemInTree = {
      ...question,
      children: [],
    }
    map[question.id] = questionNode
    // Process answer — siblingIndex orders alternative answers under one parent.
    childrenCount[question.id] = 1
    const answerNode: ChatItemInTree = {
      ...answer,
      children: [],
      siblingIndex: isLegacy ? 0 : childrenCount[parentMessageId] - 1,
    }
    map[answer.id] = answerNode
    // Connect question and answer
    questionNode.children!.push(answerNode)
    // Append to parent or add to root
    if (isLegacy) {
      if (!lastAppendedLegacyAnswer)
        rootNodes.push(questionNode)
      else
        lastAppendedLegacyAnswer.children!.push(questionNode)
      lastAppendedLegacyAnswer = answerNode
    }
    else {
      if (!parentMessageId)
        rootNodes.push(questionNode)
      else
        // Optional chaining: a dangling parent id silently drops the subtree.
        map[parentMessageId]?.children!.push(questionNode)
    }
  }
  return rootNodes
}
/**
 * Flatten one thread (root-to-leaf path) of the conversation tree into a list.
 *
 * Phase 1: depth-first search for `targetMessageId` (or, if omitted, the last
 * leaf visited in DFS order), collecting the root-to-target path and decorating
 * each answer on it with siblingCount / prevSibling / nextSibling so the UI can
 * page between alternative answers.
 * Phase 2: extend the path below the target by always following the *last*
 * child, decorating those answers the same way.
 */
function getThreadMessages(tree: ChatItemInTree[], targetMessageId?: string): ChatItemInTree[] {
  let ret: ChatItemInTree[] = []
  let targetNode: ChatItemInTree | undefined
  // find path to the target message
  // toReversed (ES2023) + stack pop keeps DFS visiting roots in original order.
  const stack = tree.toReversed().map(rootNode => ({
    node: rootNode,
    path: [rootNode],
  }))
  while (stack.length > 0) {
    const { node, path } = stack.pop()!
    if (
      node.id === targetMessageId
      || (!targetMessageId && !node.children?.length && !stack.length) // if targetMessageId is not provided, we use the last message in the tree as the target
    ) {
      targetNode = node
      ret = path.map((item, index) => {
        if (!item.isAnswer)
          return item
        // path alternates question/answer, so the answer two steps up is the
        // parent answer; absent (top-level answer) means siblings are the roots.
        const parentAnswer = path[index - 2]
        const siblingCount = !parentAnswer ? tree.length : parentAnswer.children!.length
        // NOTE(review): the non-root branches end in `?.[0].id` (no `?.` before
        // `.id`) unlike prevSibling's root branch — a sibling question with no
        // answer child would throw here. Confirm whether that state can occur.
        const prevSibling = !parentAnswer ? tree[item.siblingIndex! - 1]?.children?.[0]?.id : parentAnswer.children![item.siblingIndex! - 1]?.children?.[0].id
        const nextSibling = !parentAnswer ? tree[item.siblingIndex! + 1]?.children?.[0]?.id : parentAnswer.children![item.siblingIndex! + 1]?.children?.[0].id
        return { ...item, siblingCount, prevSibling, nextSibling }
      })
      break
    }
    if (node.children) {
      // Push children reversed so the first child is processed first (LIFO stack).
      for (let i = node.children.length - 1; i >= 0; i--) {
        stack.push({
          node: node.children[i],
          path: [...path, node.children[i]],
        })
      }
    }
  }
  // append all descendant messages to the path
  if (targetNode) {
    const stack = [targetNode]
    while (stack.length > 0) {
      const node = stack.pop()!
      if (node !== targetNode)
        ret.push(node)
      if (node.children?.length) {
        // Always descend into the most recent (last) child of each node.
        const lastChild = node.children.at(-1)!
        if (!lastChild.isAnswer) {
          stack.push(lastChild)
          continue
        }
        // Decorate descendant answers; nextSibling is omitted here since the
        // last child by construction has none.
        const parentAnswer = ret.at(-2)
        const siblingCount = parentAnswer?.children?.length
        const prevSibling = parentAnswer?.children?.at(-2)?.children?.[0]?.id
        stack.push({ ...lastChild, siblingCount, prevSibling })
      }
    }
  }
  return ret
}
export {
getProcessedInputsFromUrlParams,
getLastAnswer,
getPrevChatList,
getLastAnswer,
buildChatItemTree,
getThreadMessages,
}

View File

@ -12,59 +12,44 @@ import {
import { useWorkflowRun } from '../../hooks'
import UserInput from './user-input'
import Chat from '@/app/components/base/chat/chat'
import type { ChatItem } from '@/app/components/base/chat/types'
import type { ChatItem, ChatItemInTree } from '@/app/components/base/chat/types'
import { fetchConversationMessages } from '@/service/debug'
import { useStore as useAppStore } from '@/app/components/app/store'
import Loading from '@/app/components/base/loading'
import { UUID_NIL } from '@/app/components/base/chat/constants'
import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
function appendQAToChatList(newChatList: ChatItem[], item: any) {
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
newChatList.push({
id: item.id,
content: item.answer,
feedback: item.feedback,
isAnswer: true,
citation: item.metadata?.retriever_resources,
message_files: getProcessedFilesFromResponse(answerFiles.map((item: any) => ({ ...item, related_id: item.id }))),
workflow_run_id: item.workflow_run_id,
})
const questionFiles = item.message_files?.filter((file: any) => file.belongs_to === 'user') || []
newChatList.push({
id: `question-${item.id}`,
content: item.query,
isAnswer: false,
message_files: getProcessedFilesFromResponse(questionFiles.map((item: any) => ({ ...item, related_id: item.id }))),
})
}
import type { IChatItem } from '@/app/components/base/chat/chat/type'
import { buildChatItemTree, getThreadMessages } from '@/app/components/base/chat/utils'
function getFormattedChatList(messages: any[]) {
const newChatList: ChatItem[] = []
let nextMessageId = null
for (const item of messages) {
if (!item.parent_message_id) {
appendQAToChatList(newChatList, item)
break
}
if (!nextMessageId) {
appendQAToChatList(newChatList, item)
nextMessageId = item.parent_message_id
}
else {
if (item.id === nextMessageId || nextMessageId === UUID_NIL) {
appendQAToChatList(newChatList, item)
nextMessageId = item.parent_message_id
}
}
}
return newChatList.reverse()
const res: ChatItem[] = []
messages.forEach((item: any) => {
const questionFiles = item.message_files?.filter((file: any) => file.belongs_to === 'user') || []
res.push({
id: `question-${item.id}`,
content: item.query,
isAnswer: false,
message_files: getProcessedFilesFromResponse(questionFiles.map((item: any) => ({ ...item, related_id: item.id }))),
parentMessageId: item.parent_message_id || undefined,
})
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
res.push({
id: item.id,
content: item.answer,
feedback: item.feedback,
isAnswer: true,
citation: item.metadata?.retriever_resources,
message_files: getProcessedFilesFromResponse(answerFiles.map((item: any) => ({ ...item, related_id: item.id }))),
workflow_run_id: item.workflow_run_id,
parentMessageId: `question-${item.id}`,
})
})
return res
}
const ChatRecord = () => {
const [fetched, setFetched] = useState(false)
const [chatList, setChatList] = useState<ChatItem[]>([])
const [chatItemTree, setChatItemTree] = useState<ChatItemInTree[]>([])
const [threadChatItems, setThreadChatItems] = useState<IChatItem[]>([])
const appDetail = useAppStore(s => s.appDetail)
const workflowStore = useWorkflowStore()
const { handleLoadBackupDraft } = useWorkflowRun()
@ -76,20 +61,29 @@ const ChatRecord = () => {
try {
setFetched(false)
const res = await fetchConversationMessages(appDetail.id, currentConversationID)
setChatList(getFormattedChatList((res as any).data))
const newAllChatItems = getFormattedChatList((res as any).data)
const tree = buildChatItemTree(newAllChatItems)
setChatItemTree(tree)
setThreadChatItems(getThreadMessages(tree, newAllChatItems.at(-1)?.id))
}
catch (e) {
console.error(e)
}
finally {
setFetched(true)
}
}
}, [appDetail, currentConversationID])
useEffect(() => {
handleFetchConversationMessages()
}, [currentConversationID, appDetail, handleFetchConversationMessages])
const switchSibling = useCallback((siblingMessageId: string) => {
setThreadChatItems(getThreadMessages(chatItemTree, siblingMessageId))
}, [chatItemTree])
return (
<div
className={`
@ -123,7 +117,7 @@ const ChatRecord = () => {
config={{
supportCitationHitInfo: true,
} as any}
chatList={chatList}
chatList={threadChatItems}
chatContainerClassName='px-3'
chatContainerInnerClassName='pt-6 w-full max-w-full mx-auto'
chatFooterClassName='px-4 rounded-b-2xl'
@ -132,6 +126,7 @@ const ChatRecord = () => {
noChatInput
allToolIcons={{}}
showPromptLog
switchSibling={switchSibling}
noSpacing
chatAnswerContainerInner='!pr-2'
/>

View File

@ -99,6 +99,7 @@ const config: Config = {
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
moduleNameMapper: {
'^@/components/(.*)$': '<rootDir>/components/$1',
'^lodash-es$': 'lodash',
},
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader