fix: chat generation render

This commit is contained in:
zxhlyh
2026-01-27 13:36:22 +08:00
parent 74f94633d7
commit 53bc060cea
12 changed files with 201 additions and 120 deletions

View File

@@ -12,7 +12,7 @@ import {
} from 'react'
import { useStore as useAppStore } from '@/app/components/app/store'
import Chat from '@/app/components/base/chat/chat'
import { buildChatItemTree, buildToolCallsFromHistorySequence, getThreadMessages } from '@/app/components/base/chat/utils'
import { buildChatItemTree, buildLLMGenerationItemsFromHistorySequence, getThreadMessages } from '@/app/components/base/chat/utils'
import { getProcessedFilesFromResponse } from '@/app/components/base/file-uploader/utils'
import Loading from '@/app/components/base/loading'
import { fetchConversationMessages } from '@/service/debug'
@@ -38,8 +38,8 @@ function getFormattedChatList(messages: ChatMessageRes[]) {
const answerFiles = item.message_files?.filter((file: any) => file.belongs_to === 'assistant') || []
res.push({
id: item.id,
content: buildToolCallsFromHistorySequence(item).message,
toolCalls: buildToolCallsFromHistorySequence(item).toolCalls,
content: buildLLMGenerationItemsFromHistorySequence(item).message,
llmGenerationItems: buildLLMGenerationItemsFromHistorySequence(item).llmGenerationItems,
feedback: item.feedback,
isAnswer: true,
citation: item.metadata?.retriever_resources,

View File

@@ -158,14 +158,37 @@ export function useChatMessageSender({
}) => {
if (!isCurrentRun())
return
if (chunk_type === 'text')
if (chunk_type === 'text') {
responseItem.content = responseItem.content + message
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].text += message
}
else {
toolCallId = uuidV4()
responseItem.llmGenerationItems?.push({
id: toolCallId,
type: 'text',
text: message,
})
}
}
if (chunk_type === 'tool_call') {
if (!responseItem.toolCalls)
responseItem.toolCalls = []
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
}
toolCallId = uuidV4()
responseItem.toolCalls?.push({
responseItem.llmGenerationItems?.push({
id: toolCallId,
type: 'tool',
toolName: tool_name,
@@ -176,21 +199,26 @@ export function useChatMessageSender({
}
if (chunk_type === 'tool_result') {
const currentToolCallIndex = responseItem.toolCalls?.findIndex(item => item.id === toolCallId) ?? -1
const currentToolCallIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === toolCallId) ?? -1
if (currentToolCallIndex > -1) {
responseItem.toolCalls![currentToolCallIndex].toolError = tool_error
responseItem.toolCalls![currentToolCallIndex].toolDuration = tool_elapsed_time
responseItem.toolCalls![currentToolCallIndex].toolFiles = tool_files
responseItem.toolCalls![currentToolCallIndex].toolOutput = message
responseItem.llmGenerationItems![currentToolCallIndex].toolError = tool_error
responseItem.llmGenerationItems![currentToolCallIndex].toolDuration = tool_elapsed_time
responseItem.llmGenerationItems![currentToolCallIndex].toolFiles = tool_files
responseItem.llmGenerationItems![currentToolCallIndex].toolOutput = message
}
}
if (chunk_type === 'thought_start') {
if (!responseItem.toolCalls)
responseItem.toolCalls = []
if (!responseItem.llmGenerationItems)
responseItem.llmGenerationItems = []
const isNotCompletedTextItemIndex = responseItem.llmGenerationItems?.findIndex(item => item.type === 'text' && !item.textCompleted)
if (isNotCompletedTextItemIndex > -1) {
responseItem.llmGenerationItems![isNotCompletedTextItemIndex].textCompleted = true
}
thoughtId = uuidV4()
responseItem.toolCalls.push({
responseItem.llmGenerationItems?.push({
id: thoughtId,
type: 'thought',
thoughtOutput: '',
@@ -198,17 +226,17 @@ export function useChatMessageSender({
}
if (chunk_type === 'thought') {
const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
}
}
if (chunk_type === 'thought_end') {
const currentThoughtIndex = responseItem.toolCalls?.findIndex(item => item.id === thoughtId) ?? -1
const currentThoughtIndex = responseItem.llmGenerationItems?.findIndex(item => item.id === thoughtId) ?? -1
if (currentThoughtIndex > -1) {
responseItem.toolCalls![currentThoughtIndex].thoughtOutput += message
responseItem.toolCalls![currentThoughtIndex].thoughtCompleted = true
responseItem.llmGenerationItems![currentThoughtIndex].thoughtOutput += message
responseItem.llmGenerationItems![currentThoughtIndex].thoughtCompleted = true
}
}
@@ -245,6 +273,10 @@ export function useChatMessageSender({
if (errorMessage) {
responseItem.content = errorMessage
responseItem.isError = true
responseItem.llmGenerationItems?.forEach((item) => {
if (item.type === 'text')
item.isError = true
})
updateCurrentQAOnTree({
placeholderQuestionId,
questionItem,

View File

@@ -2,8 +2,8 @@
import type { FC } from 'react'
import type {
LLMGenerationItem,
LLMTraceItem,
ToolCallItem,
} from '@/types/workflow'
import {
RiArrowLeftLine,
@@ -63,7 +63,7 @@ const LLMResultPanel: FC<Props> = ({
<div className="space-y-1 p-2">
{
formattedList.map((item, index) => (
<ToolCallItemComponent key={index} payload={item as ToolCallItem} />
<ToolCallItemComponent key={index} payload={item as LLMGenerationItem} />
))
}
</div>

View File

@@ -1,4 +1,4 @@
import type { ToolCallItem } from '@/types/workflow'
import type { LLMGenerationItem } from '@/types/workflow'
import {
RiArrowDownSLine,
} from '@remixicon/react'
@@ -6,6 +6,7 @@ import { useState } from 'react'
import { useTranslation } from 'react-i18next'
import AppIcon from '@/app/components/base/app-icon'
import { Thinking } from '@/app/components/base/icons/src/vender/workflow'
import { Markdown } from '@/app/components/base/markdown'
import BlockIcon from '@/app/components/workflow/block-icon'
import CodeEditor from '@/app/components/workflow/nodes/_base/components/editor/code-editor'
import { CodeLanguage } from '@/app/components/workflow/nodes/code/types'
@@ -14,7 +15,7 @@ import { cn } from '@/utils/classnames'
type ToolCallItemComponentProps = {
className?: string
payload: ToolCallItem
payload: LLMGenerationItem
}
const ToolCallItemComponent = ({
className,
@@ -22,6 +23,19 @@ const ToolCallItemComponent = ({
}: ToolCallItemComponentProps) => {
const { t } = useTranslation()
const [expand, setExpand] = useState(false)
if (payload.type === 'text') {
return (
<Markdown
className={cn(
'px-2',
payload.isError && '!text-[#F04438]',
)}
content={payload.text ?? ''}
/>
)
}
return (
<div
className={cn('rounded-[10px] border-[0.5px] border-components-panel-border bg-background-default-subtle px-2 pb-1 pt-2 shadow-xs', className)}