Merge branch 'main' into feat/memory-orchestration-fed

This commit is contained in:
zxhlyh
2025-10-11 14:08:09 +08:00
717 changed files with 13706 additions and 7996 deletions

View File

@ -73,7 +73,7 @@ const Tool: FC<Props> = ({
if (isHovering && !isAllSelected) {
return (
<span className='system-xs-regular text-components-button-secondary-accent-text'
onClick={(e) => {
onClick={() => {
onSelectMultiple?.(BlockEnum.Tool, actions.filter(action => !getIsDisabled(action)).map((tool) => {
const params: Record<string, string> = {}
if (tool.parameters) {

View File

@ -35,8 +35,6 @@ export const NODE_LAYOUT_HORIZONTAL_PADDING = 60
export const NODE_LAYOUT_VERTICAL_PADDING = 60
export const NODE_LAYOUT_MIN_DISTANCE = 100
export const PARALLEL_DEPTH_LIMIT = 3
export const RETRIEVAL_OUTPUT_STRUCT = `{
"content": "",
"title": "",

View File

@ -22,4 +22,3 @@ export * from './use-DSL'
export * from './use-inspect-vars-crud'
export * from './use-set-workflow-vars-with-value'
export * from './use-workflow-search'
export * from './use-format-time-from-now'

View File

@ -1,12 +0,0 @@
import dayjs from 'dayjs'
import { useCallback } from 'react'
import { useI18N } from '@/context/i18n'
/**
 * Hook exposing a locale-aware relative-time formatter ("3 minutes ago").
 * Re-created only when the app locale changes.
 */
export const useFormatTimeFromNow = () => {
  const { locale } = useI18N()

  const formatTimeFromNow = useCallback((time: number) => {
    // dayjs registers Simplified Chinese under 'zh-cn', while the app uses 'zh-Hans'.
    const dayjsLocale = locale === 'zh-Hans' ? 'zh-cn' : locale
    return dayjs(time).locale(dayjsLocale).fromNow()
  }, [locale])

  return { formatTimeFromNow }
}

View File

@ -70,7 +70,7 @@ export const useNodesInteractions = () => {
const reactflow = useReactFlow()
const { store: workflowHistoryStore } = useWorkflowHistoryStore()
const { handleSyncWorkflowDraft } = useNodesSyncDraft()
const { checkNestedParallelLimit, getAfterNodesInSameBranch } = useWorkflow()
const { getAfterNodesInSameBranch } = useWorkflow()
const { getNodesReadOnly } = useNodesReadOnly()
const { getWorkflowReadOnly } = useWorkflowReadOnly()
const { handleSetHelpline } = useHelpline()
@ -436,21 +436,13 @@ export const useNodesInteractions = () => {
draft.push(newEdge)
})
if (checkNestedParallelLimit(newNodes, newEdges, targetNode)) {
setNodes(newNodes)
setEdges(newEdges)
setNodes(newNodes)
setEdges(newEdges)
handleSyncWorkflowDraft()
saveStateToHistory(WorkflowHistoryEvent.NodeConnect, {
nodeId: targetNode?.id,
})
}
else {
const { setConnectingNodePayload, setEnteringNodePayload }
= workflowStore.getState()
setConnectingNodePayload(undefined)
setEnteringNodePayload(undefined)
}
handleSyncWorkflowDraft()
saveStateToHistory(WorkflowHistoryEvent.NodeConnect, {
nodeId: targetNode?.id,
})
},
[
getNodesReadOnly,
@ -458,7 +450,6 @@ export const useNodesInteractions = () => {
workflowStore,
handleSyncWorkflowDraft,
saveStateToHistory,
checkNestedParallelLimit,
],
)
@ -934,13 +925,8 @@ export const useNodesInteractions = () => {
if (newEdge) draft.push(newEdge)
})
if (checkNestedParallelLimit(newNodes, newEdges, prevNode)) {
setNodes(newNodes)
setEdges(newEdges)
}
else {
return false
}
setNodes(newNodes)
setEdges(newEdges)
}
if (!prevNodeId && nextNodeId) {
const nextNodeIndex = nodes.findIndex(node => node.id === nextNodeId)
@ -1087,17 +1073,11 @@ export const useNodesInteractions = () => {
draft.push(newEdge)
})
if (checkNestedParallelLimit(newNodes, newEdges, nextNode)) {
setNodes(newNodes)
setEdges(newEdges)
}
else {
return false
}
setNodes(newNodes)
setEdges(newEdges)
}
else {
if (checkNestedParallelLimit(newNodes, edges)) setNodes(newNodes)
else return false
setNodes(newNodes)
}
}
if (prevNodeId && nextNodeId) {
@ -1297,7 +1277,6 @@ export const useNodesInteractions = () => {
saveStateToHistory,
workflowStore,
getAfterNodesInSameBranch,
checkNestedParallelLimit,
nodesMetaDataMap,
],
)

View File

@ -107,7 +107,8 @@ export const useShortcuts = (): void => {
const { showDebugAndPreviewPanel } = workflowStore.getState()
if (shouldHandleShortcut(e) && !showDebugAndPreviewPanel) {
e.preventDefault()
workflowHistoryShortcutsEnabled && handleHistoryBack()
if (workflowHistoryShortcutsEnabled)
handleHistoryBack()
}
}, { exactMatch: true, useCapture: true })
@ -116,7 +117,8 @@ export const useShortcuts = (): void => {
(e) => {
if (shouldHandleShortcut(e)) {
e.preventDefault()
workflowHistoryShortcutsEnabled && handleHistoryForward()
if (workflowHistoryShortcutsEnabled)
handleHistoryForward()
}
},
{ exactMatch: true, useCapture: true },

View File

@ -16,39 +16,41 @@ import type { WorkflowHistoryEventMeta } from '../workflow-history-store'
* - InputChange events in Node Panels do not trigger state changes.
* - Resizing UI elements does not trigger state changes.
*/
// Events that create an undo/redo checkpoint in the workflow history store.
// Each member's string value mirrors its name so persisted history entries
// are self-describing.
export enum WorkflowHistoryEvent {
NodeTitleChange = 'NodeTitleChange',
NodeDescriptionChange = 'NodeDescriptionChange',
NodeDragStop = 'NodeDragStop',
NodeChange = 'NodeChange',
NodeConnect = 'NodeConnect',
NodePaste = 'NodePaste',
NodeDelete = 'NodeDelete',
EdgeDelete = 'EdgeDelete',
EdgeDeleteByDeleteBranch = 'EdgeDeleteByDeleteBranch',
NodeAdd = 'NodeAdd',
NodeResize = 'NodeResize',
NoteAdd = 'NoteAdd',
NoteChange = 'NoteChange',
NoteDelete = 'NoteDelete',
LayoutOrganize = 'LayoutOrganize',
}
// Events that create an undo/redo checkpoint in the workflow history store.
// Declared as a `const` object (rather than an `enum`) so values stay literal
// types and the object remains erasable/bundler-friendly.
export const WorkflowHistoryEvent = {
NodeTitleChange: 'NodeTitleChange',
NodeDescriptionChange: 'NodeDescriptionChange',
NodeDragStop: 'NodeDragStop',
NodeChange: 'NodeChange',
NodeConnect: 'NodeConnect',
NodePaste: 'NodePaste',
NodeDelete: 'NodeDelete',
EdgeDelete: 'EdgeDelete',
EdgeDeleteByDeleteBranch: 'EdgeDeleteByDeleteBranch',
NodeAdd: 'NodeAdd',
NodeResize: 'NodeResize',
NoteAdd: 'NoteAdd',
NoteChange: 'NoteChange',
NoteDelete: 'NoteDelete',
LayoutOrganize: 'LayoutOrganize',
} as const
// Union of the event names above ('NodeTitleChange' | 'NodeDescriptionChange' | …).
export type WorkflowHistoryEventT = keyof typeof WorkflowHistoryEvent
export const useWorkflowHistory = () => {
const store = useStoreApi()
const { store: workflowHistoryStore } = useWorkflowHistoryStore()
const { t } = useTranslation()
const [undoCallbacks, setUndoCallbacks] = useState<any[]>([])
const [redoCallbacks, setRedoCallbacks] = useState<any[]>([])
const [undoCallbacks, setUndoCallbacks] = useState<(() => void)[]>([])
const [redoCallbacks, setRedoCallbacks] = useState<(() => void)[]>([])
const onUndo = useCallback((callback: unknown) => {
setUndoCallbacks((prev: any) => [...prev, callback])
const onUndo = useCallback((callback: () => void) => {
setUndoCallbacks(prev => [...prev, callback])
return () => setUndoCallbacks(prev => prev.filter(cb => cb !== callback))
}, [])
const onRedo = useCallback((callback: unknown) => {
setRedoCallbacks((prev: any) => [...prev, callback])
const onRedo = useCallback((callback: () => void) => {
setRedoCallbacks(prev => [...prev, callback])
return () => setRedoCallbacks(prev => prev.filter(cb => cb !== callback))
}, [])
@ -65,7 +67,7 @@ export const useWorkflowHistory = () => {
// Some events may be triggered multiple times in a short period of time.
// We debounce the history state update to avoid creating multiple history states
// with minimal changes.
const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => {
const saveStateToHistoryRef = useRef(debounce((event: WorkflowHistoryEventT, meta?: WorkflowHistoryEventMeta) => {
workflowHistoryStore.setState({
workflowHistoryEvent: event,
workflowHistoryEventMeta: meta,
@ -74,7 +76,7 @@ export const useWorkflowHistory = () => {
})
}, 500))
const saveStateToHistory = useCallback((event: WorkflowHistoryEvent, meta?: WorkflowHistoryEventMeta) => {
const saveStateToHistory = useCallback((event: WorkflowHistoryEventT, meta?: WorkflowHistoryEventMeta) => {
switch (event) {
case WorkflowHistoryEvent.NoteChange:
// Hint: Note change does not trigger when note text changes,
@ -105,7 +107,7 @@ export const useWorkflowHistory = () => {
}
}, [])
const getHistoryLabel = useCallback((event: WorkflowHistoryEvent) => {
const getHistoryLabel = useCallback((event: WorkflowHistoryEventT) => {
switch (event) {
case WorkflowHistoryEvent.NodeTitleChange:
return t('workflow.changeHistory.nodeTitleChange')

View File

@ -10,7 +10,7 @@ import {
NODE_LAYOUT_VERTICAL_PADDING,
WORKFLOW_DATA_UPDATE,
} from '../constants'
import type { Node, WorkflowDataUpdater } from '../types'
import type { WorkflowDataUpdater } from '../types'
import { BlockEnum, ControlMode } from '../types'
import {
getLayoutByDagre,
@ -18,6 +18,7 @@ import {
initialEdges,
initialNodes,
} from '../utils'
import type { LayoutResult } from '../utils'
import {
useNodesReadOnly,
useSelectionInteractions,
@ -102,10 +103,17 @@ export const useWorkflowOrganize = () => {
&& node.type === CUSTOM_NODE,
)
const childLayoutsMap: Record<string, any> = {}
loopAndIterationNodes.forEach((node) => {
childLayoutsMap[node.id] = getLayoutForChildNodes(node.id, nodes, edges)
})
const childLayoutEntries = await Promise.all(
loopAndIterationNodes.map(async node => [
node.id,
await getLayoutForChildNodes(node.id, nodes, edges),
] as const),
)
const childLayoutsMap = childLayoutEntries.reduce((acc, [nodeId, layout]) => {
if (layout)
acc[nodeId] = layout
return acc
}, {} as Record<string, LayoutResult>)
const containerSizeChanges: Record<string, { width: number, height: number }> = {}
@ -113,37 +121,20 @@ export const useWorkflowOrganize = () => {
const childLayout = childLayoutsMap[parentNode.id]
if (!childLayout) return
let minX = Infinity
let minY = Infinity
let maxX = -Infinity
let maxY = -Infinity
let hasChildren = false
const {
bounds,
nodes: layoutNodes,
} = childLayout
const childNodes = nodes.filter(node => node.parentId === parentNode.id)
if (!layoutNodes.size)
return
childNodes.forEach((node) => {
if (childLayout.node(node.id)) {
hasChildren = true
const childNodeWithPosition = childLayout.node(node.id)
const requiredWidth = (bounds.maxX - bounds.minX) + NODE_LAYOUT_HORIZONTAL_PADDING * 2
const requiredHeight = (bounds.maxY - bounds.minY) + NODE_LAYOUT_VERTICAL_PADDING * 2
const nodeX = childNodeWithPosition.x - node.width! / 2
const nodeY = childNodeWithPosition.y - node.height! / 2
minX = Math.min(minX, nodeX)
minY = Math.min(minY, nodeY)
maxX = Math.max(maxX, nodeX + node.width!)
maxY = Math.max(maxY, nodeY + node.height!)
}
})
if (hasChildren) {
const requiredWidth = maxX - minX + NODE_LAYOUT_HORIZONTAL_PADDING * 2
const requiredHeight = maxY - minY + NODE_LAYOUT_VERTICAL_PADDING * 2
containerSizeChanges[parentNode.id] = {
width: Math.max(parentNode.width || 0, requiredWidth),
height: Math.max(parentNode.height || 0, requiredHeight),
}
containerSizeChanges[parentNode.id] = {
width: Math.max(parentNode.width || 0, requiredWidth),
height: Math.max(parentNode.height || 0, requiredHeight),
}
})
@ -166,63 +157,65 @@ export const useWorkflowOrganize = () => {
})
})
const layout = getLayoutByDagre(nodesWithUpdatedSizes, edges)
const layout = await getLayoutByDagre(nodesWithUpdatedSizes, edges)
const rankMap = {} as Record<string, Node>
nodesWithUpdatedSizes.forEach((node) => {
if (!node.parentId && node.type === CUSTOM_NODE) {
const rank = layout.node(node.id).rank!
if (!rankMap[rank]) {
rankMap[rank] = node
}
else {
if (rankMap[rank].position.y > node.position.y)
rankMap[rank] = node
// Build layer map for vertical alignment - nodes in the same layer should align
const layerMap = new Map<number, { minY: number; maxHeight: number }>()
layout.nodes.forEach((layoutInfo) => {
if (layoutInfo.layer !== undefined) {
const existing = layerMap.get(layoutInfo.layer)
const newLayerInfo = {
minY: existing ? Math.min(existing.minY, layoutInfo.y) : layoutInfo.y,
maxHeight: existing ? Math.max(existing.maxHeight, layoutInfo.height) : layoutInfo.height,
}
layerMap.set(layoutInfo.layer, newLayerInfo)
}
})
const newNodes = produce(nodesWithUpdatedSizes, (draft) => {
draft.forEach((node) => {
if (!node.parentId && node.type === CUSTOM_NODE) {
const nodeWithPosition = layout.node(node.id)
const layoutInfo = layout.nodes.get(node.id)
if (!layoutInfo)
return
// Calculate vertical position with layer alignment
let yPosition = layoutInfo.y
if (layoutInfo.layer !== undefined) {
const layerInfo = layerMap.get(layoutInfo.layer)
if (layerInfo) {
// Align to the center of the tallest node in this layer
const layerCenterY = layerInfo.minY + layerInfo.maxHeight / 2
yPosition = layerCenterY - layoutInfo.height / 2
}
}
node.position = {
x: nodeWithPosition.x - node.width! / 2,
y: nodeWithPosition.y - node.height! / 2 + rankMap[nodeWithPosition.rank!].height! / 2,
x: layoutInfo.x,
y: yPosition,
}
}
})
loopAndIterationNodes.forEach((parentNode) => {
const childLayout = childLayoutsMap[parentNode.id]
if (!childLayout) return
if (!childLayout)
return
const childNodes = draft.filter(node => node.parentId === parentNode.id)
const {
bounds,
nodes: layoutNodes,
} = childLayout
let minX = Infinity
let minY = Infinity
childNodes.forEach((childNode) => {
const layoutInfo = layoutNodes.get(childNode.id)
if (!layoutInfo)
return
childNodes.forEach((node) => {
if (childLayout.node(node.id)) {
const childNodeWithPosition = childLayout.node(node.id)
const nodeX = childNodeWithPosition.x - node.width! / 2
const nodeY = childNodeWithPosition.y - node.height! / 2
minX = Math.min(minX, nodeX)
minY = Math.min(minY, nodeY)
}
})
childNodes.forEach((node) => {
if (childLayout.node(node.id)) {
const childNodeWithPosition = childLayout.node(node.id)
node.position = {
x: NODE_LAYOUT_HORIZONTAL_PADDING + (childNodeWithPosition.x - node.width! / 2 - minX),
y: NODE_LAYOUT_VERTICAL_PADDING + (childNodeWithPosition.y - node.height! / 2 - minY),
}
childNode.position = {
x: NODE_LAYOUT_HORIZONTAL_PADDING + (layoutInfo.x - bounds.minX),
y: NODE_LAYOUT_VERTICAL_PADDING + (layoutInfo.y - bounds.minY),
}
})
})

View File

@ -2,7 +2,6 @@ import {
useCallback,
} from 'react'
import { uniqBy } from 'lodash-es'
import { useTranslation } from 'react-i18next'
import {
getIncomers,
getOutgoers,
@ -24,9 +23,7 @@ import {
useStore,
useWorkflowStore,
} from '../store'
import { getParallelInfo } from '../utils'
import {
PARALLEL_DEPTH_LIMIT,
SUPPORT_OUTPUT_VARS_NODE,
} from '../constants'
import type { IterationNodeType } from '../nodes/iteration/types'
@ -44,7 +41,6 @@ import {
import { CUSTOM_ITERATION_START_NODE } from '@/app/components/workflow/nodes/iteration-start/constants'
import { CUSTOM_LOOP_START_NODE } from '@/app/components/workflow/nodes/loop-start/constants'
import { basePath } from '@/utils/var'
import { MAX_PARALLEL_LIMIT } from '@/config'
import { useNodesMetaData } from '.'
export const useIsChatMode = () => {
@ -54,9 +50,7 @@ export const useIsChatMode = () => {
}
export const useWorkflow = () => {
const { t } = useTranslation()
const store = useStoreApi()
const workflowStore = useWorkflowStore()
const { getAvailableBlocks } = useAvailableBlocks()
const { nodesMap } = useNodesMetaData()
@ -290,20 +284,6 @@ export const useWorkflow = () => {
return isUsed
}, [isVarUsedInNodes])
const checkParallelLimit = useCallback((nodeId: string, nodeHandle = 'source') => {
const {
edges,
} = store.getState()
const connectedEdges = edges.filter(edge => edge.source === nodeId && edge.sourceHandle === nodeHandle)
if (connectedEdges.length > MAX_PARALLEL_LIMIT - 1) {
const { setShowTips } = workflowStore.getState()
setShowTips(t('workflow.common.parallelTip.limit', { num: MAX_PARALLEL_LIMIT }))
return false
}
return true
}, [store, workflowStore, t])
const getRootNodesById = useCallback((nodeId: string) => {
const {
getNodes,
@ -374,34 +354,7 @@ export const useWorkflow = () => {
return startNodes
}, [nodesMap, getRootNodesById])
const checkNestedParallelLimit = useCallback((nodes: Node[], edges: Edge[], targetNode?: Node) => {
const startNodes = getStartNodes(nodes, targetNode)
for (let i = 0; i < startNodes.length; i++) {
const {
parallelList,
hasAbnormalEdges,
} = getParallelInfo(startNodes[i], nodes, edges)
const { workflowConfig } = workflowStore.getState()
if (hasAbnormalEdges)
return false
for (let i = 0; i < parallelList.length; i++) {
const parallel = parallelList[i]
if (parallel.depth > (workflowConfig?.parallel_depth_limit || PARALLEL_DEPTH_LIMIT)) {
const { setShowTips } = workflowStore.getState()
setShowTips(t('workflow.common.parallelTip.depthLimit', { num: (workflowConfig?.parallel_depth_limit || PARALLEL_DEPTH_LIMIT) }))
return false
}
}
}
return true
}, [t, workflowStore, getStartNodes])
const isValidConnection = useCallback(({ source, sourceHandle, target }: Connection) => {
const isValidConnection = useCallback(({ source, sourceHandle: _sourceHandle, target }: Connection) => {
const {
edges,
getNodes,
@ -410,9 +363,6 @@ export const useWorkflow = () => {
const sourceNode: Node = nodes.find(node => node.id === source)!
const targetNode: Node = nodes.find(node => node.id === target)!
if (!checkParallelLimit(source!, sourceHandle || 'source'))
return false
if (sourceNode.type === CUSTOM_NOTE_NODE || targetNode.type === CUSTOM_NOTE_NODE)
return false
@ -445,7 +395,7 @@ export const useWorkflow = () => {
}
return !hasCycle(targetNode)
}, [store, checkParallelLimit, getAvailableBlocks])
}, [store, getAvailableBlocks])
return {
getNodeById,
@ -457,8 +407,6 @@ export const useWorkflow = () => {
isVarUsedInNodes,
removeUsedVarInNodes,
isNodeVarsUsedInNodes,
checkParallelLimit,
checkNestedParallelLimit,
isValidConnection,
getBeforeNodeById,
getIterationNodeChildren,

View File

@ -71,7 +71,6 @@ import PanelContextmenu from './panel-contextmenu'
import NodeContextmenu from './node-contextmenu'
import SelectionContextmenu from './selection-contextmenu'
import SyncingDataModal from './syncing-data-modal'
import LimitTips from './limit-tips'
import { setupScrollToNodeListener } from './utils/node-navigation'
import {
useStore,
@ -378,7 +377,6 @@ export const Workflow: FC<WorkflowProps> = memo(({
/>
)
}
<LimitTips />
{children}
<ReactFlow
nodeTypes={nodeTypes}
@ -414,10 +412,10 @@ export const Workflow: FC<WorkflowProps> = memo(({
nodesFocusable={!nodesReadOnly}
edgesFocusable={!nodesReadOnly}
panOnScroll={false}
panOnDrag={controlMode === ControlMode.Hand && !workflowReadOnly}
zoomOnPinch={!workflowReadOnly}
zoomOnScroll={!workflowReadOnly}
zoomOnDoubleClick={!workflowReadOnly}
panOnDrag={controlMode === ControlMode.Hand}
zoomOnPinch={true}
zoomOnScroll={true}
zoomOnDoubleClick={true}
isValidConnection={isValidConnection}
selectionKeyCode={null}
selectionMode={SelectionMode.Partial}

View File

@ -1,39 +0,0 @@
import {
RiAlertFill,
RiCloseLine,
} from '@remixicon/react'
import { useStore } from './store'
import ActionButton from '@/app/components/base/action-button'
// Floating warning banner shown at the bottom-center of the workflow canvas
// when the store's `showTips` message is non-empty (e.g. a limit was hit).
// Dismissed by clearing the message via the close button.
const LimitTips = () => {
const showTips = useStore(s => s.showTips)
const setShowTips = useStore(s => s.setShowTips)
// Render nothing while there is no message to display.
if (!showTips)
return null
return (
<div className='absolute bottom-16 left-1/2 z-[9] flex h-10 -translate-x-1/2 items-center rounded-xl border border-components-panel-border bg-components-panel-bg-blur p-2 shadow-md'>
{/* Decorative warning-tinted gradient overlay behind the content. */}
<div
className='absolute inset-0 rounded-xl opacity-[0.4]'
style={{
background: 'linear-gradient(92deg, rgba(247, 144, 9, 0.25) 0%, rgba(255, 255, 255, 0.00) 100%)',
}}
></div>
<div className='flex h-5 w-5 items-center justify-center'>
<RiAlertFill className='h-4 w-4 text-text-warning-secondary' />
</div>
{/* The tip text itself comes straight from the store. */}
<div className='system-xs-medium mx-1 px-1 text-text-primary'>
{showTips}
</div>
{/* z-[1] keeps the button clickable above the gradient overlay. */}
<ActionButton
className='z-[1]'
onClick={() => setShowTips('')}
>
<RiCloseLine className='h-4 w-4' />
</ActionButton>
</div>
)
}
export default LimitTips

View File

@ -12,7 +12,6 @@ import {
useAvailableBlocks,
useNodesInteractions,
useNodesReadOnly,
useWorkflow,
} from '@/app/components/workflow/hooks'
import BlockSelector from '@/app/components/workflow/block-selector'
import type {
@ -39,7 +38,6 @@ const Add = ({
const { handleNodeAdd } = useNodesInteractions()
const { nodesReadOnly } = useNodesReadOnly()
const { availableNextBlocks } = useAvailableBlocks(nodeData.type, nodeData.isInIteration || nodeData.isInLoop)
const { checkParallelLimit } = useWorkflow()
const handleSelect = useCallback<OnSelectBlock>((type, toolDefaultValue) => {
handleNodeAdd(
@ -52,14 +50,11 @@ const Add = ({
prevNodeSourceHandle: sourceHandle,
},
)
}, [nodeId, sourceHandle, handleNodeAdd])
}, [handleNodeAdd])
const handleOpenChange = useCallback((newOpen: boolean) => {
if (newOpen && !checkParallelLimit(nodeId, sourceHandle))
return
setOpen(newOpen)
}, [checkParallelLimit, nodeId, sourceHandle])
}, [])
const tip = useMemo(() => {
if (isFailBranch)

View File

@ -22,7 +22,6 @@ import {
useIsChatMode,
useNodesInteractions,
useNodesReadOnly,
useWorkflow,
} from '../../../hooks'
import {
useStore,
@ -132,7 +131,6 @@ export const NodeSourceHandle = memo(({
const { availableNextBlocks } = useAvailableBlocks(data.type, data.isInIteration || data.isInLoop)
const isConnectable = !!availableNextBlocks.length
const isChatMode = useIsChatMode()
const { checkParallelLimit } = useWorkflow()
const connected = data._connectedSourceHandleIds?.includes(handleId)
const handleOpenChange = useCallback((v: boolean) => {
@ -140,9 +138,8 @@ export const NodeSourceHandle = memo(({
}, [])
const handleHandleClick = useCallback((e: MouseEvent) => {
e.stopPropagation()
if (checkParallelLimit(id, handleId))
setOpen(v => !v)
}, [checkParallelLimit, id, handleId])
setOpen(v => !v)
}, [])
const handleSelect = useCallback((type: BlockEnum, toolDefaultValue?: ToolDefaultValue) => {
handleNodeAdd(
{

View File

@ -42,6 +42,7 @@ import type { RAGPipelineVariable } from '@/models/pipeline'
import {
AGENT_OUTPUT_STRUCT,
FILE_STRUCT,
HTTP_REQUEST_OUTPUT_STRUCT,
KNOWLEDGE_RETRIEVAL_OUTPUT_STRUCT,
LLM_OUTPUT_STRUCT,
@ -138,6 +139,10 @@ export const varTypeToStructType = (type: VarType): Type => {
[VarType.boolean]: Type.boolean,
[VarType.object]: Type.object,
[VarType.array]: Type.array,
[VarType.arrayString]: Type.array,
[VarType.arrayNumber]: Type.array,
[VarType.arrayObject]: Type.array,
[VarType.arrayFile]: Type.array,
} as any
)[type] || Type.string
)
@ -282,15 +287,6 @@ const findExceptVarInObject = (
children: filteredObj.children,
}
})
if (isFile && Array.isArray(childrenResult)) {
if (childrenResult.length === 0) {
childrenResult = OUTPUT_FILE_SUB_VARIABLES.map(key => ({
variable: key,
type: key === 'size' ? VarType.number : VarType.string,
}))
}
}
}
else {
childrenResult = []
@ -587,17 +583,15 @@ const formatItem = (
variable: outputKey,
type:
output.type === 'array'
? (`Array[${
output.items?.type
? output.items.type.slice(0, 1).toLocaleUpperCase()
+ output.items.type.slice(1)
: 'Unknown'
? (`Array[${output.items?.type
? output.items.type.slice(0, 1).toLocaleUpperCase()
+ output.items.type.slice(1)
: 'Unknown'
}]` as VarType)
: (`${
output.type
? output.type.slice(0, 1).toLocaleUpperCase()
+ output.type.slice(1)
: 'Unknown'
: (`${output.type
? output.type.slice(0, 1).toLocaleUpperCase()
+ output.type.slice(1)
: 'Unknown'
}` as VarType),
})
},
@ -693,9 +687,10 @@ const formatItem = (
const children = (() => {
if (isFile) {
return OUTPUT_FILE_SUB_VARIABLES.map((key) => {
const def = FILE_STRUCT.find(c => c.variable === key)
return {
variable: key,
type: key === 'size' ? VarType.number : VarType.string,
type: def?.type || VarType.string,
}
})
}
@ -717,9 +712,10 @@ const formatItem = (
if (isFile) {
return {
children: OUTPUT_FILE_SUB_VARIABLES.map((key) => {
const def = FILE_STRUCT.find(c => c.variable === key)
return {
variable: key,
type: key === 'size' ? VarType.number : VarType.string,
type: def?.type || VarType.string,
}
}),
}

View File

@ -127,7 +127,7 @@ const VarReferencePicker: FC<Props> = ({
const reactflow = useReactFlow()
const startNode = availableNodes.find((node: any) => {
const startNode = availableNodes.find((node: Node) => {
return node.data.type === BlockEnum.Start
})
@ -407,7 +407,10 @@ const VarReferencePicker: FC<Props> = ({
<WrapElem onClick={() => {
if (readonly)
return
!isConstant ? setOpen(!open) : setControlFocus(Date.now())
if (!isConstant)
setOpen(!open)
else
setControlFocus(Date.now())
}} className='group/picker-trigger-wrap relative !flex'>
<>
{isAddBtnTrigger
@ -457,7 +460,10 @@ const VarReferencePicker: FC<Props> = ({
onClick={() => {
if (readonly)
return
!isConstant ? setOpen(!open) : setControlFocus(Date.now())
if (!isConstant)
setOpen(!open)
else
setControlFocus(Date.now())
}}
className='h-full grow'
>

View File

@ -18,25 +18,12 @@ import { Type } from '../../../llm/types'
import PickerStructurePanel from '@/app/components/workflow/nodes/_base/components/variable/object-child-tree-panel/picker'
import { isSpecialVar, varTypeToStructType } from './utils'
import type { Field } from '@/app/components/workflow/nodes/llm/types'
import { FILE_STRUCT } from '@/app/components/workflow/constants'
import { noop } from 'lodash-es'
import { CodeAssistant, MagicEdit } from '@/app/components/base/icons/src/vender/line/general'
import ManageInputField from './manage-input-field'
import { VariableIconWithColor } from '@/app/components/workflow/nodes/_base/components/variable/variable-label'
import { Variable02 } from '@/app/components/base/icons/src/vender/solid/development'
type ObjectChildrenProps = {
nodeId: string
title: string
data: Var[]
objPath: string[]
onChange: (value: ValueSelector, item: Var) => void
onHovering?: (value: boolean) => void
itemWidth?: number
isSupportFileVar?: boolean
preferSchemaType?: boolean
}
type ItemProps = {
nodeId: string
title: string
@ -55,8 +42,6 @@ type ItemProps = {
preferSchemaType?: boolean
}
const objVarTypes = [VarType.object, VarType.file]
const Item: FC<ItemProps> = ({
nodeId,
title,
@ -106,8 +91,9 @@ const Item: FC<ItemProps> = ({
const objStructuredOutput: StructuredOutput | null = useMemo(() => {
if (!isObj) return null
const properties: Record<string, Field> = {};
(isFile ? FILE_STRUCT : (itemData.children as Var[])).forEach((c) => {
const properties: Record<string, Field> = {}
const childrenVars = (itemData.children as Var[]) || []
childrenVars.forEach((c) => {
properties[c.variable] = {
type: varTypeToStructType(c.type),
}
@ -120,7 +106,7 @@ const Item: FC<ItemProps> = ({
additionalProperties: false,
},
}
}, [isFile, isObj, itemData.children])
}, [isObj, itemData.children])
const structuredOutput = (() => {
if (isStructureOutput)
@ -151,7 +137,7 @@ const Item: FC<ItemProps> = ({
const isHovering = isItemHovering || isChildrenHovering
const open = (isObj || isStructureOutput) && isHovering
useEffect(() => {
onHovering && onHovering(isHovering)
onHovering?.(isHovering)
}, [isHovering])
const handleChosen = (e: React.MouseEvent) => {
e.stopPropagation()
@ -240,68 +226,6 @@ const Item: FC<ItemProps> = ({
)
}
const ObjectChildren: FC<ObjectChildrenProps> = ({
title,
nodeId,
objPath,
data,
onChange,
onHovering,
itemWidth,
isSupportFileVar,
preferSchemaType,
}) => {
const currObjPath = objPath
const itemRef = useRef<HTMLDivElement>(null)
const [isItemHovering, setIsItemHovering] = useState(false)
useHover(itemRef, {
onChange: (hovering) => {
if (hovering) {
setIsItemHovering(true)
}
else {
setTimeout(() => {
setIsItemHovering(false)
}, 100)
}
},
})
const [isChildrenHovering, setIsChildrenHovering] = useState(false)
const isHovering = isItemHovering || isChildrenHovering
useEffect(() => {
onHovering && onHovering(isHovering)
}, [isHovering])
useEffect(() => {
onHovering && onHovering(isItemHovering)
}, [isItemHovering])
// absolute top-[-2px]
return (
<div ref={itemRef} className=' space-y-1 rounded-lg border border-gray-200 bg-white shadow-lg' style={{
right: itemWidth ? itemWidth - 10 : 215,
minWidth: 252,
}}>
<div className='flex h-[22px] items-center px-3 text-xs font-normal text-gray-700'><span className='text-gray-500'>{title}.</span>{currObjPath.join('.')}</div>
{
(data && data.length > 0)
&& data.map((v, i) => (
<Item
key={i}
nodeId={nodeId}
title={title}
objPath={objPath}
itemData={v}
onChange={onChange}
onHovering={setIsChildrenHovering}
isSupportFileVar={isSupportFileVar}
isException={v.isException}
preferSchemaType={preferSchemaType}
/>
))
}
</div>
)
}
type Props = {
hideSearch?: boolean
searchBoxClassName?: string
@ -448,4 +372,5 @@ const VarReferenceVars: FC<Props> = ({
</>
)
}
export default React.memo(VarReferenceVars)

View File

@ -25,12 +25,12 @@ type Props = {
} & Partial<ResultPanelProps>
const LastRun: FC<Props> = ({
appId,
appId: _appId,
nodeId,
canSingleRun,
isRunAfterSingleRun,
updateNodeRunningStatus,
nodeInfo,
nodeInfo: _nodeInfo,
runningStatus: oneStepRunRunningStatus,
onSingleRunClicked,
singleRunResult,

View File

@ -33,7 +33,7 @@ export const useResizePanel = (params?: UseResizePanelParams) => {
const initContainerWidthRef = useRef(0)
const initContainerHeightRef = useRef(0)
const isResizingRef = useRef(false)
const [prevUserSelectStyle, setPrevUserSelectStyle] = useState(getComputedStyle(document.body).userSelect)
const [prevUserSelectStyle, setPrevUserSelectStyle] = useState(() => getComputedStyle(document.body).userSelect)
const handleStartResize = useCallback((e: MouseEvent) => {
initXRef.current = e.clientX

View File

@ -5,6 +5,8 @@ import { useTranslation } from 'react-i18next'
import type { Timeout as TimeoutPayloadType } from '../../types'
import Input from '@/app/components/base/input'
import { FieldCollapse } from '@/app/components/workflow/nodes/_base/components/collapse'
import { useStore } from '@/app/components/workflow/store'
import { BlockEnum } from '@/app/components/workflow/types'
type Props = {
readonly: boolean
@ -61,6 +63,11 @@ const Timeout: FC<Props> = ({ readonly, payload, onChange }) => {
const { t } = useTranslation()
const { connect, read, write, max_connect_timeout, max_read_timeout, max_write_timeout } = payload ?? {}
// Get default config from store for max timeout values
const nodesDefaultConfigs = useStore(s => s.nodesDefaultConfigs)
const defaultConfig = nodesDefaultConfigs?.[BlockEnum.HttpRequest]
const defaultTimeout = defaultConfig?.timeout || {}
return (
<FieldCollapse title={t(`${i18nPrefix}.timeout.title`)}>
<div className='mt-2 space-y-1'>
@ -73,7 +80,7 @@ const Timeout: FC<Props> = ({ readonly, payload, onChange }) => {
value={connect}
onChange={v => onChange?.({ ...payload, connect: v })}
min={1}
max={max_connect_timeout || 300}
max={max_connect_timeout || defaultTimeout.max_connect_timeout || 10}
/>
<InputField
title={t('workflow.nodes.http.timeout.readLabel')!}
@ -83,7 +90,7 @@ const Timeout: FC<Props> = ({ readonly, payload, onChange }) => {
value={read}
onChange={v => onChange?.({ ...payload, read: v })}
min={1}
max={max_read_timeout || 600}
max={max_read_timeout || defaultTimeout.max_read_timeout || 600}
/>
<InputField
title={t('workflow.nodes.http.timeout.writeLabel')!}
@ -93,7 +100,7 @@ const Timeout: FC<Props> = ({ readonly, payload, onChange }) => {
value={write}
onChange={v => onChange?.({ ...payload, write: v })}
min={1}
max={max_write_timeout || 600}
max={max_write_timeout || defaultTimeout.max_write_timeout || 600}
/>
</div>
</div>

View File

@ -16,7 +16,7 @@ const strToKeyValueList = (value: string) => {
}
const useKeyValueList = (value: string, onChange: (value: string) => void, noFilter?: boolean) => {
const [list, doSetList] = useState<KeyValue[]>(value ? strToKeyValueList(value) : [])
const [list, doSetList] = useState<KeyValue[]>(() => value ? strToKeyValueList(value) : [])
const setList = (l: KeyValue[]) => {
doSetList(l.map((item) => {
return {

View File

@ -2,6 +2,7 @@ import type { FC } from 'react'
import {
memo,
useEffect,
useState,
} from 'react'
import {
Background,
@ -27,19 +28,20 @@ const Node: FC<NodeProps<IterationNodeType>> = ({
const nodesInitialized = useNodesInitialized()
const { handleNodeIterationRerender } = useNodeIterationInteractions()
const { t } = useTranslation()
const [showTips, setShowTips] = useState(data._isShowTips)
useEffect(() => {
if (nodesInitialized)
handleNodeIterationRerender(id)
if (data.is_parallel && data._isShowTips) {
if (data.is_parallel && showTips) {
Toast.notify({
type: 'warning',
message: t(`${i18nPrefix}.answerNodeWarningDesc`),
duration: 5000,
})
data._isShowTips = false
setShowTips(false)
}
}, [nodesInitialized, id, handleNodeIterationRerender, data, t])
}, [nodesInitialized, id, handleNodeIterationRerender, data.is_parallel, showTips, t])
return (
<div className={cn(

View File

@ -86,7 +86,11 @@ const OptionCard = memo(({
readonly && 'cursor-not-allowed',
wrapperClassName && (typeof wrapperClassName === 'function' ? wrapperClassName(isActive) : wrapperClassName),
)}
onClick={() => !readonly && enableSelect && id && onClick?.(id)}
onClick={(e) => {
e.stopPropagation()
if (!readonly && enableSelect && id)
onClick?.(id)
}}
>
<div className={cn(
'relative flex rounded-t-xl p-2',

View File

@ -2,6 +2,7 @@ import type { NodeDefault } from '../../types'
import type { KnowledgeBaseNodeType } from './types'
import { genNodeMetaData } from '@/app/components/workflow/utils'
import { BlockEnum } from '@/app/components/workflow/types'
import { IndexingType } from '@/app/components/datasets/create/step-two'
const metaData = genNodeMetaData({
sort: 3.1,
@ -27,8 +28,17 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = {
chunk_structure,
indexing_technique,
retrieval_model,
embedding_model,
embedding_model_provider,
index_chunk_variable_selector,
} = payload
const {
search_method,
reranking_enable,
reranking_model,
} = retrieval_model || {}
if (!chunk_structure) {
return {
isValid: false,
@ -36,6 +46,13 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = {
}
}
if (index_chunk_variable_selector.length === 0) {
return {
isValid: false,
errorMessage: t('workflow.nodes.knowledgeBase.chunksVariableIsRequired'),
}
}
if (!indexing_technique) {
return {
isValid: false,
@ -43,13 +60,27 @@ const nodeDefault: NodeDefault<KnowledgeBaseNodeType> = {
}
}
if (!retrieval_model || !retrieval_model.search_method) {
if (indexing_technique === IndexingType.QUALIFIED && (!embedding_model || !embedding_model_provider)) {
return {
isValid: false,
errorMessage: t('workflow.nodes.knowledgeBase.embeddingModelIsRequired'),
}
}
if (!retrieval_model || !search_method) {
return {
isValid: false,
errorMessage: t('workflow.nodes.knowledgeBase.retrievalSettingIsRequired'),
}
}
if (reranking_enable && (!reranking_model || !reranking_model.reranking_provider_name || !reranking_model.reranking_model_name)) {
return {
isValid: false,
errorMessage: t('workflow.nodes.knowledgeBase.rerankingModelIsRequired'),
}
}
return {
isValid: true,
errorMessage: '',

View File

@ -9,13 +9,17 @@ import {
ChunkStructureEnum,
IndexMethodEnum,
RetrievalSearchMethodEnum,
WeightedScoreEnum,
} from '../types'
import type {
HybridSearchModeEnum,
KnowledgeBaseNodeType,
RerankingModel,
} from '../types'
import {
HybridSearchModeEnum,
} from '../types'
import { isHighQualitySearchMethod } from '../utils'
import { DEFAULT_WEIGHTED_SCORE, RerankingModeEnum } from '@/models/datasets'
export const useConfig = (id: string) => {
const store = useStoreApi()
@ -35,6 +39,25 @@ export const useConfig = (id: string) => {
})
}, [id, handleNodeDataUpdateWithSyncDraft])
const getDefaultWeights = useCallback(({
embeddingModel,
embeddingModelProvider,
}: {
embeddingModel: string
embeddingModelProvider: string
}) => {
return {
vector_setting: {
vector_weight: DEFAULT_WEIGHTED_SCORE.other.semantic,
embedding_provider_name: embeddingModelProvider || '',
embedding_model_name: embeddingModel,
},
keyword_setting: {
keyword_weight: DEFAULT_WEIGHTED_SCORE.other.keyword,
},
}
}, [])
const handleChunkStructureChange = useCallback((chunkStructure: ChunkStructureEnum) => {
const nodeData = getNodeData()
const {
@ -80,39 +103,72 @@ export const useConfig = (id: string) => {
embeddingModelProvider: string
}) => {
const nodeData = getNodeData()
handleNodeDataUpdate({
const defaultWeights = getDefaultWeights({
embeddingModel,
embeddingModelProvider,
})
const changeData = {
embedding_model: embeddingModel,
embedding_model_provider: embeddingModelProvider,
retrieval_model: {
...nodeData?.data.retrieval_model,
vector_setting: {
...nodeData?.data.retrieval_model.vector_setting,
embedding_provider_name: embeddingModelProvider,
embedding_model_name: embeddingModel,
},
},
})
}, [getNodeData, handleNodeDataUpdate])
}
if (changeData.retrieval_model.weights) {
changeData.retrieval_model = {
...changeData.retrieval_model,
weights: {
...changeData.retrieval_model.weights,
vector_setting: {
...changeData.retrieval_model.weights.vector_setting,
embedding_provider_name: embeddingModelProvider,
embedding_model_name: embeddingModel,
},
},
}
}
else {
changeData.retrieval_model = {
...changeData.retrieval_model,
weights: defaultWeights,
}
}
handleNodeDataUpdate(changeData)
}, [getNodeData, getDefaultWeights, handleNodeDataUpdate])
const handleRetrievalSearchMethodChange = useCallback((searchMethod: RetrievalSearchMethodEnum) => {
const nodeData = getNodeData()
handleNodeDataUpdate({
const changeData = {
retrieval_model: {
...nodeData?.data.retrieval_model,
search_method: searchMethod,
reranking_mode: nodeData?.data.retrieval_model.reranking_mode || RerankingModeEnum.RerankingModel,
},
})
}
if (searchMethod === RetrievalSearchMethodEnum.hybrid) {
changeData.retrieval_model = {
...changeData.retrieval_model,
reranking_enable: changeData.retrieval_model.reranking_mode === RerankingModeEnum.RerankingModel,
}
}
handleNodeDataUpdate(changeData)
}, [getNodeData, handleNodeDataUpdate])
const handleHybridSearchModeChange = useCallback((hybridSearchMode: HybridSearchModeEnum) => {
const nodeData = getNodeData()
const defaultWeights = getDefaultWeights({
embeddingModel: nodeData?.data.embedding_model || '',
embeddingModelProvider: nodeData?.data.embedding_model_provider || '',
})
handleNodeDataUpdate({
retrieval_model: {
...nodeData?.data.retrieval_model,
reranking_mode: hybridSearchMode,
reranking_enable: hybridSearchMode === HybridSearchModeEnum.RerankingModel,
weights: nodeData?.data.retrieval_model.weights || defaultWeights,
},
})
}, [getNodeData, handleNodeDataUpdate])
}, [getNodeData, getDefaultWeights, handleNodeDataUpdate])
const handleRerankingModelEnabledChange = useCallback((rerankingModelEnabled: boolean) => {
const nodeData = getNodeData()
@ -130,11 +186,10 @@ export const useConfig = (id: string) => {
retrieval_model: {
...nodeData?.data.retrieval_model,
weights: {
weight_type: 'weighted_score',
weight_type: WeightedScoreEnum.Customized,
vector_setting: {
...nodeData?.data.retrieval_model.weights?.vector_setting,
vector_weight: weightedScore.value[0],
embedding_provider_name: '',
embedding_model_name: '',
},
keyword_setting: {
keyword_weight: weightedScore.value[1],

View File

@ -28,9 +28,9 @@ const Node: FC<NodeProps<KnowledgeBaseNodeType>> = ({ data }) => {
</div>
<div
className='system-xs-medium grow truncate text-right text-text-secondary'
title={data.retrieval_model.search_method}
title={data.retrieval_model?.search_method}
>
{settingsDisplay[data.retrieval_model.search_method as keyof typeof settingsDisplay]}
{settingsDisplay[data.retrieval_model?.search_method as keyof typeof settingsDisplay]}
</div>
</div>
</div>

View File

@ -1,6 +1,6 @@
'use client'
import type { FC } from 'react'
import React, { useCallback, useState } from 'react'
import React, { useCallback, useMemo } from 'react'
import { RiEqualizer2Line } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import type { MultipleRetrievalConfig, SingleRetrievalConfig } from '../types'
@ -14,8 +14,6 @@ import {
import ConfigRetrievalContent from '@/app/components/app/configuration/dataset-config/params-config/config-content'
import { RETRIEVE_TYPE } from '@/types/app'
import { DATASET_DEFAULT } from '@/config'
import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import Button from '@/app/components/base/button'
import type { DatasetConfigs } from '@/models/debug'
import type { DataSet } from '@/models/datasets'
@ -32,8 +30,8 @@ type Props = {
onSingleRetrievalModelChange?: (config: ModelConfig) => void
onSingleRetrievalModelParamsChange?: (config: ModelConfig) => void
readonly?: boolean
openFromProps?: boolean
onOpenFromPropsChange?: (openFromProps: boolean) => void
rerankModalOpen: boolean
onRerankModelOpenChange: (open: boolean) => void
selectedDatasets: DataSet[]
}
@ -45,26 +43,52 @@ const RetrievalConfig: FC<Props> = ({
onSingleRetrievalModelChange,
onSingleRetrievalModelParamsChange,
readonly,
openFromProps,
onOpenFromPropsChange,
rerankModalOpen,
onRerankModelOpenChange,
selectedDatasets,
}) => {
const { t } = useTranslation()
const [open, setOpen] = useState(false)
const mergedOpen = openFromProps !== undefined ? openFromProps : open
const { retrieval_mode, multiple_retrieval_config } = payload
const handleOpen = useCallback((newOpen: boolean) => {
setOpen(newOpen)
onOpenFromPropsChange?.(newOpen)
}, [onOpenFromPropsChange])
onRerankModelOpenChange(newOpen)
}, [onRerankModelOpenChange])
const {
currentProvider: validRerankDefaultProvider,
currentModel: validRerankDefaultModel,
} = useModelListAndDefaultModelAndCurrentProviderAndModel(ModelTypeEnum.rerank)
const datasetConfigs = useMemo(() => {
const {
reranking_model,
top_k,
score_threshold,
reranking_mode,
weights,
reranking_enable,
} = multiple_retrieval_config || {}
return {
retrieval_model: retrieval_mode,
reranking_model: (reranking_model?.provider && reranking_model?.model)
? {
reranking_provider_name: reranking_model?.provider,
reranking_model_name: reranking_model?.model,
}
: {
reranking_provider_name: '',
reranking_model_name: '',
},
top_k: top_k || DATASET_DEFAULT.top_k,
score_threshold_enabled: !(score_threshold === undefined || score_threshold === null),
score_threshold,
datasets: {
datasets: [],
},
reranking_mode,
weights,
reranking_enable,
}
}, [retrieval_mode, multiple_retrieval_config])
const { multiple_retrieval_config } = payload
const handleChange = useCallback((configs: DatasetConfigs, isRetrievalModeChange?: boolean) => {
// Legacy code, for compatibility, have to keep it
if (isRetrievalModeChange) {
onRetrievalModeChange(configs.retrieval_model)
return
@ -72,13 +96,11 @@ const RetrievalConfig: FC<Props> = ({
onMultipleRetrievalConfigChange({
top_k: configs.top_k,
score_threshold: configs.score_threshold_enabled ? (configs.score_threshold ?? DATASET_DEFAULT.score_threshold) : null,
reranking_model: payload.retrieval_mode === RETRIEVE_TYPE.oneWay
reranking_model: retrieval_mode === RETRIEVE_TYPE.oneWay
? undefined
// eslint-disable-next-line sonarjs/no-nested-conditional
: (!configs.reranking_model?.reranking_provider_name
? {
provider: validRerankDefaultProvider?.provider || '',
model: validRerankDefaultModel?.model || '',
}
? undefined
: {
provider: configs.reranking_model?.reranking_provider_name,
model: configs.reranking_model?.reranking_model_name,
@ -87,11 +109,11 @@ const RetrievalConfig: FC<Props> = ({
weights: configs.weights,
reranking_enable: configs.reranking_enable,
})
}, [onMultipleRetrievalConfigChange, payload.retrieval_mode, validRerankDefaultProvider, validRerankDefaultModel, onRetrievalModeChange])
}, [onMultipleRetrievalConfigChange, retrieval_mode, onRetrievalModeChange])
return (
<PortalToFollowElem
open={mergedOpen}
open={rerankModalOpen}
onOpenChange={handleOpen}
placement='bottom-end'
offset={{
@ -102,14 +124,14 @@ const RetrievalConfig: FC<Props> = ({
onClick={() => {
if (readonly)
return
handleOpen(!mergedOpen)
handleOpen(!rerankModalOpen)
}}
>
<Button
variant='ghost'
size='small'
disabled={readonly}
className={cn(open && 'bg-components-button-ghost-bg-hover')}
className={cn(rerankModalOpen && 'bg-components-button-ghost-bg-hover')}
>
<RiEqualizer2Line className='mr-1 h-3.5 w-3.5' />
{t('dataset.retrievalSettings')}
@ -118,35 +140,13 @@ const RetrievalConfig: FC<Props> = ({
<PortalToFollowElemContent style={{ zIndex: 1001 }}>
<div className='w-[404px] rounded-2xl border border-components-panel-border bg-components-panel-bg px-4 pb-4 pt-3 shadow-xl'>
<ConfigRetrievalContent
datasetConfigs={
{
retrieval_model: payload.retrieval_mode,
reranking_model: multiple_retrieval_config?.reranking_model?.provider
? {
reranking_provider_name: multiple_retrieval_config.reranking_model?.provider,
reranking_model_name: multiple_retrieval_config.reranking_model?.model,
}
: {
reranking_provider_name: '',
reranking_model_name: '',
},
top_k: multiple_retrieval_config?.top_k || DATASET_DEFAULT.top_k,
score_threshold_enabled: !(multiple_retrieval_config?.score_threshold === undefined || multiple_retrieval_config.score_threshold === null),
score_threshold: multiple_retrieval_config?.score_threshold,
datasets: {
datasets: [],
},
reranking_mode: multiple_retrieval_config?.reranking_mode,
weights: multiple_retrieval_config?.weights,
reranking_enable: multiple_retrieval_config?.reranking_enable,
}
}
datasetConfigs={datasetConfigs}
onChange={handleChange}
selectedDatasets={selectedDatasets}
isInWorkflow
singleRetrievalModelConfig={singleRetrievalModelConfig}
onSingleRetrievalModelChange={onSingleRetrievalModelChange}
onSingleRetrievalModelParamsChange={onSingleRetrievalModelParamsChange}
selectedDatasets={selectedDatasets}
/>
</div>
</PortalToFollowElemContent>

View File

@ -1,6 +1,6 @@
import type { NodeDefault } from '../../types'
import type { KnowledgeRetrievalNodeType } from './types'
import { checkoutRerankModelConfigedInRetrievalSettings } from './utils'
import { checkoutRerankModelConfiguredInRetrievalSettings } from './utils'
import { DATASET_DEFAULT } from '@/config'
import { RETRIEVE_TYPE } from '@/types/app'
import { genNodeMetaData } from '@/app/components/workflow/utils'
@ -36,7 +36,7 @@ const nodeDefault: NodeDefault<KnowledgeRetrievalNodeType> = {
const { _datasets, multiple_retrieval_config, retrieval_mode } = payload
if (retrieval_mode === RETRIEVE_TYPE.multiWay) {
const checked = checkoutRerankModelConfigedInRetrievalSettings(_datasets || [], multiple_retrieval_config)
const checked = checkoutRerankModelConfiguredInRetrievalSettings(_datasets || [], multiple_retrieval_config)
if (!errorMessages && !checked)
errorMessages = t(`${i18nPrefix}.errorMsg.fieldRequired`, { field: t(`${i18nPrefix}.errorMsg.fields.rerankModel`) })

View File

@ -1,7 +1,6 @@
import type { FC } from 'react'
import {
memo,
useCallback,
useMemo,
} from 'react'
import { intersectionBy } from 'lodash-es'
@ -53,10 +52,6 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({
availableNumberNodesWithParent,
} = useConfig(id, data)
const handleOpenFromPropsChange = useCallback((openFromProps: boolean) => {
setRerankModelOpen(openFromProps)
}, [setRerankModelOpen])
const metadataList = useMemo(() => {
return intersectionBy(...selectedDatasets.filter((dataset) => {
return !!dataset.doc_metadata
@ -68,7 +63,6 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({
return (
<div className='pt-2'>
<div className='space-y-4 px-4 pb-2'>
{/* {JSON.stringify(inputs, null, 2)} */}
<Field
title={t(`${i18nPrefix}.queryVariable`)}
required
@ -100,8 +94,8 @@ const Panel: FC<NodePanelProps<KnowledgeRetrievalNodeType>> = ({
onSingleRetrievalModelChange={handleModelChanged as any}
onSingleRetrievalModelParamsChange={handleCompletionParamsChange}
readonly={readOnly || !selectedDatasets.length}
openFromProps={rerankModelOpen}
onOpenFromPropsChange={handleOpenFromPropsChange}
rerankModalOpen={rerankModelOpen}
onRerankModelOpenChange={setRerankModelOpen}
selectedDatasets={selectedDatasets}
/>
{!readOnly && (<div className='h-3 w-px bg-divider-regular'></div>)}

View File

@ -204,10 +204,11 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => {
const handleMultipleRetrievalConfigChange = useCallback((newConfig: MultipleRetrievalConfig) => {
const newInputs = produce(inputs, (draft) => {
draft.multiple_retrieval_config = getMultipleRetrievalConfig(newConfig!, selectedDatasets, selectedDatasets, {
const newMultipleRetrievalConfig = getMultipleRetrievalConfig(newConfig!, selectedDatasets, selectedDatasets, {
provider: currentRerankProvider?.provider,
model: currentRerankModel?.model,
})
draft.multiple_retrieval_config = newMultipleRetrievalConfig
})
setInputs(newInputs)
}, [inputs, setInputs, selectedDatasets, currentRerankModel, currentRerankProvider])
@ -254,10 +255,11 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => {
if (payload.retrieval_mode === RETRIEVE_TYPE.multiWay && newDatasets.length > 0) {
const multipleRetrievalConfig = draft.multiple_retrieval_config
draft.multiple_retrieval_config = getMultipleRetrievalConfig(multipleRetrievalConfig!, newDatasets, selectedDatasets, {
const newMultipleRetrievalConfig = getMultipleRetrievalConfig(multipleRetrievalConfig!, newDatasets, selectedDatasets, {
provider: currentRerankProvider?.provider,
model: currentRerankModel?.model,
})
draft.multiple_retrieval_config = newMultipleRetrievalConfig
}
})
updateDatasetsDetail(newDatasets)

View File

@ -10,6 +10,7 @@ import type {
import {
DEFAULT_WEIGHTED_SCORE,
RerankingModeEnum,
WeightedScoreEnum,
} from '@/models/datasets'
import { RETRIEVE_METHOD } from '@/types/app'
import { DATASET_DEFAULT } from '@/config'
@ -93,10 +94,12 @@ export const getMultipleRetrievalConfig = (
multipleRetrievalConfig: MultipleRetrievalConfig,
selectedDatasets: DataSet[],
originalDatasets: DataSet[],
validRerankModel?: { provider?: string; model?: string },
fallbackRerankModel?: { provider?: string; model?: string }, // fallback rerank model
) => {
const shouldSetWeightDefaultValue = xorBy(selectedDatasets, originalDatasets, 'id').length > 0
const rerankModelIsValid = validRerankModel?.provider && validRerankModel?.model
// Check if the selected datasets are different from the original datasets
const isDatasetsChanged = xorBy(selectedDatasets, originalDatasets, 'id').length > 0
// Check if the rerank model is valid
const isFallbackRerankModelValid = !!(fallbackRerankModel?.provider && fallbackRerankModel?.model)
const {
allHighQuality,
@ -125,14 +128,16 @@ export const getMultipleRetrievalConfig = (
reranking_mode,
reranking_model,
weights,
reranking_enable: ((allInternal && allEconomic) || allExternal) ? reranking_enable : shouldSetWeightDefaultValue,
reranking_enable,
}
const setDefaultWeights = () => {
result.weights = {
weight_type: WeightedScoreEnum.Customized,
vector_setting: {
vector_weight: allHighQualityVectorSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityVectorSearch.semantic
// eslint-disable-next-line sonarjs/no-nested-conditional
: allHighQualityFullTextSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityFullTextSearch.semantic
: DEFAULT_WEIGHTED_SCORE.other.semantic,
@ -142,6 +147,7 @@ export const getMultipleRetrievalConfig = (
keyword_setting: {
keyword_weight: allHighQualityVectorSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityVectorSearch.keyword
// eslint-disable-next-line sonarjs/no-nested-conditional
: allHighQualityFullTextSearch
? DEFAULT_WEIGHTED_SCORE.allHighQualityFullTextSearch.keyword
: DEFAULT_WEIGHTED_SCORE.other.keyword,
@ -149,65 +155,106 @@ export const getMultipleRetrievalConfig = (
}
}
if (allEconomic || mixtureHighQualityAndEconomic || inconsistentEmbeddingModel || allExternal || mixtureInternalAndExternal) {
/**
* In this case, user can manually toggle reranking
* So should keep the reranking_enable value
* But the default reranking_model should be set
*/
if ((allEconomic && allInternal) || allExternal) {
result.reranking_mode = RerankingModeEnum.RerankingModel
if (!result.reranking_model?.provider || !result.reranking_model?.model) {
if (rerankModelIsValid) {
result.reranking_enable = reranking_enable !== false
result.reranking_model = {
provider: validRerankModel?.provider || '',
model: validRerankModel?.model || '',
}
}
else {
result.reranking_model = {
provider: '',
model: '',
}
// Need to check if the reranking model should be set to default when first time initialized
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
else {
result.reranking_enable = reranking_enable !== false
}
result.reranking_enable = reranking_enable
}
/**
* In this case, reranking_enable must be true
* And if rerank model is not set, should set the default rerank model
*/
if (mixtureHighQualityAndEconomic || inconsistentEmbeddingModel || mixtureInternalAndExternal) {
result.reranking_mode = RerankingModeEnum.RerankingModel
// Need to check if the reranking model should be set to default when first time initialized
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
result.reranking_enable = true
}
/**
* In this case, user can choose to use weighted score or rerank model
* But if the reranking_mode is not initialized, should set the default rerank model and reranking_enable to true
* and set reranking_mode to reranking_model
*/
if (allHighQuality && !inconsistentEmbeddingModel && allInternal) {
// If not initialized, check if the default rerank model is valid
if (!reranking_mode) {
if (validRerankModel?.provider && validRerankModel?.model) {
if (isFallbackRerankModelValid) {
result.reranking_mode = RerankingModeEnum.RerankingModel
result.reranking_enable = reranking_enable !== false
result.reranking_enable = true
result.reranking_model = {
provider: validRerankModel.provider,
model: validRerankModel.model,
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
else {
result.reranking_mode = RerankingModeEnum.WeightedScore
result.reranking_enable = false
setDefaultWeights()
}
}
if (reranking_mode === RerankingModeEnum.WeightedScore && !weights)
setDefaultWeights()
if (reranking_mode === RerankingModeEnum.WeightedScore && weights && shouldSetWeightDefaultValue) {
if (rerankModelIsValid) {
result.reranking_mode = RerankingModeEnum.RerankingModel
result.reranking_enable = reranking_enable !== false
// After initialization, if datasets has no change, make sure the config has correct value
if (reranking_mode === RerankingModeEnum.WeightedScore) {
result.reranking_enable = false
if (!weights)
setDefaultWeights()
}
if (reranking_mode === RerankingModeEnum.RerankingModel) {
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: validRerankModel.provider || '',
model: validRerankModel.model || '',
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
result.reranking_enable = true
}
// Need to check if reranking_mode should be set to reranking_model when datasets changed
if (reranking_mode === RerankingModeEnum.WeightedScore && weights && isDatasetsChanged) {
if ((result.reranking_model?.provider && result.reranking_model?.model) || isFallbackRerankModelValid) {
result.reranking_mode = RerankingModeEnum.RerankingModel
result.reranking_enable = true
// eslint-disable-next-line sonarjs/nested-control-flow
if ((!result.reranking_model?.provider || !result.reranking_model?.model) && isFallbackRerankModelValid) {
result.reranking_model = {
provider: fallbackRerankModel.provider || '',
model: fallbackRerankModel.model || '',
}
}
}
else {
setDefaultWeights()
}
}
if (reranking_mode === RerankingModeEnum.RerankingModel && !rerankModelIsValid && shouldSetWeightDefaultValue) {
// Need to switch to weighted score when reranking model is not valid and datasets changed
if (
reranking_mode === RerankingModeEnum.RerankingModel
&& (!result.reranking_model?.provider || !result.reranking_model?.model)
&& !isFallbackRerankModelValid
&& isDatasetsChanged
) {
result.reranking_mode = RerankingModeEnum.WeightedScore
result.reranking_enable = false
setDefaultWeights()
}
}
@ -215,7 +262,7 @@ export const getMultipleRetrievalConfig = (
return result
}
export const checkoutRerankModelConfigedInRetrievalSettings = (
export const checkoutRerankModelConfiguredInRetrievalSettings = (
datasets: DataSet[],
multipleRetrievalConfig?: MultipleRetrievalConfig,
) => {
@ -225,6 +272,7 @@ export const checkoutRerankModelConfigedInRetrievalSettings = (
const {
allEconomic,
allExternal,
allInternal,
} = getSelectedDatasetsMode(datasets)
const {
@ -233,12 +281,8 @@ export const checkoutRerankModelConfigedInRetrievalSettings = (
reranking_model,
} = multipleRetrievalConfig
if (reranking_mode === RerankingModeEnum.RerankingModel && (!reranking_model?.provider || !reranking_model?.model)) {
if ((allEconomic || allExternal) && !reranking_enable)
return true
return false
}
if (reranking_mode === RerankingModeEnum.RerankingModel && (!reranking_model?.provider || !reranking_model?.model))
return ((allEconomic && allInternal) || allExternal) && !reranking_enable
return true
}

View File

@ -55,6 +55,7 @@ const Panel: FC<NodePanelProps<ListFilterNodeType>> = ({
value={inputs.variable || []}
onChange={handleVarChanges}
filterVar={filterVar}
isSupportFileVar={false}
typePlaceHolder='Array'
/>
</Field>

View File

@ -55,7 +55,7 @@ const JsonSchemaConfig: FC<JsonSchemaConfigProps> = ({
const docLink = useDocLink()
const [currentTab, setCurrentTab] = useState(SchemaView.VisualEditor)
const [jsonSchema, setJsonSchema] = useState(defaultSchema || DEFAULT_SCHEMA)
const [json, setJson] = useState(JSON.stringify(jsonSchema, null, 2))
const [json, setJson] = useState(() => JSON.stringify(jsonSchema, null, 2))
const [btnWidth, setBtnWidth] = useState(0)
const [parseError, setParseError] = useState<Error | null>(null)
const [validationError, setValidationError] = useState<string>('')
@ -120,7 +120,7 @@ const JsonSchemaConfig: FC<JsonSchemaConfigProps> = ({
setJson(JSON.stringify(schema, null, 2))
}, [currentTab])
const handleSubmit = useCallback((schema: any) => {
const handleSubmit = useCallback((schema: Record<string, unknown>) => {
const jsonSchema = jsonToSchema(schema) as SchemaRoot
if (currentTab === SchemaView.VisualEditor)
setJsonSchema(jsonSchema)
@ -139,8 +139,10 @@ const JsonSchemaConfig: FC<JsonSchemaConfigProps> = ({
const handleResetDefaults = useCallback(() => {
if (currentTab === SchemaView.VisualEditor) {
setHoveringProperty(null)
advancedEditing && setAdvancedEditing(false)
isAddingNewField && setIsAddingNewField(false)
if (advancedEditing)
setAdvancedEditing(false)
if (isAddingNewField)
setIsAddingNewField(false)
}
setJsonSchema(DEFAULT_SCHEMA)
setJson(JSON.stringify(DEFAULT_SCHEMA, null, 2))

View File

@ -30,7 +30,7 @@ enum GeneratorView {
result = 'result',
}
export const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
const JsonSchemaGenerator: FC<JsonSchemaGeneratorProps> = ({
onApply,
crossAxisOffset,
}) => {

View File

@ -87,8 +87,10 @@ const EditCard: FC<EditCardProps> = ({
})
useSubscribe('fieldChangeSuccess', () => {
isAddingNewField && setIsAddingNewField(false)
advancedEditing && setAdvancedEditing(false)
if (isAddingNewField)
setIsAddingNewField(false)
if (advancedEditing)
setAdvancedEditing(false)
})
const emitPropertyNameChange = useCallback(() => {
@ -150,14 +152,16 @@ const EditCard: FC<EditCardProps> = ({
}, [isAdvancedEditing, emitPropertyOptionsChange, currentFields])
const handleAdvancedOptionsChange = useCallback((options: AdvancedOptionsType) => {
let enumValue: any = options.enum
if (enumValue === '') {
let enumValue: SchemaEnumType | undefined
if (options.enum === '') {
enumValue = undefined
}
else {
enumValue = options.enum.replace(/\s/g, '').split(',')
const stringArray = options.enum.replace(/\s/g, '').split(',')
if (currentFields.type === Type.number)
enumValue = (enumValue as SchemaEnumType).map(value => Number(value)).filter(num => !Number.isNaN(num))
enumValue = stringArray.map(value => Number(value)).filter(num => !Number.isNaN(num))
else
enumValue = stringArray
}
setCurrentFields(prev => ({ ...prev, enum: enumValue }))
if (isAdvancedEditing) return

View File

@ -45,8 +45,10 @@ export const useSchemaNodeOperations = (props: VisualEditorProps) => {
onChange(backupSchema)
setBackupSchema(null)
}
isAddingNewField && setIsAddingNewField(false)
advancedEditing && setAdvancedEditing(false)
if (isAddingNewField)
setIsAddingNewField(false)
if (advancedEditing)
setAdvancedEditing(false)
setHoveringProperty(null)
})
@ -221,7 +223,8 @@ export const useSchemaNodeOperations = (props: VisualEditorProps) => {
})
useSubscribe('addField', (params) => {
advancedEditing && setAdvancedEditing(false)
if (advancedEditing)
setAdvancedEditing(false)
setBackupSchema(jsonSchema)
const { path } = params as AddEventParams
setIsAddingNewField(true)

View File

@ -99,6 +99,7 @@ const SchemaNode: FC<SchemaNodeProps> = ({
indentLeft[depth - 1],
)}>
<button
type="button"
onClick={handleExpand}
className='py-0.5 text-text-tertiary hover:text-text-accent'
>

View File

@ -301,6 +301,11 @@ const Panel: FC<NodePanelProps<LLMNodeType>> = ({
type='string'
description={t(`${i18nPrefix}.outputVars.output`)}
/>
<VarItem
name='reasoning_content'
type='string'
description={t(`${i18nPrefix}.outputVars.reasoning_content`)}
/>
<VarItem
name='usage'
type='object'

View File

@ -22,7 +22,7 @@ type ConditionValueProps = {
}
const ConditionValue = ({
variableSelector,
labelName,
labelName: _labelName,
operator,
value,
}: ConditionValueProps) => {

View File

@ -34,7 +34,7 @@ const ClassItem: FC<Props> = ({
filterVar,
}) => {
const { t } = useTranslation()
const [instanceId, setInstanceId] = useState(uniqueId())
const [instanceId, setInstanceId] = useState(() => uniqueId())
useEffect(() => {
setInstanceId(`${nodeId}-${uniqueId()}`)

View File

@ -124,7 +124,7 @@ const useConfig = (id: string, payload: VariableAssignerNodeType) => {
const handleAddGroup = useCallback(() => {
let maxInGroupName = 1
inputs.advanced_settings.groups.forEach((item) => {
const match = item.group_name.match(/(\d+)$/)
const match = /(\d+)$/.exec(item.group_name)
if (match) {
const num = Number.parseInt(match[1], 10)
if (num > maxInGroupName)

View File

@ -35,7 +35,8 @@ const VariableModalTrigger = ({
open={open}
onOpenChange={() => {
setOpen(v => !v)
open && onClose()
if (open)
onClose()
}}
placement='left-start'
offset={{
@ -45,7 +46,8 @@ const VariableModalTrigger = ({
>
<PortalToFollowElemTrigger onClick={() => {
setOpen(v => !v)
open && onClose()
if (open)
onClose()
}}>
<Button variant='primary'>
<RiAddLine className='mr-1 h-4 w-4' />

View File

@ -33,7 +33,8 @@ const VariableTrigger = ({
open={open}
onOpenChange={() => {
setOpen(v => !v)
open && onClose()
if (open)
onClose()
}}
placement='left-start'
offset={{
@ -43,7 +44,8 @@ const VariableTrigger = ({
>
<PortalToFollowElemTrigger onClick={() => {
setOpen(v => !v)
open && onClose()
if (open)
onClose()
}}>
<Button variant='primary'>
<RiAddLine className='mr-1 h-4 w-4' />

View File

@ -81,9 +81,12 @@ const RunPanel: FC<RunProps> = ({
const switchTab = async (tab: string) => {
setCurrentTab(tab)
if (tab === 'RESULT')
runDetailUrl && await getResult()
tracingListUrl && await getTracingList()
if (tab === 'RESULT') {
if (runDetailUrl)
await getResult()
}
if (tracingListUrl)
await getTracingList()
}
useEffect(() => {

View File

@ -109,7 +109,7 @@ const TracingPanel: FC<TracingPanelProps> = ({
onMouseLeave={handleParallelMouseLeave}
>
<div className="mb-1 flex items-center">
<button
<button type="button"
onClick={() => toggleCollapse(node.id)}
className={cn(
'mr-2 transition-colors',

View File

@ -29,10 +29,6 @@ export type WorkflowSliceShape = {
setControlPromptEditorRerenderKey: (controlPromptEditorRerenderKey: number) => void
showImportDSLModal: boolean
setShowImportDSLModal: (showImportDSLModal: boolean) => void
showTips: string
setShowTips: (showTips: string) => void
workflowConfig?: Record<string, any>
setWorkflowConfig: (workflowConfig: Record<string, any>) => void
fileUploadConfig?: FileUploadConfigResponse
setFileUploadConfig: (fileUploadConfig: FileUploadConfigResponse) => void
}
@ -59,10 +55,6 @@ export const createWorkflowSlice: StateCreator<WorkflowSliceShape> = set => ({
setControlPromptEditorRerenderKey: controlPromptEditorRerenderKey => set(() => ({ controlPromptEditorRerenderKey })),
showImportDSLModal: false,
setShowImportDSLModal: showImportDSLModal => set(() => ({ showImportDSLModal })),
showTips: '',
setShowTips: showTips => set(() => ({ showTips })),
workflowConfig: undefined,
setWorkflowConfig: workflowConfig => set(() => ({ workflowConfig })),
fileUploadConfig: undefined,
setFileUploadConfig: fileUploadConfig => set(() => ({ fileUploadConfig })),
})

View File

@ -1,246 +0,0 @@
import dagre from '@dagrejs/dagre'
import {
cloneDeep,
} from 'lodash-es'
import type {
Edge,
Node,
} from '../types'
import {
BlockEnum,
} from '../types'
import {
CUSTOM_NODE,
NODE_LAYOUT_HORIZONTAL_PADDING,
NODE_LAYOUT_MIN_DISTANCE,
NODE_LAYOUT_VERTICAL_PADDING,
} from '../constants'
import { CUSTOM_ITERATION_START_NODE } from '../nodes/iteration-start/constants'
import { CUSTOM_LOOP_START_NODE } from '../nodes/loop-start/constants'
// Lays out all top-level (non-nested) workflow nodes left-to-right with dagre.
// Returns the populated dagre graph; callers read positions via graph.node(id).
export const getLayoutByDagre = (originNodes: Node[], originEdges: Edge[]) => {
  // `compound: true` is required for the virtual-container strategy below.
  const dagreGraph = new dagre.graphlib.Graph({ compound: true })
  dagreGraph.setDefaultEdgeLabel(() => ({}))
  // Only root-level custom nodes participate; iteration/loop children are laid out separately.
  const nodes = cloneDeep(originNodes).filter(node => !node.parentId && node.type === CUSTOM_NODE)
  const edges = cloneDeep(originEdges).filter(edge => (!edge.data?.isInIteration && !edge.data?.isInLoop))
  // The default dagre layout algorithm often fails to correctly order the branches
  // of an If/Else node, leading to crossed edges.
  //
  // To solve this, we employ a "virtual container" strategy:
  // 1. A virtual, compound parent node (the "container") is created for each If/Else node's branches.
  // 2. Each direct child of the If/Else node is preceded by a virtual dummy node. These dummies are placed inside the container.
  // 3. A rigid, sequential chain of invisible edges is created between these dummy nodes (e.g., dummy_IF -> dummy_ELIF -> dummy_ELSE).
  //
  // This forces dagre to treat the ordered branches as an unbreakable, atomic group,
  // ensuring their layout respects the intended logical sequence.
  const ifElseNodes = nodes.filter(node => node.data.type === BlockEnum.IfElse)
  let virtualLogicApplied = false
  ifElseNodes.forEach((ifElseNode) => {
    const childEdges = edges.filter(e => e.source === ifElseNode.id)
    // A single outgoing branch cannot cross anything; skip the dummy machinery.
    if (childEdges.length <= 1)
      return
    virtualLogicApplied = true
    // Order branches as configured on the node: cases by index, the ELSE branch
    // (sourceHandle === 'false') always last.
    const sortedChildEdges = childEdges.sort((edgeA, edgeB) => {
      const handleA = edgeA.sourceHandle
      const handleB = edgeB.sourceHandle
      if (handleA && handleB) {
        const cases = (ifElseNode.data as any).cases || []
        const isAElse = handleA === 'false'
        const isBElse = handleB === 'false'
        if (isAElse) return 1
        if (isBElse) return -1
        const indexA = cases.findIndex((c: any) => c.case_id === handleA)
        const indexB = cases.findIndex((c: any) => c.case_id === handleB)
        if (indexA !== -1 && indexB !== -1)
          return indexA - indexB
      }
      return 0
    })
    // Virtual compound container holding one 1x1 dummy per branch.
    const parentDummyId = `dummy-parent-${ifElseNode.id}`
    dagreGraph.setNode(parentDummyId, { width: 1, height: 1 })
    const dummyNodes: string[] = []
    sortedChildEdges.forEach((edge) => {
      const dummyNodeId = `dummy-${edge.source}-${edge.target}`
      dummyNodes.push(dummyNodeId)
      dagreGraph.setNode(dummyNodeId, { width: 1, height: 1 })
      dagreGraph.setParent(dummyNodeId, parentDummyId)
      // Replace the original edge with source -> dummy -> target.
      const edgeIndex = edges.findIndex(e => e.id === edge.id)
      if (edgeIndex > -1)
        edges.splice(edgeIndex, 1)
      edges.push({ id: `e-${edge.source}-${dummyNodeId}`, source: edge.source, target: dummyNodeId, sourceHandle: edge.sourceHandle } as Edge)
      edges.push({ id: `e-${dummyNodeId}-${edge.target}`, source: dummyNodeId, target: edge.target, targetHandle: edge.targetHandle } as Edge)
    })
    // Chain the dummies together so dagre keeps the branch order fixed.
    for (let i = 0; i < dummyNodes.length - 1; i++) {
      const sourceDummy = dummyNodes[i]
      const targetDummy = dummyNodes[i + 1]
      edges.push({ id: `e-dummy-${sourceDummy}-${targetDummy}`, source: sourceDummy, target: targetDummy } as Edge)
    }
  })
  dagreGraph.setGraph({
    rankdir: 'LR',
    align: 'UL',
    nodesep: 40,
    // Tighter rank separation when dummy nodes were injected, to offset the
    // extra rank each dummy adds.
    ranksep: virtualLogicApplied ? 30 : 60,
    ranker: 'tight-tree',
    marginx: 30,
    marginy: 200,
  })
  nodes.forEach((node) => {
    dagreGraph.setNode(node.id, {
      width: node.width!,
      height: node.height!,
    })
  })
  edges.forEach((edge) => {
    dagreGraph.setEdge(edge.source, edge.target)
  })
  dagre.layout(dagreGraph)
  return dagreGraph
}
// Lays out the child nodes of an iteration/loop container with dagre, then
// manually pins the container's start node to the left of the first layer.
export const getLayoutForChildNodes = (parentNodeId: string, originNodes: Node[], originEdges: Edge[]) => {
  const dagreGraph = new dagre.graphlib.Graph()
  dagreGraph.setDefaultEdgeLabel(() => ({}))
  // Only nodes/edges scoped to this iteration/loop container participate.
  const nodes = cloneDeep(originNodes).filter(node => node.parentId === parentNodeId)
  const edges = cloneDeep(originEdges).filter(edge =>
    (edge.data?.isInIteration && edge.data?.iteration_id === parentNodeId)
    || (edge.data?.isInLoop && edge.data?.loop_id === parentNodeId),
  )
  const startNode = nodes.find(node =>
    node.type === CUSTOM_ITERATION_START_NODE
    || node.type === CUSTOM_LOOP_START_NODE
    || node.data?.type === BlockEnum.LoopStart
    || node.data?.type === BlockEnum.IterationStart,
  )
  // Without a start node, fall back to a plain layout of all children.
  if (!startNode) {
    dagreGraph.setGraph({
      rankdir: 'LR',
      align: 'UL',
      nodesep: 40,
      ranksep: 60,
      marginx: NODE_LAYOUT_HORIZONTAL_PADDING,
      marginy: NODE_LAYOUT_VERTICAL_PADDING,
    })
    nodes.forEach((node) => {
      dagreGraph.setNode(node.id, {
        width: node.width || 244,
        height: node.height || 100,
      })
    })
    edges.forEach((edge) => {
      dagreGraph.setEdge(edge.source, edge.target)
    })
    dagre.layout(dagreGraph)
    return dagreGraph
  }
  // Lay out everything except the start node, then place the start node by hand.
  const startNodeOutEdges = edges.filter(edge => edge.source === startNode.id)
  const firstConnectedNodes = startNodeOutEdges.map(edge =>
    nodes.find(node => node.id === edge.target),
  ).filter(Boolean) as Node[]
  const nonStartNodes = nodes.filter(node => node.id !== startNode.id)
  const nonStartEdges = edges.filter(edge => edge.source !== startNode.id && edge.target !== startNode.id)
  dagreGraph.setGraph({
    rankdir: 'LR',
    align: 'UL',
    nodesep: 40,
    ranksep: 60,
    marginx: NODE_LAYOUT_HORIZONTAL_PADDING / 2,
    marginy: NODE_LAYOUT_VERTICAL_PADDING / 2,
  })
  nonStartNodes.forEach((node) => {
    dagreGraph.setNode(node.id, {
      width: node.width || 244,
      height: node.height || 100,
    })
  })
  nonStartEdges.forEach((edge) => {
    dagreGraph.setEdge(edge.source, edge.target)
  })
  dagre.layout(dagreGraph)
  const startNodeSize = {
    width: startNode.width || 44,
    height: startNode.height || 48,
  }
  const startNodeX = NODE_LAYOUT_HORIZONTAL_PADDING / 1.5
  let startNodeY = 100
  let minFirstLayerX = Infinity
  let avgFirstLayerY = 0
  let firstLayerCount = 0
  if (firstConnectedNodes.length > 0) {
    // Vertically centre the start node on its direct successors and ensure
    // they sit far enough to its right.
    firstConnectedNodes.forEach((node) => {
      if (dagreGraph.node(node.id)) {
        const nodePos = dagreGraph.node(node.id)
        avgFirstLayerY += nodePos.y
        firstLayerCount++
        minFirstLayerX = Math.min(minFirstLayerX, nodePos.x - nodePos.width / 2)
      }
    })
    if (firstLayerCount > 0) {
      avgFirstLayerY /= firstLayerCount
      startNodeY = avgFirstLayerY
    }
    const minRequiredX = startNodeX + startNodeSize.width + NODE_LAYOUT_MIN_DISTANCE
    if (minFirstLayerX < minRequiredX) {
      // Shift all laid-out children right to keep a minimum gap from the start node.
      const shiftX = minRequiredX - minFirstLayerX
      nonStartNodes.forEach((node) => {
        if (dagreGraph.node(node.id)) {
          const nodePos = dagreGraph.node(node.id)
          dagreGraph.setNode(node.id, {
            x: nodePos.x + shiftX,
            y: nodePos.y,
            width: nodePos.width,
            height: nodePos.height,
          })
        }
      })
    }
  }
  // dagre stores node positions as centres, hence the width/2 offset here.
  dagreGraph.setNode(startNode.id, {
    x: startNodeX + startNodeSize.width / 2,
    y: startNodeY,
    width: startNodeSize.width,
    height: startNodeSize.height,
  })
  startNodeOutEdges.forEach((edge) => {
    dagreGraph.setEdge(edge.source, edge.target)
  })
  return dagreGraph
}

View File

@ -1,7 +1,7 @@
export * from './node'
export * from './edge'
export * from './workflow-init'
export * from './dagre-layout'
export * from './layout'
export * from './common'
export * from './tool'
export * from './workflow'

View File

@ -0,0 +1,529 @@
import ELK from 'elkjs/lib/elk.bundled.js'
import type { ElkNode, LayoutOptions } from 'elkjs/lib/elk-api'
import { cloneDeep } from 'lodash-es'
import type {
Edge,
Node,
} from '../types'
import {
BlockEnum,
} from '../types'
import {
CUSTOM_NODE,
NODE_LAYOUT_HORIZONTAL_PADDING,
NODE_LAYOUT_VERTICAL_PADDING,
} from '../constants'
import { CUSTOM_ITERATION_START_NODE } from '../nodes/iteration-start/constants'
import { CUSTOM_LOOP_START_NODE } from '../nodes/loop-start/constants'
import type { CaseItem, IfElseNodeType } from '../nodes/if-else/types'
// Although the file name refers to Dagre, the implementation now relies on ELK's layered algorithm.
// Keep the export signatures unchanged to minimise the blast radius while we migrate the layout stack.
const elk = new ELK()
const DEFAULT_NODE_WIDTH = 244
const DEFAULT_NODE_HEIGHT = 100
const ROOT_LAYOUT_OPTIONS = {
'elk.algorithm': 'layered',
'elk.direction': 'RIGHT',
// === Spacing - Maximum spacing to prevent any overlap ===
'elk.layered.spacing.nodeNodeBetweenLayers': '100',
'elk.spacing.nodeNode': '80',
'elk.spacing.edgeNode': '50',
'elk.spacing.edgeEdge': '30',
'elk.spacing.edgeLabel': '10',
'elk.spacing.portPort': '20',
// === Port Configuration ===
'elk.portConstraints': 'FIXED_ORDER',
'elk.layered.considerModelOrder.strategy': 'PREFER_EDGES',
'elk.port.side': 'SOUTH',
// === Node Placement - Best quality ===
'elk.layered.nodePlacement.strategy': 'NETWORK_SIMPLEX',
'elk.layered.nodePlacement.favorStraightEdges': 'true',
'elk.layered.nodePlacement.linearSegments.deflectionDampening': '0.5',
'elk.layered.nodePlacement.networkSimplex.nodeFlexibility': 'NODE_SIZE',
// === Edge Routing - Maximum quality ===
'elk.edgeRouting': 'SPLINES',
'elk.layered.edgeRouting.selfLoopPlacement': 'NORTH',
'elk.layered.edgeRouting.sloppySplineRouting': 'false',
'elk.layered.edgeRouting.splines.mode': 'CONSERVATIVE',
'elk.layered.edgeRouting.splines.sloppy.layerSpacingFactor': '1.2',
// === Crossing Minimization - Most aggressive ===
'elk.layered.crossingMinimization.strategy': 'LAYER_SWEEP',
'elk.layered.crossingMinimization.greedySwitch.type': 'TWO_SIDED',
'elk.layered.crossingMinimization.greedySwitchHierarchical.type': 'TWO_SIDED',
'elk.layered.crossingMinimization.semiInteractive': 'true',
'elk.layered.crossingMinimization.hierarchicalSweepiness': '0.9',
// === Layering Strategy - Best quality ===
'elk.layered.layering.strategy': 'NETWORK_SIMPLEX',
'elk.layered.layering.networkSimplex.nodeFlexibility': 'NODE_SIZE',
'elk.layered.layering.layerConstraint': 'NONE',
'elk.layered.layering.minWidth.upperBoundOnWidth': '4',
// === Cycle Breaking ===
'elk.layered.cycleBreaking.strategy': 'DEPTH_FIRST',
// === Connected Components ===
'elk.separateConnectedComponents': 'true',
'elk.spacing.componentComponent': '100',
// === Node Size Constraints ===
'elk.nodeSize.constraints': 'NODE_LABELS',
'elk.nodeSize.options': 'DEFAULT_MINIMUM_SIZE MINIMUM_SIZE_ACCOUNTS_FOR_PADDING',
// === Edge Label Placement ===
'elk.edgeLabels.placement': 'CENTER',
'elk.edgeLabels.inline': 'true',
// === Compaction ===
'elk.layered.compaction.postCompaction.strategy': 'EDGE_LENGTH',
'elk.layered.compaction.postCompaction.constraints': 'EDGE_LENGTH',
// === High-Quality Mode ===
'elk.layered.thoroughness': '10',
'elk.layered.wrapping.strategy': 'OFF',
'elk.hierarchyHandling': 'INCLUDE_CHILDREN',
// === Additional Optimizations ===
'elk.layered.feedbackEdges': 'true',
'elk.layered.mergeEdges': 'false',
'elk.layered.mergeHierarchyEdges': 'false',
'elk.layered.allowNonFlowPortsToSwitchSides': 'false',
'elk.layered.northOrSouthPort': 'false',
'elk.partitioning.activate': 'false',
'elk.junctionPoints': 'true',
// === Content Alignment ===
'elk.contentAlignment': 'V_TOP H_LEFT',
'elk.alignment': 'AUTOMATIC',
}
const CHILD_LAYOUT_OPTIONS = {
'elk.algorithm': 'layered',
'elk.direction': 'RIGHT',
// === Spacing - High quality for child nodes ===
'elk.layered.spacing.nodeNodeBetweenLayers': '80',
'elk.spacing.nodeNode': '60',
'elk.spacing.edgeNode': '40',
'elk.spacing.edgeEdge': '25',
'elk.spacing.edgeLabel': '8',
'elk.spacing.portPort': '15',
// === Node Placement - Best quality ===
'elk.layered.nodePlacement.strategy': 'NETWORK_SIMPLEX',
'elk.layered.nodePlacement.favorStraightEdges': 'true',
'elk.layered.nodePlacement.linearSegments.deflectionDampening': '0.5',
'elk.layered.nodePlacement.networkSimplex.nodeFlexibility': 'NODE_SIZE',
// === Edge Routing - Maximum quality ===
'elk.edgeRouting': 'SPLINES',
'elk.layered.edgeRouting.sloppySplineRouting': 'false',
'elk.layered.edgeRouting.splines.mode': 'CONSERVATIVE',
// === Crossing Minimization - Aggressive ===
'elk.layered.crossingMinimization.strategy': 'LAYER_SWEEP',
'elk.layered.crossingMinimization.greedySwitch.type': 'TWO_SIDED',
'elk.layered.crossingMinimization.semiInteractive': 'true',
// === Layering Strategy ===
'elk.layered.layering.strategy': 'NETWORK_SIMPLEX',
'elk.layered.layering.networkSimplex.nodeFlexibility': 'NODE_SIZE',
// === Cycle Breaking ===
'elk.layered.cycleBreaking.strategy': 'DEPTH_FIRST',
// === Node Size ===
'elk.nodeSize.constraints': 'NODE_LABELS',
// === Compaction ===
'elk.layered.compaction.postCompaction.strategy': 'EDGE_LENGTH',
// === High-Quality Mode ===
'elk.layered.thoroughness': '10',
'elk.hierarchyHandling': 'INCLUDE_CHILDREN',
// === Additional Optimizations ===
'elk.layered.feedbackEdges': 'true',
'elk.layered.mergeEdges': 'false',
'elk.junctionPoints': 'true',
}
// Position and size of a single node after layout (top-left anchored).
type LayoutInfo = {
  x: number
  y: number
  width: number
  height: number
  // Layer index reported by ELK's layered algorithm, when present.
  layer?: number
}
// Axis-aligned bounding box of a whole layout.
type LayoutBounds = {
  minX: number
  minY: number
  maxX: number
  maxY: number
}
// Public result of a layout pass: per-node placements plus overall bounds.
export type LayoutResult = {
  nodes: Map<string, LayoutInfo>
  bounds: LayoutBounds
}
// ELK Port definition for native port support
type ElkPortShape = {
  id: string
  layoutOptions?: LayoutOptions
}
// Minimal node shape handed to elk.layout.
type ElkNodeShape = {
  id: string
  width: number
  height: number
  ports?: ElkPortShape[]
  layoutOptions?: LayoutOptions
  children?: ElkNodeShape[]
}
// Minimal edge shape handed to elk.layout; ports are optional pins.
type ElkEdgeShape = {
  id: string
  sources: string[]
  targets: string[]
  sourcePort?: string
  targetPort?: string
}
/** Converts a workflow node into the minimal shape ELK needs, with default dimensions as fallback. */
const toElkNode = (node: Node): ElkNodeShape => {
  const { id, width, height } = node
  return {
    id,
    width: width ?? DEFAULT_NODE_WIDTH,
    height: height ?? DEFAULT_NODE_HEIGHT,
  }
}
// Monotonic counter giving every generated ELK edge a unique id.
// Reset to 0 at the start of each layout run.
let edgeCounter = 0

const nextEdgeId = () => {
  const id = `elk-edge-${edgeCounter}`
  edgeCounter += 1
  return id
}

/** Builds an ELK edge record, optionally pinned to a specific source/target port. */
const createEdge = (
  source: string,
  target: string,
  sourcePort?: string,
  targetPort?: string,
): ElkEdgeShape => {
  const edge: ElkEdgeShape = {
    id: nextEdgeId(),
    sources: [source],
    targets: [target],
    sourcePort,
    targetPort,
  }
  return edge
}
/**
 * Walks the laid-out ELK graph recursively and collects position/size info for
 * every node whose id satisfies `predicate`, together with the overall bounding box.
 *
 * Fix: `Number.parseInt` is now called with an explicit radix of 10 — without it,
 * lint rules flag the call and intent is ambiguous.
 */
const collectLayout = (graph: ElkNode, predicate: (id: string) => boolean): LayoutResult => {
  const result = new Map<string, LayoutInfo>()
  let minX = Infinity
  let minY = Infinity
  let maxX = -Infinity
  let maxY = -Infinity
  const visit = (node: ElkNode) => {
    node.children?.forEach((child: ElkNode) => {
      if (predicate(child.id)) {
        const x = child.x ?? 0
        const y = child.y ?? 0
        const width = child.width ?? DEFAULT_NODE_WIDTH
        const height = child.height ?? DEFAULT_NODE_HEIGHT
        // ELK exposes the computed layer index as a stringified layout option.
        const layer = child?.layoutOptions?.['org.eclipse.elk.layered.layerIndex']
        result.set(child.id, {
          x,
          y,
          width,
          height,
          layer: layer ? Number.parseInt(layer, 10) : undefined,
        })
        minX = Math.min(minX, x)
        minY = Math.min(minY, y)
        maxX = Math.max(maxX, x + width)
        maxY = Math.max(maxY, y + height)
      }
      if (child.children?.length)
        visit(child)
    })
  }
  visit(graph)
  // No nodes matched: collapse the bounds to the origin instead of ±Infinity.
  if (!Number.isFinite(minX) || !Number.isFinite(minY)) {
    minX = 0
    minY = 0
    maxX = 0
    maxY = 0
  }
  return {
    nodes: result,
    bounds: {
      minX,
      minY,
      maxX,
      maxY,
    },
  }
}
/**
 * Build If/Else node with ELK native Ports instead of dummy nodes
 * This is the recommended approach for handling multiple branches
 *
 * Returns null when the node has at most one outgoing branch (no ordering needed).
 * Fix: the port-id template was previously duplicated in two separate loops,
 * risking silent divergence; ports and the edge->port map are now built in one pass.
 */
const buildIfElseWithPorts = (
  ifElseNode: Node,
  edges: Edge[],
): { node: ElkNodeShape; portMap: Map<string, string> } | null => {
  const childEdges = edges.filter(edge => edge.source === ifElseNode.id)
  if (childEdges.length <= 1)
    return null
  // Sort child edges according to case order: cases by index, ELSE ('false') last.
  const sortedChildEdges = [...childEdges].sort((edgeA, edgeB) => {
    const handleA = edgeA.sourceHandle
    const handleB = edgeB.sourceHandle
    if (handleA && handleB) {
      const cases = (ifElseNode.data as IfElseNodeType).cases || []
      const isAElse = handleA === 'false'
      const isBElse = handleB === 'false'
      if (isAElse)
        return 1
      if (isBElse)
        return -1
      const indexA = cases.findIndex((c: CaseItem) => c.case_id === handleA)
      const indexB = cases.findIndex((c: CaseItem) => c.case_id === handleB)
      if (indexA !== -1 && indexB !== -1)
        return indexA - indexB
    }
    return 0
  })
  // Create one ELK port per branch and record edge -> port in the same pass,
  // so the port id is derived in exactly one place.
  const ports: ElkPortShape[] = []
  const portMap = new Map<string, string>()
  sortedChildEdges.forEach((edge, index) => {
    const portId = `${ifElseNode.id}-port-${edge.sourceHandle || index}`
    ports.push({
      id: portId,
      layoutOptions: {
        'port.side': 'EAST', // Ports on the right side (matching 'RIGHT' direction)
        'port.index': String(index),
      },
    })
    portMap.set(edge.id, portId)
  })
  return {
    node: {
      id: ifElseNode.id,
      width: ifElseNode.width ?? DEFAULT_NODE_WIDTH,
      height: ifElseNode.height ?? DEFAULT_NODE_HEIGHT,
      ports,
      // FIXED_ORDER makes ELK honour the port.index ordering above.
      layoutOptions: {
        'elk.portConstraints': 'FIXED_ORDER',
      },
    },
    portMap,
  }
}
/**
 * Translates a layout so its bounding box starts at (0, 0).
 * An empty layout is returned untouched.
 */
const normaliseBounds = (layout: LayoutResult): LayoutResult => {
  const { nodes, bounds } = layout
  if (nodes.size === 0)
    return layout
  const offsetX = bounds.minX
  const offsetY = bounds.minY
  // Rebuild the map with every placement shifted by the top-left offset.
  const adjustedNodes = new Map<string, LayoutInfo>(
    [...nodes].map(([id, info]) => [id, {
      ...info,
      x: info.x - offsetX,
      y: info.y - offsetY,
    }]),
  )
  return {
    nodes: adjustedNodes,
    bounds: {
      minX: 0,
      minY: 0,
      maxX: bounds.maxX - offsetX,
      maxY: bounds.maxY - offsetY,
    },
  }
}
// Lays out all top-level workflow nodes with ELK's layered algorithm.
// The name is kept from the old dagre implementation so callers are unaffected
// (see the migration note at the top of this file).
export const getLayoutByDagre = async (originNodes: Node[], originEdges: Edge[]): Promise<LayoutResult> => {
  // Fresh edge-id counter for this layout run.
  edgeCounter = 0
  // Only root-level custom nodes; iteration/loop children are laid out separately.
  const nodes = cloneDeep(originNodes).filter(node => !node.parentId && node.type === CUSTOM_NODE)
  const edges = cloneDeep(originEdges).filter(edge => (!edge.data?.isInIteration && !edge.data?.isInLoop))
  const elkNodes: ElkNodeShape[] = []
  const elkEdges: ElkEdgeShape[] = []
  // Track which edges have been processed for If/Else nodes with ports
  const edgeToPortMap = new Map<string, string>()
  // Build nodes with ports for If/Else nodes
  nodes.forEach((node) => {
    if (node.data.type === BlockEnum.IfElse) {
      const portsResult = buildIfElseWithPorts(node, edges)
      if (portsResult) {
        // Use node with ports
        elkNodes.push(portsResult.node)
        // Store port mappings for edges
        portsResult.portMap.forEach((portId, edgeId) => {
          edgeToPortMap.set(edgeId, portId)
        })
      }
      else {
        // No multiple branches, use normal node
        elkNodes.push(toElkNode(node))
      }
    }
    else {
      elkNodes.push(toElkNode(node))
    }
  })
  // Build edges with port connections
  edges.forEach((edge) => {
    const sourcePort = edgeToPortMap.get(edge.id)
    elkEdges.push(createEdge(edge.source, edge.target, sourcePort))
  })
  const graph = {
    id: 'workflow-root',
    layoutOptions: ROOT_LAYOUT_OPTIONS,
    children: elkNodes,
    edges: elkEdges,
  }
  const layoutedGraph = await elk.layout(graph)
  // No need to filter dummy nodes anymore, as we're using ports
  const layout = collectLayout(layoutedGraph, () => true)
  return normaliseBounds(layout)
}
// Post-processes a child layout: anchors the iteration/loop start node near the
// container's left padding, shifts the remaining children consistently, then
// re-normalises the bounding box so it starts at (0, 0).
const normaliseChildLayout = (
  layout: LayoutResult,
  nodes: Node[],
): LayoutResult => {
  const result = new Map<string, LayoutInfo>()
  layout.nodes.forEach((info, id) => {
    result.set(id, info)
  })
  // Ensure iteration / loop start nodes do not collapse into the children.
  const startNode = nodes.find(node =>
    node.type === CUSTOM_ITERATION_START_NODE
    || node.type === CUSTOM_LOOP_START_NODE
    || node.data?.type === BlockEnum.LoopStart
    || node.data?.type === BlockEnum.IterationStart,
  )
  if (startNode) {
    const startLayout = result.get(startNode.id)
    if (startLayout) {
      // Pull everything left so the start node sits at the desired padding.
      const desiredMinX = NODE_LAYOUT_HORIZONTAL_PADDING / 1.5
      if (startLayout.x > desiredMinX) {
        const shiftX = startLayout.x - desiredMinX
        result.forEach((value, key) => {
          result.set(key, {
            ...value,
            x: value.x - shiftX,
          })
        })
      }
      // Align all children vertically relative to the start node, plus half padding.
      const desiredMinY = startLayout.y
      const deltaY = NODE_LAYOUT_VERTICAL_PADDING / 2
      result.forEach((value, key) => {
        result.set(key, {
          ...value,
          y: value.y - desiredMinY + deltaY,
        })
      })
    }
  }
  // Recompute the bounding box after the shifts above.
  let minX = Infinity
  let minY = Infinity
  let maxX = -Infinity
  let maxY = -Infinity
  result.forEach((value) => {
    minX = Math.min(minX, value.x)
    minY = Math.min(minY, value.y)
    maxX = Math.max(maxX, value.x + value.width)
    maxY = Math.max(maxY, value.y + value.height)
  })
  // Empty result (no placements): return the original layout unchanged.
  if (!Number.isFinite(minX) || !Number.isFinite(minY))
    return layout
  return normaliseBounds({
    nodes: result,
    bounds: {
      minX,
      minY,
      maxX,
      maxY,
    },
  })
}
export const getLayoutForChildNodes = async (
parentNodeId: string,
originNodes: Node[],
originEdges: Edge[],
): Promise<LayoutResult | null> => {
edgeCounter = 0
const nodes = cloneDeep(originNodes).filter(node => node.parentId === parentNodeId)
if (!nodes.length)
return null
const edges = cloneDeep(originEdges).filter(edge =>
(edge.data?.isInIteration && edge.data?.iteration_id === parentNodeId)
|| (edge.data?.isInLoop && edge.data?.loop_id === parentNodeId),
)
const elkNodes: ElkNodeShape[] = nodes.map(toElkNode)
const elkEdges: ElkEdgeShape[] = edges.map(edge => createEdge(edge.source, edge.target))
const graph = {
id: parentNodeId,
layoutOptions: CHILD_LAYOUT_OPTIONS,
children: elkNodes,
edges: elkEdges,
}
const layoutedGraph = await elk.layout(graph)
const layout = collectLayout(layoutedGraph, () => true)
return normaliseChildLayout(layout, nodes)
}

View File

@ -1,12 +1,8 @@
import {
getConnectedEdges,
getIncomers,
getOutgoers,
} from 'reactflow'
import { v4 as uuid4 } from 'uuid'
import {
groupBy,
isEqual,
uniqBy,
} from 'lodash-es'
import type {
@ -168,158 +164,6 @@ export const changeNodesAndEdgesId = (nodes: Node[], edges: Edge[]) => {
return [newNodes, newEdges] as [Node[], Edge[]]
}
// Describes one detected parallel region: the node where branches fan out and
// the maximum nesting depth observed inside it.
type ParallelInfoItem = {
  parallelNodeId: string
  depth: number
  isBranch?: boolean
}
// Per-node bookkeeping while traversing: which parallel region the node belongs
// to and how deeply nested it is.
type NodeParallelInfo = {
  parallelNodeId: string
  edgeHandleId: string
  depth: number
}
// A traversal frontier entry: a node plus the source handle to follow out of it.
type NodeHandle = {
  node: Node
  handle: string
}
// Tracks which nodes feed into a node and which edges flow out of it downstream.
type NodeStreamInfo = {
  upstreamNodes: Set<string>
  downstreamEdges: Set<string>
}
// Scans the graph from `startNode` and reports every parallel (fan-out) region
// with its nesting depth, plus whether any abnormal edge pattern was seen
// (a node that both fans out and receives multiple incoming edges).
export const getParallelInfo = (startNode: Node, nodes: Node[], edges: Edge[]) => {
  if (!startNode)
    throw new Error('Start node not found')
  const parallelList = [] as ParallelInfoItem[]
  // Worklist of sub-traversal entry points; `traverse` may push back onto it
  // when it defers a merge node (see the isEqual check below).
  const nextNodeHandles = [{ node: startNode, handle: 'source' }]
  let hasAbnormalEdges = false
  // BFS over (node, sourceHandle) pairs starting at `firstNodeHandle`,
  // accumulating one ParallelInfoItem for this region.
  const traverse = (firstNodeHandle: NodeHandle) => {
    const nodeEdgesSet = {} as Record<string, Set<string>>
    const totalEdgesSet = new Set<string>()
    const nextHandles = [firstNodeHandle]
    const streamInfo = {} as Record<string, NodeStreamInfo>
    const parallelListItem = {
      parallelNodeId: '',
      depth: 0,
    } as ParallelInfoItem
    const nodeParallelInfoMap = {} as Record<string, NodeParallelInfo>
    nodeParallelInfoMap[firstNodeHandle.node.id] = {
      parallelNodeId: '',
      edgeHandleId: '',
      depth: 0,
    }
    while (nextHandles.length) {
      const currentNodeHandle = nextHandles.shift()!
      const { node: currentNode, handle: currentHandle = 'source' } = currentNodeHandle
      const currentNodeHandleKey = currentNode.id
      // Outgoing edges through the specific handle we arrived on.
      const connectedEdges = edges.filter(edge => edge.source === currentNode.id && edge.sourceHandle === currentHandle)
      const connectedEdgesLength = connectedEdges.length
      const outgoers = nodes.filter(node => connectedEdges.some(edge => edge.target === node.id))
      const incomers = getIncomers(currentNode, nodes, edges)
      if (!streamInfo[currentNodeHandleKey]) {
        streamInfo[currentNodeHandleKey] = {
          upstreamNodes: new Set<string>(),
          downstreamEdges: new Set<string>(),
        }
      }
      // Merge-node check: when a multi-input node has collected every tracked
      // edge that is not downstream of itself, the current parallel region is
      // complete — record its depth and restart traversal from this node.
      // NOTE(review): relies on set equality of edge-id sets; presumably this
      // identifies the join point of the fan-out — verify against callers.
      if (nodeEdgesSet[currentNodeHandleKey]?.size > 0 && incomers.length > 1) {
        const newSet = new Set<string>()
        for (const item of totalEdgesSet) {
          if (!streamInfo[currentNodeHandleKey].downstreamEdges.has(item))
            newSet.add(item)
        }
        if (isEqual(nodeEdgesSet[currentNodeHandleKey], newSet)) {
          parallelListItem.depth = nodeParallelInfoMap[currentNode.id].depth
          nextNodeHandles.push({ node: currentNode, handle: currentHandle })
          break
        }
      }
      if (nodeParallelInfoMap[currentNode.id].depth > parallelListItem.depth)
        parallelListItem.depth = nodeParallelInfoMap[currentNode.id].depth
      outgoers.forEach((outgoer) => {
        const outgoerConnectedEdges = getConnectedEdges([outgoer], edges).filter(edge => edge.source === outgoer.id)
        const sourceEdgesGroup = groupBy(outgoerConnectedEdges, 'sourceHandle')
        const incomers = getIncomers(outgoer, nodes, edges)
        // A node that both fans out and merges multiple inputs is flagged as abnormal.
        if (outgoers.length > 1 && incomers.length > 1)
          hasAbnormalEdges = true
        // Queue each outgoing handle of the successor for traversal.
        Object.keys(sourceEdgesGroup).forEach((sourceHandle) => {
          nextHandles.push({ node: outgoer, handle: sourceHandle })
        })
        if (!outgoerConnectedEdges.length)
          nextHandles.push({ node: outgoer, handle: 'source' })
        const outgoerKey = outgoer.id
        if (!nodeEdgesSet[outgoerKey])
          nodeEdgesSet[outgoerKey] = new Set<string>()
        // Successors inherit the edge set accumulated so far on this path.
        if (nodeEdgesSet[currentNodeHandleKey]) {
          for (const item of nodeEdgesSet[currentNodeHandleKey])
            nodeEdgesSet[outgoerKey].add(item)
        }
        if (!streamInfo[outgoerKey]) {
          streamInfo[outgoerKey] = {
            upstreamNodes: new Set<string>(),
            downstreamEdges: new Set<string>(),
          }
        }
        if (!nodeParallelInfoMap[outgoer.id]) {
          nodeParallelInfoMap[outgoer.id] = {
            ...nodeParallelInfoMap[currentNode.id],
          }
        }
        if (connectedEdgesLength > 1) {
          // Fan-out: this handle has several outgoing edges, so a parallel
          // region starts (or deepens) at the current node.
          const edge = connectedEdges.find(edge => edge.target === outgoer.id)!
          nodeEdgesSet[outgoerKey].add(edge.id)
          totalEdgesSet.add(edge.id)
          streamInfo[currentNodeHandleKey].downstreamEdges.add(edge.id)
          streamInfo[outgoerKey].upstreamNodes.add(currentNodeHandleKey)
          // Propagate the new edge to every upstream node's downstream set.
          for (const item of streamInfo[currentNodeHandleKey].upstreamNodes)
            streamInfo[item].downstreamEdges.add(edge.id)
          if (!parallelListItem.parallelNodeId)
            parallelListItem.parallelNodeId = currentNode.id
          const prevDepth = nodeParallelInfoMap[currentNode.id].depth + 1
          const currentDepth = nodeParallelInfoMap[outgoer.id].depth
          nodeParallelInfoMap[outgoer.id].depth = Math.max(prevDepth, currentDepth)
        }
        else {
          // Single edge: no new parallelism; carry depth and upstream info forward.
          for (const item of streamInfo[currentNodeHandleKey].upstreamNodes)
            streamInfo[outgoerKey].upstreamNodes.add(item)
          nodeParallelInfoMap[outgoer.id].depth = nodeParallelInfoMap[currentNode.id].depth
        }
      })
    }
    parallelList.push(parallelListItem)
  }
  // Process entry points until traversal stops producing new ones.
  while (nextNodeHandles.length) {
    const nodeHandle = nextNodeHandles.shift()!
    traverse(nodeHandle)
  }
  return {
    parallelList,
    hasAbnormalEdges,
  }
}
/** Whether a node type supports the error-handling (fail branch / retry) feature. */
export const hasErrorHandleNode = (nodeType?: BlockEnum) => {
  switch (nodeType) {
    case BlockEnum.LLM:
    case BlockEnum.Tool:
    case BlockEnum.HttpRequest:
    case BlockEnum.Code:
      return true
    default:
      return false
  }
}

View File

@ -69,7 +69,7 @@ const ValueContent = ({
const [json, setJson] = useState('')
const [parseError, setParseError] = useState<Error | null>(null)
const [validationError, setValidationError] = useState<string>('')
const [fileValue, setFileValue] = useState<any>(formatFileValue(currentVar))
const [fileValue, setFileValue] = useState<any>(() => formatFileValue(currentVar))
const { run: debounceValueChange } = useDebounceFn(handleValueChange, { wait: 500 })

View File

@ -3,7 +3,7 @@ import { type StoreApi, create } from 'zustand'
import { type TemporalState, temporal } from 'zundo'
import isDeepEqual from 'fast-deep-equal'
import type { Edge, Node } from './types'
import type { WorkflowHistoryEvent } from './hooks'
import type { WorkflowHistoryEventT } from './hooks'
import { noop } from 'lodash-es'
export const WorkflowHistoryStoreContext = createContext<WorkflowHistoryStoreContextType>({ store: null, shortcutsEnabled: true, setShortcutsEnabled: noop })
@ -98,7 +98,7 @@ function createStore({
export type WorkflowHistoryStore = {
nodes: Node[]
edges: Edge[]
workflowHistoryEvent: WorkflowHistoryEvent | undefined
workflowHistoryEvent: WorkflowHistoryEventT | undefined
workflowHistoryEventMeta?: WorkflowHistoryEventMeta
}

View File

@ -15,7 +15,7 @@ import { useNodeLoopInteractions } from './hooks'
const Node: FC<NodeProps<LoopNodeType>> = ({
id,
data,
data: _data,
}) => {
const { zoom } = useViewport()
const nodesInitialized = useNodesInitialized()

View File

@ -68,8 +68,8 @@ const WorkflowPreview = ({
viewport,
className,
}: WorkflowPreviewProps) => {
const [nodesData, setNodesData] = useState(initialNodes(nodes, edges))
const [edgesData, setEdgesData] = useState(initialEdges(edges, nodes))
const [nodesData, setNodesData] = useState(() => initialNodes(nodes, edges))
const [edgesData, setEdgesData] = useState(() => initialEdges(edges, nodes))
const onNodesChange = useCallback(
(changes: NodeChange[]) => setNodesData(nds => applyNodeChanges(changes, nds)),