mirror of
https://github.com/langgenius/dify.git
synced 2026-05-03 08:58:09 +08:00
Frontend: - Migrate deprecated imports: modal→dialog, toast→ui/toast, tooltip→tooltip-plus, portal-to-follow-elem→portal-to-follow-elem-plus, select→ui/select, confirm→alert-dialog - Replace next/* with @/next/* wrapper modules - Convert TypeScript enums to const objects (erasable-syntax-only) - Replace all `any` types with `unknown` or specific types in workflow types - Fix unused vars, react-hooks-extra, react-refresh/only-export-components - Extract InteractionMode to separate module, tool-block commands to commands.ts Backend: - Fix pyrefly errors: type narrowing, null guards, getattr patterns - Remove unused TYPE_CHECKING imports in LLM node - Add ignore_imports entries to .importlinter for dify_graph boundary violations Made-with: Cursor
96 lines
3.2 KiB
TypeScript
import type { FetchWorkflowDraftResponse } from '@/types/workflow'
|
|
import {
|
|
useCallback,
|
|
useEffect,
|
|
useState,
|
|
} from 'react'
|
|
import {
|
|
useWorkflowStore,
|
|
} from '@/app/components/workflow/store'
|
|
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
|
|
import {
|
|
fetchWorkflowDraft,
|
|
syncWorkflowDraft,
|
|
} from '@/service/workflow'
|
|
import { usePipelineConfig } from './use-pipeline-config'
|
|
import { usePipelineTemplate } from './use-pipeline-template'
|
|
|
|
export const usePipelineInit = () => {
|
|
const workflowStore = useWorkflowStore()
|
|
const {
|
|
nodes: nodesTemplate,
|
|
edges: edgesTemplate,
|
|
} = usePipelineTemplate()
|
|
const [data, setData] = useState<FetchWorkflowDraftResponse>()
|
|
const [isLoading, setIsLoading] = useState(true)
|
|
const datasetId = useDatasetDetailContextWithSelector(s => s.dataset)?.pipeline_id
|
|
const knowledgeName = useDatasetDetailContextWithSelector(s => s.dataset)?.name
|
|
const knowledgeIcon = useDatasetDetailContextWithSelector(s => s.dataset)?.icon_info
|
|
|
|
useEffect(() => {
|
|
workflowStore.setState({ pipelineId: datasetId, knowledgeName, knowledgeIcon })
|
|
}, [datasetId, workflowStore, knowledgeName, knowledgeIcon])
|
|
|
|
usePipelineConfig()
|
|
|
|
const handleGetInitialWorkflowData = useCallback(async () => {
|
|
const {
|
|
setEnvSecrets,
|
|
setEnvironmentVariables,
|
|
setSyncWorkflowDraftHash,
|
|
setDraftUpdatedAt,
|
|
setToolPublished,
|
|
setRagPipelineVariables,
|
|
} = workflowStore.getState()
|
|
try {
|
|
const res = await fetchWorkflowDraft(`/rag/pipelines/${datasetId}/workflows/draft`)
|
|
setData(res)
|
|
setDraftUpdatedAt(res.updated_at)
|
|
setToolPublished(res.tool_published)
|
|
setEnvSecrets((res.environment_variables || []).filter(env => env.value_type === 'secret').reduce((acc, env) => {
|
|
acc[env.id] = typeof env.value === 'string' ? env.value : JSON.stringify(env.value)
|
|
return acc
|
|
}, {} as Record<string, string>))
|
|
setEnvironmentVariables(res.environment_variables?.map(env => env.value_type === 'secret' ? { ...env, value: '[__HIDDEN__]' } : env) || [])
|
|
setSyncWorkflowDraftHash(res.hash)
|
|
setRagPipelineVariables?.(res.rag_pipeline_variables || [])
|
|
setIsLoading(false)
|
|
}
|
|
catch (error: any) {
|
|
if (error && error.json && !error.bodyUsed && datasetId) {
|
|
error.json().then((err: any) => {
|
|
if (err.code === 'draft_workflow_not_exist') {
|
|
workflowStore.setState({
|
|
notInitialWorkflow: true,
|
|
shouldAutoOpenStartNodeSelector: true,
|
|
})
|
|
syncWorkflowDraft({
|
|
url: `/rag/pipelines/${datasetId}/workflows/draft`,
|
|
params: {
|
|
graph: {
|
|
nodes: nodesTemplate,
|
|
edges: edgesTemplate,
|
|
},
|
|
environment_variables: [],
|
|
},
|
|
}).then((res) => {
|
|
const { setDraftUpdatedAt } = workflowStore.getState()
|
|
setDraftUpdatedAt(res.updated_at)
|
|
handleGetInitialWorkflowData()
|
|
})
|
|
}
|
|
})
|
|
}
|
|
}
|
|
}, [nodesTemplate, edgesTemplate, workflowStore, datasetId])
|
|
|
|
useEffect(() => {
|
|
handleGetInitialWorkflowData()
|
|
}, [])
|
|
|
|
return {
|
|
data,
|
|
isLoading,
|
|
}
|
|
}
|