refactor(trigger): convert app mode type from string literals to AppModeEnum

This commit is contained in:
yessenia
2025-10-21 15:42:04 +08:00
parent d5e2649608
commit dc4801c014
59 changed files with 260 additions and 223 deletions

View File

@ -22,6 +22,7 @@ import type { Node } from 'reactflow'
import type { PluginMeta } from '@/app/components/plugins/types'
import { noop } from 'lodash-es'
import { useDocLink } from '@/context/i18n'
import { AppModeEnum } from '@/types/app'
export type Strategy = {
agent_strategy_provider_name: string
@ -99,7 +100,7 @@ export const AgentStrategy = memo((props: AgentStrategyProps) => {
modelConfig={
defaultModel.data
? {
mode: 'chat',
mode: AppModeEnum.CHAT,
name: defaultModel.data.model,
provider: defaultModel.data.provider.provider,
completion_params: {},

View File

@ -63,6 +63,7 @@ import type { PromptItem } from '@/models/debug'
import { VAR_REGEX } from '@/config'
import type { AgentNodeType } from '../../../agent/types'
import type { SchemaTypeDefinition } from '@/service/use-common'
import { AppModeEnum } from '@/types/app'
export const isSystemVar = (valueSelector: ValueSelector) => {
return valueSelector[0] === 'sys' || valueSelector[1] === 'sys'
@ -1278,7 +1279,7 @@ export const getNodeUsedVars = (node: Node): ValueSelector[] => {
}
case BlockEnum.LLM: {
const payload = data as LLMNodeType
const isChatModel = payload.model?.mode === 'chat'
const isChatModel = payload.model?.mode === AppModeEnum.CHAT
let prompts: string[] = []
if (isChatModel) {
prompts
@ -1581,7 +1582,7 @@ export const updateNodeVars = (
}
case BlockEnum.LLM: {
const payload = data as LLMNodeType
const isChatModel = payload.model?.mode === 'chat'
const isChatModel = payload.model?.mode === AppModeEnum.CHAT
if (isChatModel) {
payload.prompt_template = (
payload.prompt_template as PromptItem[]

View File

@ -11,6 +11,7 @@ import type { MetadataShape } from '@/app/components/workflow/nodes/knowledge-re
import { MetadataFilteringModeEnum } from '@/app/components/workflow/nodes/knowledge-retrieval/types'
import ModelParameterModal from '@/app/components/header/account-setting/model-provider-page/model-parameter-modal'
import { noop } from 'lodash-es'
import { AppModeEnum } from '@/types/app'
type MetadataFilterProps = {
metadataFilterMode?: MetadataFilteringModeEnum
@ -84,7 +85,7 @@ const MetadataFilter = ({
popupClassName='!w-[387px]'
isInWorkflow
isAdvancedMode={true}
mode={metadataModelConfig?.mode || 'chat'}
mode={metadataModelConfig?.mode || AppModeEnum.CHAT}
provider={metadataModelConfig?.provider || ''}
completionParams={metadataModelConfig?.completion_params || { temperature: 0.7 }}
modelId={metadataModelConfig?.name || ''}

View File

@ -32,7 +32,7 @@ import {
getMultipleRetrievalConfig,
getSelectedDatasetsMode,
} from './utils'
import { RETRIEVE_TYPE } from '@/types/app'
import { AppModeEnum, RETRIEVE_TYPE } from '@/types/app'
import { DATASET_DEFAULT } from '@/config'
import type { DataSet } from '@/models/datasets'
import { fetchDatasets } from '@/service/datasets'
@ -344,7 +344,7 @@ const useConfig = (id: string, payload: KnowledgeRetrievalNodeType) => {
draft.metadata_model_config = {
provider: model.provider,
name: model.modelId,
mode: model.mode || 'chat',
mode: model.mode || AppModeEnum.CHAT,
completion_params: draft.metadata_model_config?.completion_params || { temperature: 0.7 },
}
})

View File

@ -1,4 +1,5 @@
// import { RETRIEVAL_OUTPUT_STRUCT } from '../../constants'
import { AppModeEnum } from '@/types/app'
import { BlockEnum, EditionType } from '../../types'
import { type NodeDefault, type PromptItem, PromptRole } from '../../types'
import type { LLMNodeType } from './types'
@ -36,7 +37,7 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
model: {
provider: '',
name: '',
mode: 'chat',
mode: AppModeEnum.CHAT,
completion_params: {
temperature: 0.7,
},
@ -63,7 +64,7 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
errorMessages = t(`${i18nPrefix}.fieldRequired`, { field: t(`${i18nPrefix}.fields.model`) })
if (!errorMessages && !payload.memory) {
const isChatModel = payload.model.mode === 'chat'
const isChatModel = payload.model.mode === AppModeEnum.CHAT
const isPromptEmpty = isChatModel
? !(payload.prompt_template as PromptItem[]).some((t) => {
if (t.edition_type === EditionType.jinja2)
@ -77,14 +78,14 @@ const nodeDefault: NodeDefault<LLMNodeType> = {
}
if (!errorMessages && !!payload.memory) {
const isChatModel = payload.model.mode === 'chat'
const isChatModel = payload.model.mode === AppModeEnum.CHAT
// payload.memory.query_prompt_template not pass is default: {{#sys.query#}}
if (isChatModel && !!payload.memory.query_prompt_template && !payload.memory.query_prompt_template.includes('{{#sys.query#}}'))
errorMessages = t('workflow.nodes.llm.sysQueryInUser')
}
if (!errorMessages) {
const isChatModel = payload.model.mode === 'chat'
const isChatModel = payload.model.mode === AppModeEnum.CHAT
const isShowVars = (() => {
if (isChatModel)
return (payload.prompt_template as PromptItem[]).some(item => item.edition_type === EditionType.jinja2)

View File

@ -18,6 +18,7 @@ import {
import useNodeCrud from '@/app/components/workflow/nodes/_base/hooks/use-node-crud'
import { checkHasContextBlock, checkHasHistoryBlock, checkHasQueryBlock } from '@/app/components/base/prompt-editor/constants'
import useInspectVarsCrud from '@/app/components/workflow/hooks/use-inspect-vars-crud'
import { AppModeEnum } from '@/types/app'
const useConfig = (id: string, payload: LLMNodeType) => {
const { nodesReadOnly: readOnly } = useNodesReadOnly()
@ -46,7 +47,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
// model
const model = inputs.model
const modelMode = inputs.model?.mode
const isChatModel = modelMode === 'chat'
const isChatModel = modelMode === AppModeEnum.CHAT
const isCompletionModel = !isChatModel
@ -131,7 +132,7 @@ const useConfig = (id: string, payload: LLMNodeType) => {
draft.model.mode = model.mode!
const isModeChange = model.mode !== inputRef.current.model.mode
if (isModeChange && defaultConfig && Object.keys(defaultConfig).length > 0)
appendDefaultPromptConfig(draft, defaultConfig, model.mode === 'chat')
appendDefaultPromptConfig(draft, defaultConfig, model.mode === AppModeEnum.CHAT)
})
setInputs(newInputs)
setModelChanged(true)

View File

@ -12,6 +12,7 @@ import useConfigVision from '../../hooks/use-config-vision'
import { noop } from 'lodash-es'
import { findVariableWhenOnLLMVision } from '../utils'
import useAvailableVarList from '../_base/hooks/use-available-var-list'
import { AppModeEnum } from '@/types/app'
const i18nPrefix = 'workflow.nodes.llm'
type Params = {
@ -56,7 +57,7 @@ const useSingleRunFormParams = ({
// model
const model = inputs.model
const modelMode = inputs.model?.mode
const isChatModel = modelMode === 'chat'
const isChatModel = modelMode === AppModeEnum.CHAT
const {
isVisionModel,
} = useConfigVision(model, {

View File

@ -3,6 +3,7 @@ import { type ParameterExtractorNodeType, ReasoningModeType } from './types'
import { genNodeMetaData } from '@/app/components/workflow/utils'
import { BlockEnum } from '@/app/components/workflow/types'
import { BlockClassificationEnum } from '@/app/components/workflow/block-selector/types'
import { AppModeEnum } from '@/types/app'
const i18nPrefix = 'workflow'
const metaData = genNodeMetaData({
@ -17,7 +18,7 @@ const nodeDefault: NodeDefault<ParameterExtractorNodeType> = {
model: {
provider: '',
name: '',
mode: 'chat',
mode: AppModeEnum.CHAT,
completion_params: {
temperature: 0.7,
},

View File

@ -17,6 +17,7 @@ import { checkHasQueryBlock } from '@/app/components/base/prompt-editor/constant
import useAvailableVarList from '@/app/components/workflow/nodes/_base/hooks/use-available-var-list'
import { supportFunctionCall } from '@/utils/tool-call'
import useInspectVarsCrud from '../../hooks/use-inspect-vars-crud'
import { AppModeEnum } from '@/types/app'
const useConfig = (id: string, payload: ParameterExtractorNodeType) => {
const {
@ -86,13 +87,13 @@ const useConfig = (id: string, payload: ParameterExtractorNodeType) => {
const model = inputs.model || {
provider: '',
name: '',
mode: 'chat',
mode: AppModeEnum.CHAT,
completion_params: {
temperature: 0.7,
},
}
const modelMode = inputs.model?.mode
const isChatModel = modelMode === 'chat'
const isChatModel = modelMode === AppModeEnum.CHAT
const isCompletionModel = !isChatModel
const {
@ -133,7 +134,7 @@ const useConfig = (id: string, payload: ParameterExtractorNodeType) => {
draft.model.mode = model.mode!
const isModeChange = model.mode !== inputRef.current.model?.mode
if (isModeChange && defaultConfig && Object.keys(defaultConfig).length > 0)
appendDefaultPromptConfig(draft, defaultConfig, model.mode === 'chat')
appendDefaultPromptConfig(draft, defaultConfig, model.mode === AppModeEnum.CHAT)
})
setInputs(newInputs)
setModelChanged(true)

View File

@ -3,6 +3,7 @@ import type { QuestionClassifierNodeType } from './types'
import { genNodeMetaData } from '@/app/components/workflow/utils'
import { BlockEnum } from '@/app/components/workflow/types'
import { BlockClassificationEnum } from '@/app/components/workflow/block-selector/types'
import { AppModeEnum } from '@/types/app'
const i18nPrefix = 'workflow'
@ -18,7 +19,7 @@ const nodeDefault: NodeDefault<QuestionClassifierNodeType> = {
model: {
provider: '',
name: '',
mode: 'chat',
mode: AppModeEnum.CHAT,
completion_params: {
temperature: 0.7,
},

View File

@ -15,6 +15,7 @@ import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/com
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { checkHasQueryBlock } from '@/app/components/base/prompt-editor/constants'
import { useUpdateNodeInternals } from 'reactflow'
import { AppModeEnum } from '@/types/app'
const useConfig = (id: string, payload: QuestionClassifierNodeType) => {
const updateNodeInternals = useUpdateNodeInternals()
@ -38,7 +39,7 @@ const useConfig = (id: string, payload: QuestionClassifierNodeType) => {
const model = inputs.model
const modelMode = inputs.model?.mode
const isChatModel = modelMode === 'chat'
const isChatModel = modelMode === AppModeEnum.CHAT
const {
isVisionModel,