feat: add support for file outputs in LLMNode, including auto-collection and deduplication

This commit is contained in:
Novice
2026-03-11 16:30:23 +08:00
parent f0c82f28cb
commit 8513fa2897
5 changed files with 22 additions and 1 deletion

View File

@@ -573,7 +573,11 @@ class LLMNode(Node[LLMNodeData]):
"tool_calls": [self._serialize_tool_call(item) for item in generation_data.tool_calls],
"sequence": generation_data.sequence,
}
files_to_output = generation_data.files
files_to_output = list(generation_data.files)
# Merge auto-collected/structured-output files from self._file_outputs
if self._file_outputs:
existing_ids = {f.id for f in files_to_output}
files_to_output.extend(f for f in self._file_outputs if f.id not in existing_ids)
else:
# Classical runtime: use pre-computed generation-specific text pair,
# falling back to native model reasoning if no <think> tags were found.
@@ -2126,6 +2130,12 @@ class LLMNode(Node[LLMNodeData]):
result = yield from self._process_tool_outputs(outputs)
# Auto-collect sandbox output/ files, deduplicate by id
collected_files = session.collect_output_files()
if collected_files:
existing_ids = {f.id for f in self._file_outputs}
self._file_outputs.extend(f for f in collected_files if f.id not in existing_ids)
if result is None:
raise LLMNodeError("SandboxSession exited unexpectedly")

View File

@@ -182,6 +182,10 @@ export const LLM_OUTPUT_STRUCT: Var[] = [
variable: 'usage',
type: VarType.object,
},
{
variable: 'files',
type: VarType.arrayFile,
},
]
export const KNOWLEDGE_RETRIEVAL_OUTPUT_STRUCT: Var[] = [

View File

@@ -461,6 +461,11 @@ const Panel: FC&lt;NodePanelProps&lt;LLMNodeType&gt;&gt; = ({
type="object"
description={t(`${i18nPrefix}.outputVars.usage`, { ns: 'workflow' })}
/>
<VarItem
name="files"
type="array[file]"
description={t(`${i18nPrefix}.outputVars.files`, { ns: 'workflow' })}
/>
{inputs.structured_output_enabled && (
<>
<Split className="mt-3" />

View File

@@ -828,6 +828,7 @@
"nodes.llm.jsonSchema.warningTips.saveSchema": "Please finish editing the current field before saving the schema",
"nodes.llm.model": "model",
"nodes.llm.notSetContextInPromptTip": "To enable the context feature, please fill in the context variable in PROMPT.",
"nodes.llm.outputVars.files": "Files produced during execution",
"nodes.llm.outputVars.generation": "Generation Information",
"nodes.llm.outputVars.output": "Generate content",
"nodes.llm.outputVars.reasoning_content": "Reasoning Content",

View File

@@ -828,6 +828,7 @@
"nodes.llm.jsonSchema.warningTips.saveSchema": "请先完成当前字段的编辑",
"nodes.llm.model": "模型",
"nodes.llm.notSetContextInPromptTip": "要启用上下文功能,请在提示中填写上下文变量。",
"nodes.llm.outputVars.files": "执行过程中产出的文件",
"nodes.llm.outputVars.generation": "生成信息",
"nodes.llm.outputVars.output": "生成内容",
"nodes.llm.outputVars.reasoning_content": "推理内容",