fix: handle prompt template correctly to extract selectors for step run

The root cause is not yet understood; this change may cause further issues.
This commit is contained in:
Stream
2026-02-04 07:23:02 +08:00
parent 9172997f24
commit 7535b67423

View File

@ -1441,9 +1441,11 @@ class LLMNode(Node[LLMNodeData]):
if isinstance(item, PromptMessageContext):
if len(item.value_selector) >= 2:
prompt_context_selectors.append(item.value_selector)
elif isinstance(item, LLMNodeChatModelMessage) and item.edition_type == "jinja2":
elif isinstance(item, LLMNodeChatModelMessage):
variable_template_parser = VariableTemplateParser(template=item.text)
variable_selectors.extend(variable_template_parser.extract_variable_selectors())
else:
raise InvalidVariableTypeError(f"Invalid prompt template type: {type(prompt_template)}")
elif isinstance(prompt_template, LLMNodeCompletionModelPromptTemplate):
if prompt_template.edition_type != "jinja2":
variable_template_parser = VariableTemplateParser(template=prompt_template.text)