refactor: rename mention node to nested_node for generic sub-graph support

This commit is contained in:
Novice
2026-01-22 13:15:13 +08:00
parent c7d106cfa4
commit 5cb8d4cc11
35 changed files with 319 additions and 289 deletions

View File

@ -70,8 +70,8 @@ class _NodeSnapshot:
"""Empty string means the node is not executing inside an iteration."""
loop_id: str = ""
"""Empty string means the node is not executing inside a loop."""
mention_parent_id: str = ""
"""Empty string means the node is not an extractor node."""
parent_node_id: str = ""
"""Empty string means the node is not an nested node (extractor node)."""
class WorkflowResponseConverter:
@ -133,7 +133,7 @@ class WorkflowResponseConverter:
start_at=event.start_at,
iteration_id=event.in_iteration_id or "",
loop_id=event.in_loop_id or "",
mention_parent_id=event.in_mention_parent_id or "",
parent_node_id=event.in_parent_node_id or "",
)
node_execution_id = NodeExecutionId(event.node_execution_id)
self._node_snapshots[node_execution_id] = snapshot
@ -290,7 +290,7 @@ class WorkflowResponseConverter:
created_at=int(snapshot.start_at.timestamp()),
iteration_id=event.in_iteration_id,
loop_id=event.in_loop_id,
mention_parent_id=event.in_mention_parent_id,
parent_node_id=event.in_parent_node_id,
agent_strategy=event.agent_strategy,
),
)
@ -377,7 +377,7 @@ class WorkflowResponseConverter:
files=self.fetch_files_from_node_outputs(event.outputs or {}),
iteration_id=event.in_iteration_id,
loop_id=event.in_loop_id,
mention_parent_id=event.in_mention_parent_id,
parent_node_id=event.in_parent_node_id,
),
)
@ -427,7 +427,7 @@ class WorkflowResponseConverter:
files=self.fetch_files_from_node_outputs(event.outputs or {}),
iteration_id=event.in_iteration_id,
loop_id=event.in_loop_id,
mention_parent_id=event.in_mention_parent_id,
parent_node_id=event.in_parent_node_id,
retry_index=event.retry_index,
),
)

View File

@ -385,7 +385,7 @@ class WorkflowBasedAppRunner:
start_at=event.start_at,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
in_mention_parent_id=event.in_mention_parent_id,
in_parent_node_id=event.in_parent_node_id,
inputs=inputs,
process_data=process_data,
outputs=outputs,
@ -406,7 +406,7 @@ class WorkflowBasedAppRunner:
start_at=event.start_at,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
in_mention_parent_id=event.in_mention_parent_id,
in_parent_node_id=event.in_parent_node_id,
agent_strategy=event.agent_strategy,
provider_type=event.provider_type,
provider_id=event.provider_id,
@ -430,7 +430,7 @@ class WorkflowBasedAppRunner:
execution_metadata=execution_metadata,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
in_mention_parent_id=event.in_mention_parent_id,
in_parent_node_id=event.in_parent_node_id,
)
)
elif isinstance(event, NodeRunFailedEvent):
@ -447,7 +447,7 @@ class WorkflowBasedAppRunner:
execution_metadata=event.node_run_result.metadata,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
in_mention_parent_id=event.in_mention_parent_id,
in_parent_node_id=event.in_parent_node_id,
)
)
elif isinstance(event, NodeRunExceptionEvent):
@ -464,7 +464,7 @@ class WorkflowBasedAppRunner:
execution_metadata=event.node_run_result.metadata,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
in_mention_parent_id=event.in_mention_parent_id,
in_parent_node_id=event.in_parent_node_id,
)
)
elif isinstance(event, NodeRunStreamChunkEvent):
@ -482,7 +482,7 @@ class WorkflowBasedAppRunner:
chunk_type=QueueChunkType(event.chunk_type.value),
tool_call=event.tool_call,
tool_result=event.tool_result,
in_mention_parent_id=event.in_mention_parent_id,
in_parent_node_id=event.in_parent_node_id,
)
)
elif isinstance(event, NodeRunRetrieverResourceEvent):
@ -491,7 +491,7 @@ class WorkflowBasedAppRunner:
retriever_resources=event.retriever_resources,
in_iteration_id=event.in_iteration_id,
in_loop_id=event.in_loop_id,
in_mention_parent_id=event.in_mention_parent_id,
in_parent_node_id=event.in_parent_node_id,
)
)
elif isinstance(event, NodeRunAgentLogEvent):

View File

@ -201,7 +201,7 @@ class QueueTextChunkEvent(AppQueueEvent):
"""iteration id if node is in iteration"""
in_loop_id: str | None = None
"""loop id if node is in loop"""
in_mention_parent_id: str | None = None
in_parent_node_id: str | None = None
"""parent node id if this is an extractor node event"""
# Extended fields for Agent/Tool streaming
@ -252,7 +252,7 @@ class QueueRetrieverResourcesEvent(AppQueueEvent):
"""iteration id if node is in iteration"""
in_loop_id: str | None = None
"""loop id if node is in loop"""
in_mention_parent_id: str | None = None
in_parent_node_id: str | None = None
"""parent node id if this is an extractor node event"""
@ -331,7 +331,7 @@ class QueueNodeStartedEvent(AppQueueEvent):
node_run_index: int = 1  # FIXME(-LAN-): may not be used
in_iteration_id: str | None = None
in_loop_id: str | None = None
in_mention_parent_id: str | None = None
in_parent_node_id: str | None = None
"""parent node id if this is an extractor node event"""
start_at: datetime
agent_strategy: AgentNodeStrategyInit | None = None
@ -355,7 +355,7 @@ class QueueNodeSucceededEvent(AppQueueEvent):
"""iteration id if node is in iteration"""
in_loop_id: str | None = None
"""loop id if node is in loop"""
in_mention_parent_id: str | None = None
in_parent_node_id: str | None = None
"""parent node id if this is an extractor node event"""
start_at: datetime
@ -412,7 +412,7 @@ class QueueNodeExceptionEvent(AppQueueEvent):
"""iteration id if node is in iteration"""
in_loop_id: str | None = None
"""loop id if node is in loop"""
in_mention_parent_id: str | None = None
in_parent_node_id: str | None = None
"""parent node id if this is an extractor node event"""
start_at: datetime
@ -438,7 +438,7 @@ class QueueNodeFailedEvent(AppQueueEvent):
"""iteration id if node is in iteration"""
in_loop_id: str | None = None
"""loop id if node is in loop"""
in_mention_parent_id: str | None = None
in_parent_node_id: str | None = None
"""parent node id if this is an extractor node event"""
start_at: datetime

View File

@ -294,7 +294,7 @@ class NodeStartStreamResponse(StreamResponse):
extras: dict[str, object] = Field(default_factory=dict)
iteration_id: str | None = None
loop_id: str | None = None
mention_parent_id: str | None = None
parent_node_id: str | None = None
agent_strategy: AgentNodeStrategyInit | None = None
event: StreamEvent = StreamEvent.NODE_STARTED
@ -318,7 +318,7 @@ class NodeStartStreamResponse(StreamResponse):
"extras": {},
"iteration_id": self.data.iteration_id,
"loop_id": self.data.loop_id,
"mention_parent_id": self.data.mention_parent_id,
"parent_node_id": self.data.parent_node_id,
},
}
@ -354,7 +354,7 @@ class NodeFinishStreamResponse(StreamResponse):
files: Sequence[Mapping[str, Any]] | None = []
iteration_id: str | None = None
loop_id: str | None = None
mention_parent_id: str | None = None
parent_node_id: str | None = None
event: StreamEvent = StreamEvent.NODE_FINISHED
workflow_run_id: str
@ -384,7 +384,7 @@ class NodeFinishStreamResponse(StreamResponse):
"files": [],
"iteration_id": self.data.iteration_id,
"loop_id": self.data.loop_id,
"mention_parent_id": self.data.mention_parent_id,
"parent_node_id": self.data.parent_node_id,
},
}
@ -420,7 +420,7 @@ class NodeRetryStreamResponse(StreamResponse):
files: Sequence[Mapping[str, Any]] | None = []
iteration_id: str | None = None
loop_id: str | None = None
mention_parent_id: str | None = None
parent_node_id: str | None = None
retry_index: int = 0
event: StreamEvent = StreamEvent.NODE_RETRY
@ -451,7 +451,7 @@ class NodeRetryStreamResponse(StreamResponse):
"files": [],
"iteration_id": self.data.iteration_id,
"loop_id": self.data.loop_id,
"mention_parent_id": self.data.mention_parent_id,
"parent_node_id": self.data.parent_node_id,
"retry_index": self.data.retry_index,
},
}

View File

@ -813,7 +813,19 @@ Parameter: {parameter_info.get("name")} ({param_type}) - {parameter_info.get("de
if isinstance(v, dict)
]
outputs = content.get("outputs", {"result": {"type": parameter_type}})
# Convert outputs from array format [{name, type}] to dict format {name: {type}}
# Array format is required for OpenAI/Azure strict JSON schema compatibility
raw_outputs = content.get("outputs", [])
if isinstance(raw_outputs, list):
outputs = {
item.get("name", "result"): {"type": item.get("type", parameter_type)}
for item in raw_outputs
if isinstance(item, dict) and item.get("name")
}
if not outputs:
outputs = {"result": {"type": parameter_type}}
else:
outputs = raw_outputs or {"result": {"type": parameter_type}}
return {
"variables": variables,

View File

@ -3,32 +3,65 @@ from __future__ import annotations
from pydantic import BaseModel, ConfigDict, Field
from core.variables.types import SegmentType
from core.workflow.nodes.base.entities import VariableSelector
class SuggestedQuestionsOutput(BaseModel):
"""Output model for suggested questions generation."""
model_config = ConfigDict(extra="forbid")
questions: list[str] = Field(min_length=3, max_length=3)
questions: list[str] = Field(
min_length=3,
max_length=3,
description="Exactly 3 suggested follow-up questions for the user",
)
class CodeNodeOutput(BaseModel):
class VariableSelectorOutput(BaseModel):
"""Variable selector mapping code variable to upstream node output.
Note: Separate from VariableSelector to ensure 'additionalProperties: false'
in JSON schema for OpenAI/Azure strict mode.
"""
model_config = ConfigDict(extra="forbid")
type: SegmentType
variable: str = Field(description="Variable name used in the generated code")
value_selector: list[str] = Field(description="Path to upstream node output, format: [node_id, output_name]")
class CodeNodeOutputItem(BaseModel):
"""Single output variable definition.
Note: OpenAI/Azure strict mode requires 'additionalProperties: false' and
does not support dynamic object keys, so outputs use array format.
"""
model_config = ConfigDict(extra="forbid")
name: str = Field(description="Output variable name returned by the main function")
type: SegmentType = Field(description="Data type of the output variable")
class CodeNodeStructuredOutput(BaseModel):
"""Structured output for code node generation."""
model_config = ConfigDict(extra="forbid")
variables: list[VariableSelector]
code: str
outputs: dict[str, CodeNodeOutput]
explanation: str
variables: list[VariableSelectorOutput] = Field(
description="Input variables mapping code variables to upstream node outputs"
)
code: str = Field(description="Generated code with a main function that processes inputs and returns outputs")
outputs: list[CodeNodeOutputItem] = Field(
description="Output variable definitions specifying name and type for each return value"
)
explanation: str = Field(description="Brief explanation of what the generated code does")
class InstructionModifyOutput(BaseModel):
"""Output model for instruction-based prompt modification."""
model_config = ConfigDict(extra="forbid")
modified: str
message: str
modified: str = Field(description="The modified prompt content after applying the instruction")
message: str = Field(description="Brief explanation of what changes were made")

View File

@ -1058,10 +1058,10 @@ class ToolManager:
elif tool_input.type == "mixed":
segment_group = variable_pool.convert_template(str(tool_input.value))
parameter_value = segment_group.text
elif tool_input.type == "mention":
# Mention type not supported in agent mode
elif tool_input.type == "nested_node":
# Nested node type not supported in agent mode
raise ToolParameterError(
f"Mention type not supported in agent for parameter '{parameter.name}'"
f"Nested node type not supported in agent for parameter '{parameter.name}'"
)
else:
raise ToolParameterError(f"Unknown tool input type '{tool_input.type}'")

View File

@ -256,7 +256,7 @@ class WorkflowNodeExecutionMetadataKey(StrEnum):
LLM_CONTENT_SEQUENCE = "llm_content_sequence"
LLM_TRACE = "llm_trace"
COMPLETED_REASON = "completed_reason" # completed reason for loop node
MENTION_PARENT_ID = "mention_parent_id" # parent node id for extractor nodes
PARENT_NODE_ID = "parent_node_id" # parent node id for nested nodes (extractor nodes)
class WorkflowNodeExecutionStatus(StrEnum):

View File

@ -94,7 +94,7 @@ class EventHandler:
event: The event to handle
"""
# Events in loops, iterations, or extractor groups are always collected
if event.in_loop_id or event.in_iteration_id or event.in_mention_parent_id:
if event.in_loop_id or event.in_iteration_id or event.in_parent_node_id:
self._event_collector.collect(event)
return
return self._dispatch(event)

View File

@ -68,7 +68,7 @@ class _NodeRuntimeSnapshot:
predecessor_node_id: str | None
iteration_id: str | None
loop_id: str | None
mention_parent_id: str | None
parent_node_id: str | None
created_at: datetime
@ -231,7 +231,7 @@ class WorkflowPersistenceLayer(GraphEngineLayer):
metadata = {
WorkflowNodeExecutionMetadataKey.ITERATION_ID: event.in_iteration_id,
WorkflowNodeExecutionMetadataKey.LOOP_ID: event.in_loop_id,
WorkflowNodeExecutionMetadataKey.MENTION_PARENT_ID: event.in_mention_parent_id,
WorkflowNodeExecutionMetadataKey.PARENT_NODE_ID: event.in_parent_node_id,
}
domain_execution = WorkflowNodeExecution(
@ -258,7 +258,7 @@ class WorkflowPersistenceLayer(GraphEngineLayer):
predecessor_node_id=event.predecessor_node_id,
iteration_id=event.in_iteration_id,
loop_id=event.in_loop_id,
mention_parent_id=event.in_mention_parent_id,
parent_node_id=event.in_parent_node_id,
created_at=event.start_at,
)
self._node_snapshots[event.id] = snapshot

View File

@ -21,10 +21,10 @@ class GraphNodeEventBase(GraphEngineEvent):
"""iteration id if node is in iteration"""
in_loop_id: str | None = None
"""loop id if node is in loop"""
in_mention_parent_id: str | None = None
"""Parent node id if this is an extractor node event.
in_parent_node_id: str | None = None
"""Parent node id if this is a nested node event.
When set, indicates this event belongs to an extractor node that
When set, indicates this event belongs to a nested node that
is extracting values for the specified parent node.
"""

View File

@ -288,59 +288,45 @@ class Node(Generic[NodeDataT]):
extractor_configs.append(node_config)
return extractor_configs
def _execute_mention_nodes(self) -> Generator[GraphNodeEventBase, None, None]:
def _execute_nested_nodes(self) -> Generator[GraphNodeEventBase, None, None]:
"""
Execute all extractor nodes associated with this node.
Execute all nested nodes associated with this node.
Extractor nodes are nodes with parent_node_id == self._node_id.
Nested nodes are nodes with parent_node_id == self._node_id.
They are executed before the main node to extract values from list[PromptMessage].
"""
from core.workflow.nodes.node_mapping import LATEST_VERSION, NODE_TYPE_CLASSES_MAPPING
from core.workflow.nodes.node_factory import DifyNodeFactory
extractor_configs = self._find_extractor_node_configs()
logger.debug("[Extractor] Found %d extractor nodes for parent '%s'", len(extractor_configs), self._node_id)
logger.debug("[NestedNode] Found %d nested nodes for parent '%s'", len(extractor_configs), self._node_id)
if not extractor_configs:
return
# Use DifyNodeFactory to properly instantiate nodes with required dependencies
node_factory = DifyNodeFactory(
graph_init_params=self._graph_init_params,
graph_runtime_state=self.graph_runtime_state,
)
for config in extractor_configs:
node_id = config.get("id")
node_data = config.get("data", {})
node_type_str = node_data.get("type")
if not node_id or not node_type_str:
if not node_id:
continue
# Get node class
try:
node_type = NodeType(node_type_str)
nested_node = node_factory.create_node(config)
except ValueError:
# Skip nodes that cannot be created (e.g., unknown type)
continue
node_mapping = NODE_TYPE_CLASSES_MAPPING.get(node_type)
if not node_mapping:
continue
node_version = str(node_data.get("version", "1"))
node_cls = node_mapping.get(node_version) or node_mapping.get(LATEST_VERSION)
if not node_cls:
continue
# Instantiate and execute the extractor node
extractor_node = node_cls(
id=node_id,
config=config,
graph_init_params=self._graph_init_params,
graph_runtime_state=self.graph_runtime_state,
)
# Execute and process extractor node events
for event in extractor_node.run():
# Execute and process nested node events
for event in nested_node.run():
# Tag event with parent node id for stream ordering and history tracking
if isinstance(event, GraphNodeEventBase):
event.in_mention_parent_id = self._node_id
event.in_parent_node_id = self._node_id
if isinstance(event, NodeRunSucceededEvent):
# Store extractor node outputs in variable pool
# Store nested node outputs in variable pool
outputs: Mapping[str, Any] = event.node_run_result.outputs
for variable_name, variable_value in outputs.items():
self.graph_runtime_state.variable_pool.add((node_id, variable_name), variable_value)
@ -351,8 +337,8 @@ class Node(Generic[NodeDataT]):
execution_id = self.ensure_execution_id()
self._start_at = naive_utc_now()
# Step 1: Execute associated extractor nodes before main node execution
yield from self._execute_mention_nodes()
# Step 1: Execute associated nested nodes before main node execution
yield from self._execute_nested_nodes()
# Create and push start event with required fields
start_event = NodeRunStartedEvent(

View File

@ -8,17 +8,17 @@ from pydantic_core.core_schema import ValidationInfo
from core.tools.entities.tool_entities import ToolProviderType
from core.workflow.nodes.base.entities import BaseNodeData
# Pattern to match mention value format: {{@node.context@}}instruction
# Pattern to match nested_node value format: {{@node.context@}}instruction
# The placeholder {{@node.context@}} must appear at the beginning
# Format: {{@agent_node_id.context@}} where agent_node_id is dynamic, context is fixed
MENTION_VALUE_PATTERN = re.compile(r"^\{\{@([a-zA-Z0-9_]+)\.context@\}\}(.*)$", re.DOTALL)
NESTED_NODE_VALUE_PATTERN = re.compile(r"^\{\{@([a-zA-Z0-9_]+)\.context@\}\}(.*)$", re.DOTALL)
def parse_mention_value(value: str) -> tuple[str, str]:
"""Parse mention value into (node_id, instruction).
def parse_nested_node_value(value: str) -> tuple[str, str]:
"""Parse nested_node value into (node_id, instruction).
Args:
value: The mention value string like "{{@llm.context@}}extract keywords"
value: The nested_node value string like "{{@llm.context@}}extract keywords"
Returns:
Tuple of (node_id, instruction)
@ -26,16 +26,16 @@ def parse_mention_value(value: str) -> tuple[str, str]:
Raises:
ValueError: If value format is invalid
"""
match = MENTION_VALUE_PATTERN.match(value)
match = NESTED_NODE_VALUE_PATTERN.match(value)
if not match:
raise ValueError(
"For mention type, value must start with {{@node.context@}} placeholder, "
"For nested_node type, value must start with {{@node.context@}} placeholder, "
"e.g., '{{@llm.context@}}extract keywords'"
)
return match.group(1), match.group(2)
class MentionConfig(BaseModel):
class NestedNodeConfig(BaseModel):
"""Configuration for extracting value from context variable.
Used when a tool parameter needs to be extracted from list[PromptMessage]
@ -87,9 +87,9 @@ class ToolNodeData(BaseNodeData, ToolEntity):
class ToolInput(BaseModel):
# TODO: check this type
value: Union[Any, list[str]]
type: Literal["mixed", "variable", "constant", "mention"]
# Required config for mention type, extracting value from context variable
mention_config: MentionConfig | None = None
type: Literal["mixed", "variable", "constant", "nested_node"]
# Required config for nested_node type, extracting value from context variable
nested_node_config: NestedNodeConfig | None = None
@field_validator("type", mode="before")
@classmethod
@ -102,7 +102,7 @@ class ToolNodeData(BaseNodeData, ToolEntity):
if typ == "mixed" and not isinstance(value, str):
raise ValueError("value must be a string")
elif typ == "mention":
elif typ == "nested_node":
# Skip here, will be validated in model_validator
pass
elif typ == "variable":
@ -116,9 +116,9 @@ class ToolNodeData(BaseNodeData, ToolEntity):
return typ
@model_validator(mode="after")
def check_mention_type(self) -> Self:
"""Validate mention type with mention_config."""
if self.type != "mention":
def check_nested_node_type(self) -> Self:
"""Validate nested_node type with nested_node_config."""
if self.type != "nested_node":
return self
value = self.value
@ -126,13 +126,13 @@ class ToolNodeData(BaseNodeData, ToolEntity):
return self
if not isinstance(value, str):
raise ValueError("value must be a string for mention type")
# For mention type, value must match format: {{@node.context@}}instruction
raise ValueError("value must be a string for nested_node type")
# For nested_node type, value must match format: {{@node.context@}}instruction
# This will raise ValueError if format is invalid
parse_mention_value(value)
# mention_config is required for mention type
if self.mention_config is None:
raise ValueError("mention_config is required for mention type")
parse_nested_node_value(value)
# nested_node_config is required for nested_node type
if self.nested_node_config is None:
raise ValueError("nested_node_config is required for nested_node type")
return self
tool_parameters: dict[str, ToolInput]

View File

@ -212,16 +212,16 @@ class ToolNode(Node[ToolNodeData]):
raise ToolParameterError(f"Variable {selector} does not exist")
continue
parameter_value = variable.value
elif tool_input.type == "mention":
# Mention type: get value from extractor node's output
if tool_input.mention_config is None:
elif tool_input.type == "nested_node":
# Nested node type: get value from extractor node's output
if tool_input.nested_node_config is None:
raise ToolParameterError(
f"mention_config is required for mention type parameter '{parameter_name}'"
f"nested_node_config is required for nested_node type parameter '{parameter_name}'"
)
mention_config = tool_input.mention_config.model_dump()
nested_node_config = tool_input.nested_node_config.model_dump()
try:
parameter_value, found = variable_pool.resolve_mention(
mention_config, parameter_name=parameter_name
parameter_value, found = variable_pool.resolve_nested_node(
nested_node_config, parameter_name=parameter_name
)
if not found and parameter.required:
raise ToolParameterError(
@ -518,8 +518,8 @@ class ToolNode(Node[ToolNodeData]):
if isinstance(input.value, list):
selector_key = ".".join(input.value)
result[f"#{selector_key}#"] = input.value
elif input.type == "mention":
# Mention type: value is handled by extractor node, no direct variable reference
elif input.type == "nested_node":
# Nested node type: value is handled by extractor node, no direct variable reference
pass
elif input.type == "constant":
pass

View File

@ -79,8 +79,7 @@ class ReadOnlyGraphRuntimeState(Protocol):
...
@property
def sandbox(self) -> Any:
...
def sandbox(self) -> Any: ...
def dumps(self) -> str:
"""Serialize the runtime state into a JSON snapshot (read-only)."""

View File

@ -268,21 +268,21 @@ class VariablePool(BaseModel):
continue
self.add(selector, value)
def resolve_mention(
def resolve_nested_node(
self,
mention_config: Mapping[str, Any],
nested_node_config: Mapping[str, Any],
/,
*,
parameter_name: str = "",
) -> tuple[Any, bool]:
"""
Resolve a mention parameter value from an extractor node's output.
Resolve a nested_node parameter value from an extractor node's output.
Mention parameters reference values extracted by an extractor LLM node
Nested node parameters reference values extracted by an extractor LLM node
from list[PromptMessage] context.
Args:
mention_config: A dict containing:
nested_node_config: A dict containing:
- extractor_node_id: ID of the extractor LLM node
- output_selector: Selector path for the output variable (e.g., ["text"])
- null_strategy: "raise_error" or "use_default"
@ -298,13 +298,13 @@ class VariablePool(BaseModel):
ValueError: If extractor_node_id is missing, or if null_strategy is
"raise_error" and the value is not found
"""
extractor_node_id = mention_config.get("extractor_node_id")
extractor_node_id = nested_node_config.get("extractor_node_id")
if not extractor_node_id:
raise ValueError(f"Missing extractor_node_id for mention parameter '{parameter_name}'")
raise ValueError(f"Missing extractor_node_id for nested_node parameter '{parameter_name}'")
output_selector = list(mention_config.get("output_selector", []))
null_strategy = mention_config.get("null_strategy", "raise_error")
default_value = mention_config.get("default_value")
output_selector = list(nested_node_config.get("output_selector", []))
null_strategy = nested_node_config.get("null_strategy", "raise_error")
default_value = nested_node_config.get("default_value")
# Build full selector: [extractor_node_id, ...output_selector]
full_selector = [extractor_node_id] + output_selector