mirror of
https://github.com/langgenius/dify.git
synced 2026-05-05 01:48:04 +08:00
feat: add inner graph api
This commit is contained in:
@ -163,3 +163,29 @@ class WorkflowScheduleCFSPlanEntity(BaseModel):
|
||||
|
||||
schedule_strategy: Strategy
|
||||
granularity: int = Field(default=-1) # -1 means infinite
|
||||
|
||||
|
||||
# ========== Mention Graph Entities ==========
|
||||
|
||||
|
||||
class MentionParameterSchema(BaseModel):
    """Schema for the parameter to be extracted from mention context.

    Describes one parameter (name, type, description) that the generated
    Mention LLM node extracts from the conversation context via
    structured output.
    """

    # Parameter identifier; used as the structured-output property key.
    name: str = Field(description="Parameter name (e.g., 'query')")
    # JSON-schema style type string for the extracted value.
    type: str = Field(default="string", description="Parameter type (e.g., 'string', 'number')")
    # Human-readable hint given to the LLM to guide extraction.
    description: str = Field(default="", description="Parameter description for LLM")
|
||||
|
||||
|
||||
class MentionGraphRequest(BaseModel):
    """Request payload for generating mention graph.

    Consumed by MentionGraphService.generate_mention_graph to build a
    single-node graph containing a Mention LLM node.
    """

    # Node that will consume the extracted value; also used to derive the
    # mention node's ID ({parent_node_id}_ext_{parameter_key}).
    parent_node_id: str = Field(description="ID of the parent node that uses the extracted value")
    parameter_key: str = Field(description="Key of the parameter being extracted")
    # Variable selector pointing at the list[PromptMessage] context source.
    context_source: list[str] = Field(description="Variable selector for the context source")
    parameter_schema: MentionParameterSchema = Field(description="Schema of the parameter to extract")
|
||||
|
||||
|
||||
class MentionGraphResponse(BaseModel):
    """Response containing the generated mention graph."""

    # Graph mapping with "nodes", "edges", and "viewport" keys, as produced
    # by MentionGraphService.
    graph: Mapping[str, Any] = Field(description="Complete graph structure with nodes, edges, viewport")
|
||||
|
||||
140
api/services/workflow/mention_graph_service.py
Normal file
140
api/services/workflow/mention_graph_service.py
Normal file
@ -0,0 +1,140 @@
|
||||
"""
|
||||
Service for generating Mention LLM node graph structures.
|
||||
|
||||
This service creates graph structures containing LLM nodes configured for
|
||||
extracting values from list[PromptMessage] variables.
|
||||
"""
|
||||
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from core.model_runtime.entities import LLMMode
|
||||
from core.workflow.enums import NodeType
|
||||
from services.model_provider_service import ModelProviderService
|
||||
from services.workflow.entities import MentionGraphRequest, MentionGraphResponse, MentionParameterSchema
|
||||
|
||||
|
||||
class MentionGraphService:
    """Service for generating Mention LLM node graph structures."""

    def __init__(self, session: Session):
        # Session is kept for parity with sibling services; the current
        # generation logic does not touch the database directly.
        self._session = session

    def generate_mention_node_id(self, node_id: str, parameter_name: str) -> str:
        """Generate mention node ID following the naming convention.

        Format: {node_id}_ext_{parameter_name}
        """
        return "_ext_".join((node_id, parameter_name))

    def generate_mention_graph(self, tenant_id: str, request: MentionGraphRequest) -> MentionGraphResponse:
        """Generate a complete graph structure containing a Mention LLM node.

        Args:
            tenant_id: The tenant ID for fetching default model config
            request: The mention graph generation request

        Returns:
            Complete graph structure with nodes, edges, and viewport
        """
        mention_node = self._build_mention_llm_node(
            node_id=self.generate_mention_node_id(request.parent_node_id, request.parameter_key),
            parent_node_id=request.parent_node_id,
            context_source=request.context_source,
            parameter_schema=request.parameter_schema,
            model_config=self._get_default_model_config(tenant_id),
        )
        # A mention graph is a single detached node: no edges, empty viewport.
        return MentionGraphResponse(graph={"nodes": [mention_node], "edges": [], "viewport": {}})

    def _get_default_model_config(self, tenant_id: str) -> dict[str, Any]:
        """Get the default LLM model configuration for the tenant."""
        default_model = ModelProviderService().get_default_model_of_model_type(
            tenant_id=tenant_id,
            model_type="llm",
        )

        # Start from the empty fallback used when no default model is
        # configured, then fill in provider/name if one exists.
        config: dict[str, Any] = {
            "provider": "",
            "name": "",
            "mode": LLMMode.CHAT.value,
            "completion_params": {},
        }
        if default_model:
            config["provider"] = default_model.provider.provider
            config["name"] = default_model.model
        return config

    def _build_mention_llm_node(
        self,
        *,
        node_id: str,
        parent_node_id: str,
        context_source: list[str],
        parameter_schema: MentionParameterSchema,
        model_config: dict[str, Any],
    ) -> dict[str, Any]:
        """Build the Mention LLM node structure.

        The node uses:
        - $context in prompt_template to reference the PromptMessage list
        - structured_output for extracting the specific parameter
        - parent_node_id to associate with the parent node
        """
        param_name = parameter_schema.name

        # System instruction, the injected context placeholder, then an
        # empty user turn.
        messages: list[dict[str, Any]] = [
            {
                "role": "system",
                "text": "Extract the required parameter value from the conversation context above.",
            },
            {"$context": context_source},
            {"role": "user", "text": ""},
        ]

        # JSON-schema style object with a single required property.
        output_schema: dict[str, Any] = {
            "type": "object",
            "properties": {
                param_name: {
                    "type": parameter_schema.type,
                    "description": parameter_schema.description,
                }
            },
            "required": [param_name],
        }

        node_data: dict[str, Any] = {
            "type": NodeType.LLM.value,
            "title": f"Mention: {param_name}",
            "desc": f"Extract {param_name} from conversation context",
            "parent_node_id": parent_node_id,
            "model": model_config,
            "prompt_template": messages,
            "context": {
                "enabled": False,
                "variable_selector": None,
            },
            "vision": {
                "enabled": False,
            },
            "memory": None,
            "structured_output_enabled": True,
            "structured_output": output_schema,
        }

        return {
            "id": node_id,
            "position": {"x": 0, "y": 0},
            "data": node_data,
        }
|
||||
Reference in New Issue
Block a user