mirror of
https://github.com/langgenius/dify.git
synced 2026-05-04 01:18:05 +08:00
refactor: update model attribute references from 'model' to 'model_name' across multiple files and introduce new fetch_model_config function in llm_utils.py
This commit is contained in:
@@ -36,6 +36,7 @@ from core.workflow.nodes.agent.exceptions import (
|
||||
ToolFileNotFoundError,
|
||||
)
|
||||
from dify_graph.enums import (
|
||||
BuiltinNodeTypes,
|
||||
NodeType,
|
||||
SystemVariableKey,
|
||||
WorkflowNodeExecutionMetadataKey,
|
||||
@@ -79,7 +80,7 @@ class AgentNode(Node[AgentNodeData]):
|
||||
Agent Node
|
||||
"""
|
||||
|
||||
node_type = NodeType.AGENT
|
||||
node_type = BuiltinNodeTypes.AGENT
|
||||
|
||||
@classmethod
|
||||
def version(cls) -> str:
|
||||
@@ -740,7 +741,7 @@ class AgentNode(Node[AgentNodeData]):
|
||||
)
|
||||
elif message.type == ToolInvokeMessage.MessageType.JSON:
|
||||
assert isinstance(message.message, ToolInvokeMessage.JsonMessage)
|
||||
if node_type == NodeType.AGENT:
|
||||
if node_type == BuiltinNodeTypes.AGENT:
|
||||
if isinstance(message.message.json_object, dict):
|
||||
msg_metadata: dict[str, Any] = message.message.json_object.pop("execution_metadata", {})
|
||||
llm_usage = LLMUsage.from_metadata(cast(LLMUsageMetadata, msg_metadata))
|
||||
|
||||
@@ -47,6 +47,20 @@ from .exc import (
|
||||
from .protocols import TemplateRenderer
|
||||
|
||||
|
||||
def fetch_model_config(
|
||||
*, tenant_id: str, node_data_model: ModelConfig
|
||||
) -> tuple[ModelInstance, Any]:
|
||||
from core.app.llm.model_access import build_dify_model_access
|
||||
from core.app.llm.model_access import fetch_model_config as _fetch
|
||||
|
||||
credentials_provider, model_factory = build_dify_model_access(tenant_id)
|
||||
return _fetch(
|
||||
node_data_model=node_data_model,
|
||||
credentials_provider=credentials_provider,
|
||||
model_factory=model_factory,
|
||||
)
|
||||
|
||||
|
||||
def fetch_model_schema(*, model_instance: ModelInstance) -> AIModelEntity:
|
||||
model_schema = cast(LargeLanguageModel, model_instance.model_type_instance).get_model_schema(
|
||||
model_instance.model_name,
|
||||
|
||||
@@ -1985,7 +1985,7 @@ class LLMNode(Node[LLMNodeData]):
|
||||
try:
|
||||
model_type_instance = model_instance.model_type_instance
|
||||
model_schema = model_type_instance.get_model_schema(
|
||||
model_instance.model,
|
||||
model_instance.model_name,
|
||||
model_instance.credentials,
|
||||
)
|
||||
return model_schema.features if model_schema and model_schema.features else []
|
||||
|
||||
Reference in New Issue
Block a user