Files
dify/api/dify_graph/nodes/llm/protocols.py
-LAN- 56593f20b0 refactor(api): continue decoupling dify_graph from API concerns (#33580)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: WH-2099 <wh2099@pm.me>
2026-03-25 20:32:24 +08:00

22 lines
728 B
Python

from __future__ import annotations
from typing import Any, Protocol
from dify_graph.nodes.llm.runtime_protocols import PreparedLLMProtocol
class CredentialsProvider(Protocol):
    """Port for loading runtime credentials for a provider/model pair.

    Structural interface (PEP 544): any object exposing a matching
    ``fetch`` method satisfies this protocol — no inheritance needed.
    Lets dify_graph stay decoupled from how the host application
    stores or resolves credentials.
    """

    def fetch(self, provider_name: str, model_name: str) -> dict[str, Any]:
        """Return credentials for the target provider/model or raise a domain error.

        Args:
            provider_name: Identifier of the model provider.
            model_name: Identifier of the model within that provider.

        Returns:
            A credentials mapping for the provider/model pair.
            # NOTE(review): exact key schema is defined by the implementing
            # adapter — not visible from this module.

        Raises:
            A domain-level error when credentials cannot be resolved
            (concrete exception type is up to the implementation).
        """
        ...
class ModelFactory(Protocol):
    """Port for creating prepared graph-facing LLM runtimes for execution.

    Structural interface (PEP 544): any object exposing a matching
    ``init_model_instance`` method satisfies this protocol. Keeps the
    graph layer independent of how model instances are constructed.
    """

    def init_model_instance(self, provider_name: str, model_name: str) -> PreparedLLMProtocol:
        """Create a prepared LLM runtime that is ready for graph execution.

        Args:
            provider_name: Identifier of the model provider.
            model_name: Identifier of the model within that provider.

        Returns:
            A ``PreparedLLMProtocol`` runtime the graph can invoke directly.
        """
        ...