Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine

-LAN-
2025-09-06 16:05:13 +08:00
295 changed files with 769 additions and 793 deletions

View File

@@ -41,5 +41,5 @@ class FileTypeNotSupportError(LLMNodeError):
 
 
 class UnsupportedPromptContentTypeError(LLMNodeError):
-    def __init__(self, *, type_name: str) -> None:
+    def __init__(self, *, type_name: str):
         super().__init__(f"Prompt content type {type_name} is not supported.")

View File

@@ -107,7 +107,7 @@ def fetch_memory(
     return memory
 
 
-def deduct_llm_quota(tenant_id: str, model_instance: ModelInstance, usage: LLMUsage) -> None:
+def deduct_llm_quota(tenant_id: str, model_instance: ModelInstance, usage: LLMUsage):
     provider_model_bundle = model_instance.provider_model_bundle
     provider_configuration = provider_model_bundle.configuration

View File

@@ -119,7 +119,7 @@ class LLMNode(Node):
         graph_runtime_state: "GraphRuntimeState",
         *,
         llm_file_saver: LLMFileSaver | None = None,
-    ) -> None:
+    ):
         super().__init__(
             id=id,
             config=config,
@@ -136,7 +136,7 @@ class LLMNode(Node):
         )
         self._llm_file_saver = llm_file_saver
 
-    def init_node_data(self, data: Mapping[str, Any]) -> None:
+    def init_node_data(self, data: Mapping[str, Any]):
         self._node_data = LLMNodeData.model_validate(data)
 
     def _get_error_strategy(self) -> Optional[ErrorStrategy]:
@@ -959,7 +959,7 @@ class LLMNode(Node):
         return variable_mapping
 
     @classmethod
-    def get_default_config(cls, filters: Optional[dict] = None) -> dict:
+    def get_default_config(cls, filters: Optional[dict] = None):
        return {
            "type": "llm",
            "config": {