Merge remote-tracking branch 'upstream/main' into feat/rag-2

This commit is contained in:
QuantumGhost
2025-09-16 14:59:35 +08:00
791 changed files with 24372 additions and 7085 deletions

View File

@@ -1,5 +1,5 @@
from collections.abc import Generator
from typing import Any, Optional
from typing import Any
from core.agent.entities import AgentInvokeMessage
from core.plugin.entities.plugin_daemon import (
@@ -82,10 +82,10 @@ class PluginAgentClient(BasePluginClient):
agent_provider: str,
agent_strategy: str,
agent_params: dict[str, Any],
conversation_id: Optional[str] = None,
app_id: Optional[str] = None,
message_id: Optional[str] = None,
context: Optional[PluginInvokeContext] = None,
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
context: PluginInvokeContext | None = None,
) -> Generator[AgentInvokeMessage, None, None]:
"""
Invoke the agent with the given tenant, user, plugin, provider, name and parameters.

View File

@@ -1,6 +1,6 @@
import binascii
from collections.abc import Generator, Sequence
from typing import IO, Optional
from typing import IO
from core.model_runtime.entities.llm_entities import LLMResultChunk
from core.model_runtime.entities.message_entities import PromptMessage, PromptMessageTool
@@ -151,9 +151,9 @@ class PluginModelClient(BasePluginClient):
model: str,
credentials: dict,
prompt_messages: list[PromptMessage],
model_parameters: Optional[dict] = None,
tools: Optional[list[PromptMessageTool]] = None,
stop: Optional[list[str]] = None,
model_parameters: dict | None = None,
tools: list[PromptMessageTool] | None = None,
stop: list[str] | None = None,
stream: bool = True,
) -> Generator[LLMResultChunk, None, None]:
"""
@@ -200,7 +200,7 @@ class PluginModelClient(BasePluginClient):
model: str,
credentials: dict,
prompt_messages: list[PromptMessage],
tools: Optional[list[PromptMessageTool]] = None,
tools: list[PromptMessageTool] | None = None,
) -> int:
"""
Get number of tokens for llm
@@ -325,8 +325,8 @@ class PluginModelClient(BasePluginClient):
credentials: dict,
query: str,
docs: list[str],
score_threshold: Optional[float] = None,
top_n: Optional[int] = None,
score_threshold: float | None = None,
top_n: int | None = None,
) -> RerankResult:
"""
Invoke rerank
@@ -414,7 +414,7 @@ class PluginModelClient(BasePluginClient):
provider: str,
model: str,
credentials: dict,
language: Optional[str] = None,
language: str | None = None,
):
"""
Get tts model voices

View File

@@ -1,5 +1,5 @@
from collections.abc import Generator
from typing import Any, Optional
from typing import Any
from pydantic import BaseModel
@@ -91,9 +91,9 @@ class PluginToolManager(BasePluginClient):
credentials: dict[str, Any],
credential_type: CredentialType,
tool_parameters: dict[str, Any],
conversation_id: Optional[str] = None,
app_id: Optional[str] = None,
message_id: Optional[str] = None,
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
) -> Generator[ToolInvokeMessage, None, None]:
"""
Invoke the tool with the given tenant, user, plugin, provider, name, credentials and parameters.
@@ -193,9 +193,9 @@ class PluginToolManager(BasePluginClient):
provider: str,
credentials: dict[str, Any],
tool: str,
conversation_id: Optional[str] = None,
app_id: Optional[str] = None,
message_id: Optional[str] = None,
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
) -> list[ToolParameter]:
"""
get the runtime parameters of the tool