Merge remote-tracking branch 'origin/main' into feat/queue-based-graph-engine

This commit is contained in:
-LAN-
2025-09-08 13:56:45 +08:00
89 changed files with 2095 additions and 759 deletions

View File

@ -262,6 +262,9 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
raise MessageNotExistsError()
current_app_model_config = app_model.app_model_config
if not current_app_model_config:
raise MoreLikeThisDisabledError()
more_like_this = current_app_model_config.more_like_this_dict
if not current_app_model_config.more_like_this or more_like_this.get("enabled", False) is False:

View File

@ -124,6 +124,7 @@ class TokenBufferMemory:
messages = list(reversed(thread_messages))
curr_message_tokens = 0
prompt_messages: list[PromptMessage] = []
for message in messages:
# Process user message with files

View File

@ -17,6 +17,10 @@ from extensions.ext_redis import redis_client
from models.dataset import Dataset
logger = logging.getLogger(__name__)
from typing import ParamSpec, TypeVar
P = ParamSpec("P")
R = TypeVar("R")
class MatrixoneConfig(BaseModel):

View File

@ -334,7 +334,8 @@ class NotionExtractor(BaseExtractor):
last_edited_time = self.get_notion_last_edited_time()
data_source_info = document_model.data_source_info_dict
data_source_info["last_edited_time"] = last_edited_time
if data_source_info:
data_source_info["last_edited_time"] = last_edited_time
db.session.query(DocumentModel).filter_by(id=document_model.id).update(
{DocumentModel.data_source_info: json.dumps(data_source_info)}

View File

@ -1,5 +1,5 @@
import json
from typing import Any, Optional
from typing import Any, Optional, Self
from core.mcp.types import Tool as RemoteMCPTool
from core.tools.__base.tool_provider import ToolProviderController
@ -48,7 +48,7 @@ class MCPToolProviderController(ToolProviderController):
return ToolProviderType.MCP
@classmethod
def _from_db(cls, db_provider: MCPToolProvider) -> "MCPToolProviderController":
def from_db(cls, db_provider: MCPToolProvider) -> Self:
"""
from db provider
"""

View File

@ -777,7 +777,7 @@ class ToolManager:
if provider is None:
raise ToolProviderNotFoundError(f"mcp provider {provider_id} not found")
controller = MCPToolProviderController._from_db(provider)
controller = MCPToolProviderController.from_db(provider)
return controller
@ -932,7 +932,7 @@ class ToolManager:
tenant_id: str,
provider_type: ToolProviderType,
provider_id: str,
) -> Union[str, dict]:
) -> Union[str, dict[str, Any]]:
"""
get the tool icon

View File

@ -3,7 +3,7 @@ from collections.abc import Generator, Mapping, Sequence
from datetime import UTC, datetime
from typing import TYPE_CHECKING, Any, Optional, Union, cast
from core.variables import ArrayVariable, IntegerVariable, NoneVariable
from core.variables import IntegerVariable, NoneSegment
from core.variables.segments import ArrayAnySegment, ArraySegment
from core.workflow.entities import VariablePool
from core.workflow.enums import (
@ -97,10 +97,10 @@ class IterationNode(Node):
if not variable:
raise IteratorVariableNotFoundError(f"iterator variable {self._node_data.iterator_selector} not found")
if not isinstance(variable, ArrayVariable) and not isinstance(variable, NoneVariable):
if not isinstance(variable, ArraySegment) and not isinstance(variable, NoneSegment):
raise InvalidIteratorValueError(f"invalid iterator value: {variable}, please provide a list.")
if isinstance(variable, NoneVariable) or len(variable.value) == 0:
if isinstance(variable, NoneSegment) or len(variable.value) == 0:
# Try our best to preserve the type information.
if isinstance(variable, ArraySegment):
output = variable.model_copy(update={"value": []})

View File

@ -50,6 +50,7 @@ from .exc import (
)
from .prompts import (
CHAT_EXAMPLE,
CHAT_GENERATE_JSON_PROMPT,
CHAT_GENERATE_JSON_USER_MESSAGE_TEMPLATE,
COMPLETION_GENERATE_JSON_PROMPT,
FUNCTION_CALLING_EXTRACTOR_EXAMPLE,
@ -746,7 +747,7 @@ class ParameterExtractorNode(Node):
if model_mode == ModelMode.CHAT:
system_prompt_messages = ChatModelMessage(
role=PromptMessageRole.SYSTEM,
text=FUNCTION_CALLING_EXTRACTOR_SYSTEM_PROMPT.format(histories=memory_str, instruction=instruction),
text=CHAT_GENERATE_JSON_PROMPT.format(histories=memory_str).replace("{{instructions}}", instruction),
)
user_prompt_message = ChatModelMessage(role=PromptMessageRole.USER, text=input_text)
return [system_prompt_messages, user_prompt_message]