Merge remote-tracking branch 'origin/main' into deploy/dev

CodingOnStar
2025-10-10 10:00:50 +08:00
17 changed files with 105 additions and 49 deletions

View File

@@ -362,11 +362,11 @@ class HttpConfig(BaseSettings):
     )
     HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field(
-        ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60
+        ge=1, description="Maximum read timeout in seconds for HTTP requests", default=600
     )
     HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field(
-        ge=1, description="Maximum write timeout in seconds for HTTP requests", default=20
+        ge=1, description="Maximum write timeout in seconds for HTTP requests", default=600
     )
     HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
@@ -771,7 +771,7 @@ class MailConfig(BaseSettings):
     MAIL_TEMPLATING_TIMEOUT: int = Field(
         description="""
-        Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates.
+        Timeout for email templating in seconds. Used to prevent infinite loops in malicious templates.
         Only available in sandbox mode.""",
         default=3,
     )
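
The two timeout fields above are pydantic-validated upper bounds (ge=1), raised here from 60s/20s to 600s. A minimal sketch, assuming an httpx-based caller, of how such maxima might clamp a per-request timeout; build_timeout and the constants are hypothetical, not the call site touched by this commit:

import httpx

# Hypothetical helper, not from this commit: clamps caller-supplied
# timeouts to the configured maxima before building an httpx.Timeout.
MAX_CONNECT = 10  # HTTP_REQUEST_MAX_CONNECT_TIMEOUT default
MAX_READ = 600    # HTTP_REQUEST_MAX_READ_TIMEOUT, new default above
MAX_WRITE = 600   # HTTP_REQUEST_MAX_WRITE_TIMEOUT, new default above

def build_timeout(read: int | None = None, write: int | None = None) -> httpx.Timeout:
    return httpx.Timeout(
        connect=MAX_CONNECT,
        read=min(read, MAX_READ) if read is not None else MAX_READ,
        write=min(write, MAX_WRITE) if write is not None else MAX_WRITE,
        pool=MAX_CONNECT,
    )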

View File

@@ -1,6 +1,6 @@
 from collections.abc import Generator
 from dataclasses import dataclass, field
-from typing import TypeVar, Union, cast
+from typing import TypeVar, Union

 from core.agent.entities import AgentInvokeMessage
 from core.tools.entities.tool_entities import ToolInvokeMessage
@@ -87,7 +87,8 @@ def merge_blob_chunks(
                     ),
                     meta=resp.meta,
                 )
-                yield cast(MessageType, merged_message)
+                assert isinstance(merged_message, (ToolInvokeMessage, AgentInvokeMessage))
+                yield merged_message  # type: ignore
                 # Clean up the buffer
                 del files[chunk_id]
             else:
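
The rewrite above replaces typing.cast on a TypeVar-typed generator with a runtime isinstance assertion and a single scoped ignore, alongside the cast removals elsewhere in this commit. A minimal sketch of the pattern with hypothetical stand-in classes (Message and BlobMessage are not the repository's types):

from collections.abc import Generator
from typing import TypeVar

class Message: ...
class BlobMessage(Message): ...

MessageType = TypeVar("MessageType", bound=Message)

def passthrough(resp: Generator[MessageType, None, None]) -> Generator[MessageType, None, None]:
    for _ in resp:
        merged = BlobMessage()  # statically BlobMessage, not MessageType
        # The assert verifies at runtime what cast() merely asserted to the
        # type checker; the ignore is then confined to this one yield.
        assert isinstance(merged, BlobMessage)
        yield merged  # type: ignore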

View File

@@ -2,7 +2,7 @@ import datetime
 import logging
 import time
 from collections.abc import Mapping
-from typing import Any, cast
+from typing import Any

 from sqlalchemy import func, select
@@ -62,7 +62,7 @@ class KnowledgeIndexNode(Node):
         return self._node_data

     def _run(self) -> NodeRunResult:  # type: ignore
-        node_data = cast(KnowledgeIndexNodeData, self._node_data)
+        node_data = self._node_data
         variable_pool = self.graph_runtime_state.variable_pool
         dataset_id = variable_pool.get(["sys", SystemVariableKey.DATASET_ID])
         if not dataset_id:
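
The cast() dropped above was redundant because _node_data is already exposed with its concrete type (note the return self._node_data property two lines earlier), so the checker narrows the access on its own. A sketch of why the inference holds, with the class shape assumed rather than taken from the diff:

class KnowledgeIndexNodeData: ...

class KnowledgeIndexNode:
    # Assumed: the subclass declares the attribute with the concrete type,
    # so reads of self._node_data need no cast.
    _node_data: KnowledgeIndexNodeData

    def _run(self) -> None:
        node_data = self._node_data  # inferred as KnowledgeIndexNodeData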

View File

@@ -25,7 +25,6 @@
   "reportMissingParameterType": "hint",
   "reportMissingTypeArgument": "hint",
   "reportUnnecessaryComparison": "hint",
-  "reportUnnecessaryCast": "hint",
   "reportUnnecessaryIsInstance": "hint",
   "reportUntypedFunctionDecorator": "hint",

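Removing the "reportUnnecessaryCast" override lets that diagnostic fall back to pyright's default for the project's type-checking mode, consistent with the cast() calls deleted in this commit. For reference, the rule fires on casts whose target type the expression already has:

from typing import cast

def get_name() -> str:
    return "dify"

# pyright's reportUnnecessaryCast flags this call: the expression is
# already typed str, so the cast has no effect.
name = cast(str, get_name())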
View File

@@ -1,7 +1,7 @@
 import hashlib
 import json
 from datetime import datetime
-from typing import Any, cast
+from typing import Any

 from sqlalchemy import or_
 from sqlalchemy.exc import IntegrityError
@@ -55,7 +55,7 @@ class MCPToolManageService:
             cache=NoOpProviderCredentialCache(),
         )
-        return cast(dict[str, str], encrypter_instance.encrypt(headers))
+        return encrypter_instance.encrypt(headers)

     @staticmethod
     def get_mcp_provider_by_provider_id(provider_id: str, tenant_id: str) -> MCPToolProvider:
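
The cast around encrypt() goes away at the call site above, presumably because the encrypter's return annotation already says dict[str, str]. That signature isn't shown in this diff, so the sketch below assumes it:

# Assumed shape, not the real encrypter class: with encrypt() annotated
# to return dict[str, str], the caller needs no cast.
class HeaderEncrypter:
    def encrypt(self, data: dict[str, str]) -> dict[str, str]:
        # Placeholder transform standing in for actual encryption.
        return {k: "enc::" + v for k, v in data.items()}

headers = {"Authorization": "Bearer <token>"}
encrypted: dict[str, str] = HeaderEncrypter().encrypt(headers)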

View File

@@ -15,13 +15,13 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch):
     # Set environment variables using monkeypatch
     monkeypatch.setenv("CONSOLE_API_URL", "https://example.com")
     monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com")
-    monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30")
+    monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30")  # Custom value for testing
     monkeypatch.setenv("DB_USERNAME", "postgres")
     monkeypatch.setenv("DB_PASSWORD", "postgres")
     monkeypatch.setenv("DB_HOST", "localhost")
     monkeypatch.setenv("DB_PORT", "5432")
     monkeypatch.setenv("DB_DATABASE", "dify")
-    monkeypatch.setenv("HTTP_REQUEST_MAX_READ_TIMEOUT", "600")
+    monkeypatch.setenv("HTTP_REQUEST_MAX_READ_TIMEOUT", "300")  # Custom value for testing

     # load dotenv file with pydantic-settings
     config = DifyConfig()
@@ -35,16 +35,36 @@ def test_dify_config(monkeypatch: pytest.MonkeyPatch):
     assert config.SENTRY_TRACES_SAMPLE_RATE == 1.0
     assert config.TEMPLATE_TRANSFORM_MAX_LENGTH == 400_000

-    # annotated field with default value
-    assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600
+    # annotated field with custom configured value
+    assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 300

-    # annotated field with configured value
+    # annotated field with custom configured value
     assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 30

     # values from pyproject.toml
     assert Version(config.project.version) >= Version("1.0.0")

+
+def test_http_timeout_defaults(monkeypatch: pytest.MonkeyPatch):
+    """Test that HTTP timeout defaults are correctly set"""
+    # clear system environment variables
+    os.environ.clear()
+    # Set minimal required env vars
+    monkeypatch.setenv("DB_USERNAME", "postgres")
+    monkeypatch.setenv("DB_PASSWORD", "postgres")
+    monkeypatch.setenv("DB_HOST", "localhost")
+    monkeypatch.setenv("DB_PORT", "5432")
+    monkeypatch.setenv("DB_DATABASE", "dify")
+
+    config = DifyConfig()
+
+    # Verify default timeout values
+    assert config.HTTP_REQUEST_MAX_CONNECT_TIMEOUT == 10
+    assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600
+    assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 600
+
+
 # NOTE: If there is a `.env` file in your Workspace, this test might not succeed as expected.
 # This is due to `pymilvus` loading all the variables from the `.env` file into `os.environ`.
 def test_flask_configs(monkeypatch: pytest.MonkeyPatch):
@@ -55,7 +75,6 @@ def test_flask_configs(monkeypatch: pytest.MonkeyPatch):
     # Set environment variables using monkeypatch
     monkeypatch.setenv("CONSOLE_API_URL", "https://example.com")
     monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com")
-    monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30")
     monkeypatch.setenv("DB_USERNAME", "postgres")
     monkeypatch.setenv("DB_PASSWORD", "postgres")
     monkeypatch.setenv("DB_HOST", "localhost")
@@ -105,7 +124,6 @@ def test_inner_api_config_exist(monkeypatch: pytest.MonkeyPatch):
     # Set environment variables using monkeypatch
     monkeypatch.setenv("CONSOLE_API_URL", "https://example.com")
     monkeypatch.setenv("CONSOLE_WEB_URL", "https://example.com")
-    monkeypatch.setenv("HTTP_REQUEST_MAX_WRITE_TIMEOUT", "30")
     monkeypatch.setenv("DB_USERNAME", "postgres")
     monkeypatch.setenv("DB_PASSWORD", "postgres")
     monkeypatch.setenv("DB_HOST", "localhost")
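
The new test_http_timeout_defaults clears os.environ wholesale before asserting defaults, and the NOTE in the same file warns that a stray .env loaded by pymilvus can still repopulate values. A gentler isolation sketch, offered as an alternative rather than what the commit does (the DifyConfig import path is assumed):

import pytest
from configs import DifyConfig  # import path assumed from the tests above

def test_http_timeout_defaults_isolated(monkeypatch: pytest.MonkeyPatch):
    # Delete only the overrides under test instead of clearing the
    # whole process environment.
    for name in (
        "HTTP_REQUEST_MAX_CONNECT_TIMEOUT",
        "HTTP_REQUEST_MAX_READ_TIMEOUT",
        "HTTP_REQUEST_MAX_WRITE_TIMEOUT",
    ):
        monkeypatch.delenv(name, raising=False)
    monkeypatch.setenv("DB_USERNAME", "postgres")
    monkeypatch.setenv("DB_PASSWORD", "postgres")
    monkeypatch.setenv("DB_HOST", "localhost")
    monkeypatch.setenv("DB_PORT", "5432")
    monkeypatch.setenv("DB_DATABASE", "dify")

    config = DifyConfig()
    assert config.HTTP_REQUEST_MAX_READ_TIMEOUT == 600
    assert config.HTTP_REQUEST_MAX_WRITE_TIMEOUT == 600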