Merge remote-tracking branch 'origin/main' into feat/support-agent-sandbox

# Conflicts:
#	api/models/model.py
#	web/contract/router.ts
This commit is contained in:
yyh
2026-01-15 10:59:45 +08:00
48 changed files with 6825 additions and 1040 deletions

View File

@ -592,9 +592,12 @@ def _get_conversation(app_model, conversation_id):
if not conversation:
raise NotFound("Conversation Not Exists.")
if not conversation.read_at:
conversation.read_at = naive_utc_now()
conversation.read_account_id = current_user.id
db.session.commit()
db.session.execute(
sa.update(Conversation)
.where(Conversation.id == conversation_id, Conversation.read_at.is_(None))
.values(read_at=naive_utc_now(), read_account_id=current_user.id)
)
db.session.commit()
db.session.refresh(conversation)
return conversation

View File

@ -1,5 +1,6 @@
from core.plugin.entities.endpoint import EndpointEntityWithInstance
from core.plugin.impl.base import BasePluginClient
from core.plugin.impl.exc import PluginDaemonInternalServerError
class PluginEndpointClient(BasePluginClient):
@ -70,18 +71,27 @@ class PluginEndpointClient(BasePluginClient):
def delete_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str):
"""
Delete the given endpoint.
This operation is idempotent: if the endpoint is already deleted (record not found),
it will return True instead of raising an error.
"""
# NOTE(review): this span is a rendered diff hunk (`-70,18 +71,27`). The call
# below, down to its closing parenthesis, is the PRE-change body removed by
# the hunk; the try/except that follows is the POST-change replacement.
return self._request_with_plugin_daemon_response(
"POST",
f"plugin/{tenant_id}/endpoint/remove",
bool,
data={
"endpoint_id": endpoint_id,
},
headers={
"Content-Type": "application/json",
},
)
# Post-change body: same daemon call, but "record not found" errors from the
# daemon are swallowed so repeated deletes succeed (idempotent delete).
try:
return self._request_with_plugin_daemon_response(
"POST",
f"plugin/{tenant_id}/endpoint/remove",
bool,
data={
"endpoint_id": endpoint_id,
},
headers={
"Content-Type": "application/json",
},
)
except PluginDaemonInternalServerError as e:
# Make delete idempotent: if record is not found, consider it a success
# (match is case-insensitive via .lower() on the daemon's description).
if "record not found" in str(e.description).lower():
return True
raise
def enable_endpoint(self, tenant_id: str, user_id: str, endpoint_id: str):
"""

View File

@ -115,7 +115,18 @@ def build_from_mappings(
# TODO(QuantumGhost): Performance concern - each mapping triggers a separate database query.
# Implement batch processing to reduce database load when handling multiple files.
# Filter out None/empty mappings to avoid errors
valid_mappings = [m for m in mappings if m and m.get("transfer_method")]
def is_valid_mapping(m: Mapping[str, Any]) -> bool:
    """Return True when *m* looks like a usable file mapping.

    A mapping must be non-empty and carry a truthy ``transfer_method``.
    For REMOTE_URL transfers it must additionally provide a non-empty
    ``url`` or ``remote_url`` value.
    """
    if not m:
        return False
    method = m.get("transfer_method")
    if not method:
        return False
    # Remote files are fetched later; reject entries missing their URL up front.
    if method == FileTransferMethod.REMOTE_URL and not (m.get("url") or m.get("remote_url")):
        return False
    return True
valid_mappings = [m for m in mappings if is_valid_mapping(m)]
files = [
build_from_mapping(
mapping=mapping,

View File

@ -2,6 +2,7 @@ from __future__ import annotations
from datetime import datetime
from typing import TypeAlias
from uuid import uuid4
from pydantic import BaseModel, ConfigDict, Field, field_validator
@ -20,8 +21,8 @@ class SimpleFeedback(ResponseModel):
class RetrieverResource(ResponseModel):
id: str
message_id: str
id: str = Field(default_factory=lambda: str(uuid4()))
message_id: str = Field(default_factory=lambda: str(uuid4()))
position: int
dataset_id: str | None = None
dataset_name: str | None = None

View File

@ -1149,7 +1149,7 @@ class DatasetCollectionBinding(TypeBase):
)
class TidbAuthBinding(Base):
class TidbAuthBinding(TypeBase):
__tablename__ = "tidb_auth_bindings"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="tidb_auth_bindings_pkey"),
@ -1158,7 +1158,13 @@ class TidbAuthBinding(Base):
sa.Index("tidb_auth_bindings_created_at_idx", "created_at"),
sa.Index("tidb_auth_bindings_status_idx", "status"),
)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4()))
id: Mapped[str] = mapped_column(
StringUUID,
primary_key=True,
insert_default=lambda: str(uuid4()),
default_factory=lambda: str(uuid4()),
init=False,
)
tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
cluster_id: Mapped[str] = mapped_column(String(255), nullable=False)
cluster_name: Mapped[str] = mapped_column(String(255), nullable=False)
@ -1166,7 +1172,9 @@ class TidbAuthBinding(Base):
status: Mapped[str] = mapped_column(sa.String(255), nullable=False, server_default=sa.text("'CREATING'"))
account: Mapped[str] = mapped_column(String(255), nullable=False)
password: Mapped[str] = mapped_column(String(255), nullable=False)
created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=func.current_timestamp())
created_at: Mapped[datetime] = mapped_column(
DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
class Whitelist(TypeBase):

View File

@ -1461,7 +1461,7 @@ class MessageAnnotation(Base):
return account
class AppAnnotationHitHistory(Base):
class AppAnnotationHitHistory(TypeBase):
__tablename__ = "app_annotation_hit_histories"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="app_annotation_hit_histories_pkey"),
@ -1471,17 +1471,19 @@ class AppAnnotationHitHistory(Base):
sa.Index("app_annotation_hit_histories_message_idx", "message_id"),
)
id = mapped_column(StringUUID, default=lambda: str(uuid4()))
app_id = mapped_column(StringUUID, nullable=False)
id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
annotation_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
source = mapped_column(LongText, nullable=False)
question = mapped_column(LongText, nullable=False)
account_id = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())
score = mapped_column(Float, nullable=False, server_default=sa.text("0"))
message_id = mapped_column(StringUUID, nullable=False)
annotation_question = mapped_column(LongText, nullable=False)
annotation_content = mapped_column(LongText, nullable=False)
source: Mapped[str] = mapped_column(LongText, nullable=False)
question: Mapped[str] = mapped_column(LongText, nullable=False)
account_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
score: Mapped[float] = mapped_column(Float, nullable=False, server_default=sa.text("0"))
message_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
annotation_question: Mapped[str] = mapped_column(LongText, nullable=False)
annotation_content: Mapped[str] = mapped_column(LongText, nullable=False)
@property
def account(self):
@ -2181,7 +2183,7 @@ class LLMGenerationDetail(Base):
)
class TenantCreditPool(Base):
class TenantCreditPool(TypeBase):
__tablename__ = "tenant_credit_pools"
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="tenant_credit_pool_pkey"),
@ -2189,14 +2191,20 @@ class TenantCreditPool(Base):
sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
)
id = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"))
tenant_id = mapped_column(StringUUID, nullable=False)
pool_type = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
quota_limit = mapped_column(BigInteger, nullable=False, default=0)
quota_used = mapped_column(BigInteger, nullable=False, default=0)
created_at = mapped_column(sa.DateTime, nullable=False, server_default=text("CURRENT_TIMESTAMP"))
updated_at = mapped_column(
sa.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)
quota_used: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=text("CURRENT_TIMESTAMP"), init=False
)
updated_at: Mapped[datetime] = mapped_column(
sa.DateTime,
nullable=False,
server_default=func.current_timestamp(),
onupdate=func.current_timestamp(),
init=False,
)
@property

View File

@ -50,10 +50,13 @@ def create_clusters(batch_size):
)
for new_cluster in new_clusters:
tidb_auth_binding = TidbAuthBinding(
tenant_id=None,
cluster_id=new_cluster["cluster_id"],
cluster_name=new_cluster["cluster_name"],
account=new_cluster["account"],
password=new_cluster["password"],
active=False,
status="CREATING",
)
db.session.add(tidb_auth_binding)
db.session.commit()

View File

@ -0,0 +1,279 @@
"""Unit tests for PluginEndpointClient functionality.
This test module covers the endpoint client operations including:
- Successful endpoint deletion
- Idempotent delete behavior (record not found)
- Non-idempotent delete behavior (other errors)
Tests follow the Arrange-Act-Assert pattern for clarity.
"""
from unittest.mock import MagicMock, patch
import pytest
from core.plugin.impl.endpoint import PluginEndpointClient
from core.plugin.impl.exc import PluginDaemonInternalServerError
class TestPluginEndpointClientDelete:
"""Unit tests for PluginEndpointClient delete_endpoint operation.
Tests cover:
- Successful endpoint deletion
- Idempotent behavior when endpoint is already deleted (record not found)
- Non-idempotent behavior for other errors
"""
# The fixtures below keep the client fully offline: daemon config values are
# patched, and every test patches httpx.request so no network I/O occurs.
@pytest.fixture
def endpoint_client(self):
"""Create a PluginEndpointClient instance for testing."""
return PluginEndpointClient()
@pytest.fixture
def mock_config(self):
"""Mock plugin daemon configuration."""
with (
patch("core.plugin.impl.base.dify_config.PLUGIN_DAEMON_URL", "http://127.0.0.1:5002"),
patch("core.plugin.impl.base.dify_config.PLUGIN_DAEMON_KEY", "test-api-key"),
):
yield
def test_delete_endpoint_success(self, endpoint_client, mock_config):
"""Test successful endpoint deletion.
Given:
- A valid tenant_id, user_id, and endpoint_id
- The plugin daemon returns success response
When:
- delete_endpoint is called
Then:
- The method should return True
- The request should be made with correct parameters
"""
# Arrange
tenant_id = "tenant-123"
user_id = "user-456"
endpoint_id = "endpoint-789"
# Daemon success protocol as used by these tests: code 0 with payload in "data".
mock_response = MagicMock()
mock_response.status_code = 200
mock_response.json.return_value = {
"code": 0,
"message": "success",
"data": True,
}
with patch("httpx.request", return_value=mock_response):
# Act
result = endpoint_client.delete_endpoint(
tenant_id=tenant_id,
user_id=user_id,
endpoint_id=endpoint_id,
)
# Assert
assert result is True
def test_delete_endpoint_idempotent_record_not_found(self, endpoint_client, mock_config):
"""Test idempotent delete behavior when endpoint is already deleted.
Given:
- A valid tenant_id, user_id, and endpoint_id
- The plugin daemon returns "record not found" error
When:
- delete_endpoint is called
Then:
- The method should return True (idempotent behavior)
- No exception should be raised
"""
# Arrange
tenant_id = "tenant-123"
user_id = "user-456"
endpoint_id = "endpoint-789"
# Error protocol: code -1 with a JSON-encoded error object in "message".
mock_response = MagicMock()
mock_response.status_code = 200
mock_response.json.return_value = {
"code": -1,
"message": (
'{"error_type": "PluginDaemonInternalServerError", '
'"message": "failed to remove endpoint: record not found"}'
),
}
with patch("httpx.request", return_value=mock_response):
# Act
result = endpoint_client.delete_endpoint(
tenant_id=tenant_id,
user_id=user_id,
endpoint_id=endpoint_id,
)
# Assert - should return True instead of raising an error
assert result is True
def test_delete_endpoint_non_idempotent_other_errors(self, endpoint_client, mock_config):
"""Test non-idempotent delete behavior for other errors.
Given:
- A valid tenant_id, user_id, and endpoint_id
- The plugin daemon returns a different error (not "record not found")
When:
- delete_endpoint is called
Then:
- The method should raise PluginDaemonInternalServerError
"""
# Arrange
tenant_id = "tenant-123"
user_id = "user-456"
endpoint_id = "endpoint-789"
mock_response = MagicMock()
mock_response.status_code = 200
mock_response.json.return_value = {
"code": -1,
"message": (
'{"error_type": "PluginDaemonInternalServerError", '
'"message": "failed to remove endpoint: internal server error"}'
),
}
with patch("httpx.request", return_value=mock_response):
# Act & Assert
with pytest.raises(PluginDaemonInternalServerError) as exc_info:
endpoint_client.delete_endpoint(
tenant_id=tenant_id,
user_id=user_id,
endpoint_id=endpoint_id,
)
# Assert - the error message should not be "record not found"
assert "record not found" not in str(exc_info.value.description)
def test_delete_endpoint_idempotent_case_insensitive(self, endpoint_client, mock_config):
"""Test idempotent delete behavior with case-insensitive error message.
Given:
- A valid tenant_id, user_id, and endpoint_id
- The plugin daemon returns "Record Not Found" error (different case)
When:
- delete_endpoint is called
Then:
- The method should return True (idempotent behavior)
"""
# Arrange
tenant_id = "tenant-123"
user_id = "user-456"
endpoint_id = "endpoint-789"
mock_response = MagicMock()
mock_response.status_code = 200
mock_response.json.return_value = {
"code": -1,
"message": '{"error_type": "PluginDaemonInternalServerError", "message": "Record Not Found"}',
}
with patch("httpx.request", return_value=mock_response):
# Act
result = endpoint_client.delete_endpoint(
tenant_id=tenant_id,
user_id=user_id,
endpoint_id=endpoint_id,
)
# Assert - should still return True
assert result is True
def test_delete_endpoint_multiple_calls_idempotent(self, endpoint_client, mock_config):
"""Test that multiple delete calls are idempotent.
Given:
- A valid tenant_id, user_id, and endpoint_id
- The first call succeeds
- Subsequent calls return "record not found"
When:
- delete_endpoint is called multiple times
Then:
- All calls should return True
"""
# Arrange
tenant_id = "tenant-123"
user_id = "user-456"
endpoint_id = "endpoint-789"
# First call - success
mock_response_success = MagicMock()
mock_response_success.status_code = 200
mock_response_success.json.return_value = {
"code": 0,
"message": "success",
"data": True,
}
# Second call - record not found
mock_response_not_found = MagicMock()
mock_response_not_found.status_code = 200
mock_response_not_found.json.return_value = {
"code": -1,
"message": (
'{"error_type": "PluginDaemonInternalServerError", '
'"message": "failed to remove endpoint: record not found"}'
),
}
# Swap the patched httpx.request return value between calls to simulate
# a real delete followed by a retry against an already-deleted endpoint.
with patch("httpx.request") as mock_request:
# Act - first call
mock_request.return_value = mock_response_success
result1 = endpoint_client.delete_endpoint(
tenant_id=tenant_id,
user_id=user_id,
endpoint_id=endpoint_id,
)
# Act - second call (already deleted)
mock_request.return_value = mock_response_not_found
result2 = endpoint_client.delete_endpoint(
tenant_id=tenant_id,
user_id=user_id,
endpoint_id=endpoint_id,
)
# Assert - both should return True
assert result1 is True
assert result2 is True
def test_delete_endpoint_non_idempotent_unauthorized_error(self, endpoint_client, mock_config):
"""Test that authorization errors are not treated as idempotent.
Given:
- A valid tenant_id, user_id, and endpoint_id
- The plugin daemon returns an unauthorized error
When:
- delete_endpoint is called
Then:
- The method should raise the appropriate error (not return True)
"""
# Arrange
tenant_id = "tenant-123"
user_id = "user-456"
endpoint_id = "endpoint-789"
mock_response = MagicMock()
mock_response.status_code = 200
mock_response.json.return_value = {
"code": -1,
"message": '{"error_type": "PluginDaemonUnauthorizedError", "message": "unauthorized access"}',
}
with patch("httpx.request", return_value=mock_response):
# Act & Assert
with pytest.raises(Exception) as exc_info:
endpoint_client.delete_endpoint(
tenant_id=tenant_id,
user_id=user_id,
endpoint_id=endpoint_id,
)
# Assert - should not return True for unauthorized errors
assert exc_info.value.__class__.__name__ == "PluginDaemonUnauthorizedError"