Merge branch 'feat/agent-node-v2' into deploy/dev

Novice
2026-01-07 17:51:08 +08:00
338 changed files with 36292 additions and 4267 deletions

View File

@@ -6,6 +6,7 @@ BASE_CORS_HEADERS: tuple[str, ...] = ("Content-Type", HEADER_NAME_APP_CODE, HEAD
 SERVICE_API_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, "Authorization")
 AUTHENTICATED_HEADERS: tuple[str, ...] = (*SERVICE_API_HEADERS, HEADER_NAME_CSRF_TOKEN)
 FILES_HEADERS: tuple[str, ...] = (*BASE_CORS_HEADERS, HEADER_NAME_CSRF_TOKEN)
+EMBED_HEADERS: tuple[str, ...] = ("Content-Type", HEADER_NAME_APP_CODE)
 EXPOSED_HEADERS: tuple[str, ...] = ("X-Version", "X-Env", "X-Trace-Id")
@@ -42,10 +43,28 @@ def init_app(app: DifyApp):
     _apply_cors_once(
         web_bp,
-        resources={r"/*": {"origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS}},
-        supports_credentials=True,
-        allow_headers=list(AUTHENTICATED_HEADERS),
-        methods=["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
+        resources={
+            # Embedded bot endpoints (unauthenticated, cross-origin safe)
+            r"^/chat-messages$": {
+                "origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS,
+                "supports_credentials": False,
+                "allow_headers": list(EMBED_HEADERS),
+                "methods": ["GET", "POST", "OPTIONS"],
+            },
+            r"^/chat-messages/.*": {
+                "origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS,
+                "supports_credentials": False,
+                "allow_headers": list(EMBED_HEADERS),
+                "methods": ["GET", "POST", "OPTIONS"],
+            },
+            # Default web application endpoints (authenticated)
+            r"/*": {
+                "origins": dify_config.WEB_API_CORS_ALLOW_ORIGINS,
+                "supports_credentials": True,
+                "allow_headers": list(AUTHENTICATED_HEADERS),
+                "methods": ["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
+            },
+        },
         expose_headers=list(EXPOSED_HEADERS),
     )
     app.register_blueprint(web_bp)
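For reference, flask-cors accepts exactly this kind of per-resource options mapping, and it sorts resource patterns longest-first so the specific embed rules take precedence over the r"/*" catch-all. That lets one call give the embed endpoints a credential-less policy while the rest of the blueprint keeps cookies plus the CSRF header. A minimal self-contained sketch using plain flask_cors.CORS; the project's _apply_cors_once wrapper and dify_config are not reproduced, and "X-App-Code" / "X-CSRF-Token" are hypothetical stand-ins for the real header-name constants:

from flask import Flask
from flask_cors import CORS

app = Flask(__name__)

# Hypothetical origins, standing in for dify_config.WEB_API_CORS_ALLOW_ORIGINS.
ALLOWED_ORIGINS = ["https://example.com"]

CORS(
    app,
    resources={
        # Embed endpoint: no cookies, only the headers an anonymous widget needs.
        r"^/chat-messages$": {
            "origins": ALLOWED_ORIGINS,
            "supports_credentials": False,
            "allow_headers": ["Content-Type", "X-App-Code"],
            "methods": ["GET", "POST", "OPTIONS"],
        },
        # Catch-all: authenticated endpoints keep credentials and the CSRF header.
        r"/*": {
            "origins": ALLOWED_ORIGINS,
            "supports_credentials": True,
            "allow_headers": ["Content-Type", "Authorization", "X-CSRF-Token"],
            "methods": ["GET", "PUT", "POST", "DELETE", "OPTIONS", "PATCH"],
        },
    },
    expose_headers=["X-Version", "X-Env", "X-Trace-Id"],
)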

View File

@@ -12,6 +12,7 @@ def init_app(app: DifyApp):
         create_tenant,
         extract_plugins,
         extract_unique_plugins,
+        file_usage,
         fix_app_site_missing,
         install_plugins,
         install_rag_pipeline_plugins,
@@ -49,6 +50,7 @@ def init_app(app: DifyApp):
         clear_free_plan_tenant_expired_logs,
         clear_orphaned_file_records,
         remove_orphaned_files_on_storage,
+        file_usage,
         setup_system_tool_oauth_client,
         setup_system_trigger_oauth_client,
         cleanup_orphaned_draft_variables,
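This extension imports each click command and registers it on the Flask CLI, so adding file_usage to both lists makes it a new management command. A minimal sketch of that registration pattern using Flask's built-in click integration; the body of file_usage is a hypothetical placeholder, since the real command's logic is not shown in this diff:

import click
from flask import Flask


@click.command("file-usage")
def file_usage():
    """Hypothetical placeholder; the real command's behavior is not in this hunk."""
    click.echo("reporting file usage...")


def init_app(app: Flask):
    # Registered commands become runnable as `flask file-usage`.
    app.cli.add_command(file_usage)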

View File

@@ -53,3 +53,10 @@ def _setup_gevent_compatibility():
 def init_app(app: DifyApp):
     db.init_app(app)
     _setup_gevent_compatibility()
+
+    # Eagerly build the engine so pool_size/max_overflow/etc. come from config
+    try:
+        with app.app_context():
+            _ = db.engine  # triggers engine creation with the configured options
+    except Exception:
+        logger.exception("Failed to initialize SQLAlchemy engine during app startup")
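Touching db.engine inside an app context forces Flask-SQLAlchemy to build the engine (and its connection pool) immediately, so misconfigured pool options fail loudly at startup instead of on the first request. A sketch of how those options flow in via standard Flask-SQLAlchemy configuration; the URI and pool numbers are illustrative, not Dify's defaults:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql+psycopg2://user:pass@localhost/dify"
app.config["SQLALCHEMY_ENGINE_OPTIONS"] = {
    "pool_size": 30,       # illustrative values
    "max_overflow": 10,
    "pool_pre_ping": True,
}

db = SQLAlchemy(app)

with app.app_context():
    engine = db.engine  # engine and pool are created here, with the options above
    print(engine.pool.size())  # -> 30 for a QueuePool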

View File

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import logging
 import os
 import threading
@@ -33,7 +35,7 @@ class AliyunLogStore:
     Ensures only one instance exists to prevent multiple PG connection pools.
     """
 
-    _instance: "AliyunLogStore | None" = None
+    _instance: AliyunLogStore | None = None
     _initialized: bool = False
 
     # Track delayed PG connection for newly created projects
@@ -66,7 +68,7 @@ class AliyunLogStore:
         "\t",
     ]
 
-    def __new__(cls) -> "AliyunLogStore":
+    def __new__(cls) -> AliyunLogStore:
         """Implement singleton pattern."""
         if cls._instance is None:
             cls._instance = super().__new__(cls)
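With `from __future__ import annotations`, annotations are evaluated lazily, so the class can name itself without string quoting, which is all the two changed signatures do. The singleton itself is the standard `__new__` pattern; a minimal runnable sketch, with a lock added as an assumption (mirroring this module's threading import) to make first creation thread-safe:

from __future__ import annotations

import threading


class Singleton:
    _instance: Singleton | None = None  # no quotes needed under postponed evaluation
    _lock = threading.Lock()

    def __new__(cls) -> Singleton:
        # Double-checked locking: cheap fast path, lock only on first creation.
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super().__new__(cls)
        return cls._instance


assert Singleton() is Singleton()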

View File

@@ -22,6 +22,18 @@ from models.enums import WorkflowRunTriggeredFrom
 
 logger = logging.getLogger(__name__)
 
+
+def to_serializable(obj):
+    """
+    Convert non-JSON-serializable objects into JSON-compatible formats.
+    - Uses `to_dict()` if it's a callable method.
+    - Falls back to string representation.
+    """
+    if hasattr(obj, "to_dict") and callable(obj.to_dict):
+        return obj.to_dict()
+    return str(obj)
+
+
 class LogstoreWorkflowExecutionRepository(WorkflowExecutionRepository):
     def __init__(
         self,
@@ -69,6 +81,11 @@ class LogstoreWorkflowExecutionRepository(WorkflowExecutionRepository):
         # Set to True to enable dual-write for safe migration, False to use LogStore only
         self._enable_dual_write = os.environ.get("LOGSTORE_DUAL_WRITE_ENABLED", "true").lower() == "true"
 
+        # Control flag for whether to write the `graph` field to LogStore.
+        # If LOGSTORE_ENABLE_PUT_GRAPH_FIELD is "true", write the full `graph` field;
+        # otherwise write an empty {} instead. Defaults to writing the `graph` field.
+        self._enable_put_graph_field = os.environ.get("LOGSTORE_ENABLE_PUT_GRAPH_FIELD", "true").lower() == "true"
+
     def _to_logstore_model(self, domain_model: WorkflowExecution) -> list[tuple[str, str]]:
         """
         Convert a domain model to a logstore model (List[Tuple[str, str]]).
@@ -108,9 +125,24 @@ class LogstoreWorkflowExecutionRepository(WorkflowExecutionRepository):
             ),
             ("type", domain_model.workflow_type.value),
             ("version", domain_model.workflow_version),
-            ("graph", json.dumps(domain_model.graph, ensure_ascii=False) if domain_model.graph else "{}"),
-            ("inputs", json.dumps(domain_model.inputs, ensure_ascii=False) if domain_model.inputs else "{}"),
-            ("outputs", json.dumps(domain_model.outputs, ensure_ascii=False) if domain_model.outputs else "{}"),
+            (
+                "graph",
+                json.dumps(domain_model.graph, ensure_ascii=False, default=to_serializable)
+                if domain_model.graph and self._enable_put_graph_field
+                else "{}",
+            ),
+            (
+                "inputs",
+                json.dumps(domain_model.inputs, ensure_ascii=False, default=to_serializable)
+                if domain_model.inputs
+                else "{}",
+            ),
+            (
+                "outputs",
+                json.dumps(domain_model.outputs, ensure_ascii=False, default=to_serializable)
+                if domain_model.outputs
+                else "{}",
+            ),
             ("status", domain_model.status.value),
             ("error_message", domain_model.error_message or ""),
             ("total_tokens", str(domain_model.total_tokens)),

View File

@@ -5,6 +5,8 @@ automatic cleanup, backup and restore.
 Supports complete lifecycle management for knowledge base files.
 """
 
+from __future__ import annotations
+
 import json
 import logging
 import operator
@@ -48,7 +50,7 @@ class FileMetadata:
         return data
 
     @classmethod
-    def from_dict(cls, data: dict) -> "FileMetadata":
+    def from_dict(cls, data: dict) -> FileMetadata:
         """Create instance from dictionary"""
         data = data.copy()
         data["created_at"] = datetime.fromisoformat(data["created_at"])