Compare commits


35 Commits

Author SHA1 Message Date
c730fec1e4 chore: bump version to 1.13.0 (#32147) 2026-02-11 17:08:49 +08:00
b4fec9b7aa fix: hide invite button if current user is not workspace manager (#31744) 2026-02-11 16:31:12 +08:00
7e0bccbbf0 fix: update index to optimize message clean performance (#32238)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-11 16:07:52 +08:00
2f87ecc0ce fix: fastopenapi causes user to be anonymous (#32236)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-11 15:53:51 +08:00
5b4c7b2a40 feat(tests): add mock for useInvalidateWorkflowRunHistory in pipeline run tests (#32234) 2026-02-11 14:51:43 +08:00
378a1d7d08 Merge commit from fork
Removed the dangerous `new Function` call during echarts parsing and replaced it with an error message.

Co-authored-by: Byron Wang <byron@linux.com>
2026-02-11 14:22:30 +08:00
ce0192620d chore(deps): bump google-api-python-client from 2.90.0 to 2.189.0 in /api (#32102)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-11 15:15:21 +09:00
e9feeedc01 chore(deps): bump cryptography from 46.0.3 to 46.0.5 in /api (#32218)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-11 15:12:21 +09:00
e32490f54e feat(workflow): enhance workflow run history management and UI updates (#32230) 2026-02-11 14:09:33 +08:00
e9db50f781 docs(api): mark SetupApi as unauthenticated by design (#32224) 2026-02-11 12:11:09 +08:00
0310f631ee fix: get_message_event_type returns wrong message type (#32019)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-11 10:57:27 +08:00
abc5a61e98 feat: support nl-NL language (#32216) 2026-02-11 10:42:13 +08:00
5f1698add6 fix: add unique constraint to tenant_default_models to prevent duplic… (#31221)
Co-authored-by: qiaofenglin <qiaofenglin@baidu.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Novice <novice12185727@gmail.com>
2026-02-11 10:22:35 +08:00
36e50f277f fix: all tools being deleted (#32207) 2026-02-11 10:04:38 +08:00
704ee40caa fix(api): excessively high CPU usage caused by RedisClientWrapper (#32212)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2026-02-11 09:49:29 +08:00
3119c99979 chore(api): consume tasks in workflow_based_app_execution queue in start-worker script (#32214) 2026-02-11 09:21:54 +08:00
16b8733886 fix: Fix the display of the state icon of the base node (#32208) 2026-02-10 22:45:56 +08:00
83f64104fd chore(deps): bump axios from 1.13.2 to 1.13.5 in /sdks/nodejs-client (#32199)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-10 21:58:06 +08:00
5077879886 chore: allow draft-running a single node without connecting to other nodes (#31977) 2026-02-10 18:03:52 +08:00
697b57631a fix(console): keep conversation updated_at unchanged when marking read (#32133) 2026-02-10 17:56:38 +08:00
6015f23e79 feat: enhance celery configuration (#32145) 2026-02-10 17:55:24 +08:00
f355c8d595 refactor: type safe env, update to zod v4 (#32035) 2026-02-10 17:55:11 +08:00
0142001fc2 fix: missing dify home directory leads to permission error (#32169) 2026-02-10 17:47:46 +08:00
4058e9ae23 refactor: extract sub-components and custom hooks from UpdateDSLModal and Metadata components (#32045)
Co-authored-by: CodingOnStar <hanxujiang@dify.com>
Co-authored-by: Stephen Zhou <38493346+hyoban@users.noreply.github.com>
2026-02-10 17:26:08 +08:00
95310561ec chore(api): update launch.json.example to include new workflow_based_app_execution. (#32184) 2026-02-10 17:08:43 +08:00
de33561a52 test: add comprehensive tests for Human Input Node functionality (#32191) 2026-02-10 17:00:46 +08:00
6d9665578b fix: replace sendBeacon with fetch keepalive for autosave on page close (#32088)
Signed-off-by: Varun Chawla <varun_6april@hotmail.com>
2026-02-10 16:59:02 +08:00
18f14c04dc fix(web): fill workflow tool output descriptions from schema (#32117) 2026-02-10 16:51:28 +08:00
14251b249d fix(api): include file marker for workflow tool file outputs (#32114) 2026-02-10 16:51:12 +08:00
1819bd72ef refactor: import component css in globals.css (#32180)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-10 13:55:42 +08:00
7dabc03a08 fix: When the user is a non-sandbox user and has a paid balance, the … (#32173)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-10 12:08:23 +08:00
1a050c9f86 fix(api): clean up orphaned pending accounts on member removal (#32151)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-10 10:17:27 +08:00
7fb6e0cdfe refactor(api): tighten OTel decorator typing (#32163) 2026-02-10 00:46:02 +09:00
e0fcf33979 chore: introduce css icons (#32004) 2026-02-09 18:37:41 +08:00
898e09264b chore: detect utilities in css (#32143) 2026-02-09 18:20:09 +08:00
160 changed files with 13887 additions and 2598 deletions

View File

@ -715,6 +715,7 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000

View File

@ -54,7 +54,7 @@
"--loglevel",
"DEBUG",
"-Q",
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,workflow_based_app_execution,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
]
}
]

View File

@ -1344,6 +1344,10 @@ class SandboxExpiredRecordsCleanConfig(BaseSettings):
description="Maximum number of records to process in each batch",
default=1000,
)
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
description="Maximum interval in milliseconds between batches",
default=200,
)
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field(
description="Retention days for sandbox expired workflow_run records and message records",
default=30,

View File

@ -259,11 +259,20 @@ class CeleryConfig(DatabaseConfig):
description="Password of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
description="Timeout for Redis Sentinel socket operations in seconds.",
default=0.1,
)
CELERY_TASK_ANNOTATIONS: dict[str, Any] | None = Field(
description=(
"Annotations for Celery tasks as a JSON mapping of task name -> options "
"(for example, rate limits or other task-specific settings)."
),
default=None,
)
@computed_field
def CELERY_RESULT_BACKEND(self) -> str | None:
if self.CELERY_BACKEND in ("database", "rabbitmq"):
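
As a hedged illustration of the new setting: the JSON value of CELERY_TASK_ANNOTATIONS is the mapping Celery expects for its task_annotations option, which the diff below passes through via celery_app.conf.update. The task name here is hypothetical.

import json

# Hypothetical environment value; keys are Celery task names, values are
# per-task option overrides such as rate limits.
raw = '{"tasks.clean_messages": {"rate_limit": "10/m"}}'
annotations = json.loads(raw)

# Passed through as task_annotations, Celery would then cap the named
# task at 10 executions per minute per worker:
# celery_app.conf.update(task_annotations=annotations)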

View File

@ -21,6 +21,7 @@ language_timezone_mapping = {
"th-TH": "Asia/Bangkok",
"id-ID": "Asia/Jakarta",
"ar-TN": "Africa/Tunis",
"nl-NL": "Europe/Amsterdam",
}
languages = list(language_timezone_mapping.keys())

View File

@ -599,7 +599,12 @@ def _get_conversation(app_model, conversation_id):
db.session.execute(
sa.update(Conversation)
.where(Conversation.id == conversation_id, Conversation.read_at.is_(None))
.values(read_at=naive_utc_now(), read_account_id=current_user.id)
# Keep updated_at unchanged when only marking a conversation as read.
.values(
read_at=naive_utc_now(),
read_account_id=current_user.id,
updated_at=Conversation.updated_at,
)
)
db.session.commit()
db.session.refresh(conversation)
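
The self-assignment above is the standard SQLAlchemy idiom for suppressing an onupdate default: a column that is explicitly present in values() is not re-computed. A minimal sketch using the same model import as elsewhere in this diff:

import sqlalchemy as sa
from models.model import Conversation

def mark_conversation_read(session, conversation_id, account_id, now):
    session.execute(
        sa.update(Conversation)
        .where(Conversation.id == conversation_id, Conversation.read_at.is_(None))
        .values(
            read_at=now,
            read_account_id=account_id,
            # Pin updated_at to its current value so the column's
            # onupdate default does not fire for this UPDATE.
            updated_at=Conversation.updated_at,
        )
    )
    session.commit()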

View File

@ -1,6 +1,7 @@
import urllib.parse
import httpx
from flask_restx import Resource
from pydantic import BaseModel, Field
import services
@ -10,12 +11,12 @@ from controllers.common.errors import (
RemoteFileUploadError,
UnsupportedFileTypeError,
)
from controllers.fastopenapi import console_router
from controllers.console import console_ns
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
from fields.file_fields import FileWithSignedUrl, RemoteFileInfo
from libs.login import current_account_with_tenant
from libs.login import current_account_with_tenant, login_required
from services.file_service import FileService
@ -23,69 +24,73 @@ class RemoteFileUploadPayload(BaseModel):
url: str = Field(..., description="URL to fetch")
@console_router.get(
"/remote-files/<path:url>",
response_model=RemoteFileInfo,
tags=["console"],
)
def get_remote_file_info(url: str) -> RemoteFileInfo:
decoded_url = urllib.parse.unquote(url)
resp = ssrf_proxy.head(decoded_url)
if resp.status_code != httpx.codes.OK:
resp = ssrf_proxy.get(decoded_url, timeout=3)
resp.raise_for_status()
return RemoteFileInfo(
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
file_length=int(resp.headers.get("Content-Length", 0)),
)
@console_router.post(
"/remote-files/upload",
response_model=FileWithSignedUrl,
tags=["console"],
status_code=201,
)
def upload_remote_file(payload: RemoteFileUploadPayload) -> FileWithSignedUrl:
url = payload.url
try:
resp = ssrf_proxy.head(url=url)
@console_ns.route("/remote-files/<path:url>")
class GetRemoteFileInfo(Resource):
@login_required
def get(self, url: str):
decoded_url = urllib.parse.unquote(url)
resp = ssrf_proxy.head(decoded_url)
if resp.status_code != httpx.codes.OK:
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
if resp.status_code != httpx.codes.OK:
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
except httpx.RequestError as e:
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
resp = ssrf_proxy.get(decoded_url, timeout=3)
resp.raise_for_status()
return RemoteFileInfo(
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
file_length=int(resp.headers.get("Content-Length", 0)),
).model_dump(mode="json")
file_info = helpers.guess_file_info_from_response(resp)
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
raise FileTooLargeError
@console_ns.route("/remote-files/upload")
class RemoteFileUpload(Resource):
@login_required
def post(self):
payload = RemoteFileUploadPayload.model_validate(console_ns.payload)
url = payload.url
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
# Try to fetch remote file metadata/content first
try:
resp = ssrf_proxy.head(url=url)
if resp.status_code != httpx.codes.OK:
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
if resp.status_code != httpx.codes.OK:
# Normalize into a user-friendly error message expected by tests
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
except httpx.RequestError as e:
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
try:
user, _ = current_account_with_tenant()
upload_file = FileService(db.engine).upload_file(
filename=file_info.filename,
content=content,
mimetype=file_info.mimetype,
user=user,
source_url=url,
file_info = helpers.guess_file_info_from_response(resp)
# Enforce file size limit with 400 (Bad Request) per tests' expectation
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
raise FileTooLargeError()
# Load content if needed
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
try:
user, _ = current_account_with_tenant()
upload_file = FileService(db.engine).upload_file(
filename=file_info.filename,
content=content,
mimetype=file_info.mimetype,
user=user,
source_url=url,
)
except services.errors.file.FileTooLargeError as file_too_large_error:
raise FileTooLargeError(file_too_large_error.description)
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
# Success: return created resource with 201 status
return (
FileWithSignedUrl(
id=upload_file.id,
name=upload_file.name,
size=upload_file.size,
extension=upload_file.extension,
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
mime_type=upload_file.mime_type,
created_by=upload_file.created_by,
created_at=int(upload_file.created_at.timestamp()),
).model_dump(mode="json"),
201,
)
except services.errors.file.FileTooLargeError as file_too_large_error:
raise FileTooLargeError(file_too_large_error.description)
except services.errors.file.UnsupportedFileTypeError:
raise UnsupportedFileTypeError()
return FileWithSignedUrl(
id=upload_file.id,
name=upload_file.name,
size=upload_file.size,
extension=upload_file.extension,
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
mime_type=upload_file.mime_type,
created_by=upload_file.created_by,
created_at=int(upload_file.created_at.timestamp()),
)

View File

@ -42,7 +42,15 @@ class SetupResponse(BaseModel):
tags=["console"],
)
def get_setup_status_api() -> SetupStatusResponse:
"""Get system setup status."""
"""Get system setup status.
NOTE: This endpoint is unauthenticated by design.
During first-time bootstrap there is no admin account yet, so frontend initialization must be
able to query setup progress before any login flow exists.
Only bootstrap-safe status information should be returned by this endpoint.
"""
if dify_config.EDITION == "SELF_HOSTED":
setup_status = get_setup_status()
if setup_status and not isinstance(setup_status, bool):
@ -61,7 +69,12 @@ def get_setup_status_api() -> SetupStatusResponse:
)
@only_edition_self_hosted
def setup_system(payload: SetupRequestPayload) -> SetupResponse:
"""Initialize system setup with admin account."""
"""Initialize system setup with admin account.
NOTE: This endpoint is unauthenticated by design for first-time bootstrap.
Access is restricted by deployment mode (`SELF_HOSTED`), one-time setup guards,
and init-password validation rather than user session authentication.
"""
if get_setup_status():
raise AlreadySetupError()

View File

@ -34,7 +34,7 @@ def stream_topic_events(
on_subscribe()
while True:
try:
msg = sub.receive(timeout=0.1)
msg = sub.receive(timeout=1)
except SubscriptionClosedError:
return
if msg is None:

View File

@ -45,6 +45,8 @@ from core.app.entities.task_entities import (
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.file import helpers as file_helpers
from core.file.enums import FileTransferMethod
from core.model_manager import ModelInstance
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
@ -56,10 +58,11 @@ from core.ops.entities.trace_entity import TraceTaskName
from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
from core.prompt.utils.prompt_message_util import PromptMessageUtil
from core.prompt.utils.prompt_template_parser import PromptTemplateParser
from core.tools.signature import sign_tool_file
from events.message_event import message_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.model import AppMode, Conversation, Message, MessageAgentThought
from models.model import AppMode, Conversation, Message, MessageAgentThought, MessageFile, UploadFile
logger = logging.getLogger(__name__)
@ -463,6 +466,85 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
metadata=metadata_dict,
)
def _record_files(self):
with Session(db.engine, expire_on_commit=False) as session:
message_files = session.scalars(select(MessageFile).where(MessageFile.message_id == self._message_id)).all()
if not message_files:
return None
files_list = []
upload_file_ids = [
mf.upload_file_id
for mf in message_files
if mf.transfer_method == FileTransferMethod.LOCAL_FILE and mf.upload_file_id
]
upload_files_map = {}
if upload_file_ids:
upload_files = session.scalars(select(UploadFile).where(UploadFile.id.in_(upload_file_ids))).all()
upload_files_map = {uf.id: uf for uf in upload_files}
for message_file in message_files:
upload_file = None
if message_file.transfer_method == FileTransferMethod.LOCAL_FILE and message_file.upload_file_id:
upload_file = upload_files_map.get(message_file.upload_file_id)
url = None
filename = "file"
mime_type = "application/octet-stream"
size = 0
extension = ""
if message_file.transfer_method == FileTransferMethod.REMOTE_URL:
url = message_file.url
if message_file.url:
filename = message_file.url.split("/")[-1].split("?")[0] # Remove query params
elif message_file.transfer_method == FileTransferMethod.LOCAL_FILE:
if upload_file:
url = file_helpers.get_signed_file_url(upload_file_id=str(upload_file.id))
filename = upload_file.name
mime_type = upload_file.mime_type or "application/octet-stream"
size = upload_file.size or 0
extension = f".{upload_file.extension}" if upload_file.extension else ""
elif message_file.upload_file_id:
# Fallback: generate URL even if upload_file not found
url = file_helpers.get_signed_file_url(upload_file_id=str(message_file.upload_file_id))
elif message_file.transfer_method == FileTransferMethod.TOOL_FILE and message_file.url:
# For tool files, use URL directly if it's HTTP, otherwise sign it
if message_file.url.startswith("http"):
url = message_file.url
filename = message_file.url.split("/")[-1].split("?")[0]
else:
# Extract tool file id and extension from URL
url_parts = message_file.url.split("/")
if url_parts:
file_part = url_parts[-1].split("?")[0] # Remove query params first
# Use rsplit to correctly handle filenames with multiple dots
if "." in file_part:
tool_file_id, ext = file_part.rsplit(".", 1)
extension = f".{ext}"
else:
tool_file_id = file_part
extension = ".bin"
url = sign_tool_file(tool_file_id=tool_file_id, extension=extension)
filename = file_part
transfer_method_value = message_file.transfer_method
remote_url = message_file.url if message_file.transfer_method == FileTransferMethod.REMOTE_URL else ""
file_dict = {
"related_id": message_file.id,
"extension": extension,
"filename": filename,
"size": size,
"mime_type": mime_type,
"transfer_method": transfer_method_value,
"type": message_file.type,
"url": url or "",
"upload_file_id": message_file.upload_file_id or message_file.id,
"remote_url": remote_url,
}
files_list.append(file_dict)
return files_list or None
def _agent_message_to_stream_response(self, answer: str, message_id: str) -> AgentMessageStreamResponse:
"""
Agent message to stream response.
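
One detail worth calling out from _record_files: rsplit(".", 1) splits on the last dot only, so tool-file names containing dots keep their full id. A standalone check:

def split_tool_file(file_part: str) -> tuple[str, str]:
    # rsplit(".", 1) keeps earlier dots in the id: "a.b.c.png" -> ("a.b.c", ".png")
    if "." in file_part:
        tool_file_id, ext = file_part.rsplit(".", 1)
        return tool_file_id, f".{ext}"
    return file_part, ".bin"

assert split_tool_file("a.b.c.png") == ("a.b.c", ".png")
assert split_tool_file("abc123") == ("abc123", ".bin")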

View File

@ -64,7 +64,13 @@ class MessageCycleManager:
# Use SQLAlchemy 2.x style session.scalar(select(...))
with session_factory.create_session() as session:
message_file = session.scalar(select(MessageFile).where(MessageFile.message_id == message_id))
message_file = session.scalar(
select(MessageFile)
.where(
MessageFile.message_id == message_id,
)
.where(MessageFile.belongs_to == "assistant")
)
if message_file:
self._message_has_file.add(message_id)
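
Chained .where() calls combine with AND, so the query above (names as in the hunk) is equivalent to the single-call form:

from sqlalchemy import select

stmt = select(MessageFile).where(
    MessageFile.message_id == message_id,
    MessageFile.belongs_to == "assistant",
)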

View File

@ -5,7 +5,7 @@ from collections.abc import Generator
from copy import deepcopy
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
if TYPE_CHECKING: # pragma: no cover
from models.model import File
from core.tools.__base.tool_runtime import ToolRuntime
@ -171,7 +171,7 @@ class Tool(ABC):
def create_file_message(self, file: File) -> ToolInvokeMessage:
return ToolInvokeMessage(
type=ToolInvokeMessage.MessageType.FILE,
message=ToolInvokeMessage.FileMessage(),
message=ToolInvokeMessage.FileMessage(file_marker="file_marker"),
meta={"file": file},
)

View File

@ -80,8 +80,14 @@ def init_app(app: DifyApp) -> Celery:
worker_hijack_root_logger=False,
timezone=pytz.timezone(dify_config.LOG_TZ or "UTC"),
task_ignore_result=True,
task_annotations=dify_config.CELERY_TASK_ANNOTATIONS,
)
if dify_config.CELERY_BACKEND == "redis":
celery_app.conf.update(
result_backend_transport_options=broker_transport_options,
)
# Apply SSL configuration if enabled
ssl_options = _get_celery_ssl_options()
if ssl_options:

View File

@ -119,7 +119,7 @@ class RedisClientWrapper:
redis_client: RedisClientWrapper = RedisClientWrapper()
pubsub_redis_client: RedisClientWrapper = RedisClientWrapper()
_pubsub_redis_client: redis.Redis | RedisCluster | None = None
def _get_ssl_configuration() -> tuple[type[Union[Connection, SSLConnection]], dict[str, Any]]:
@ -232,7 +232,7 @@ def _create_standalone_client(redis_params: dict[str, Any]) -> Union[redis.Redis
return client
def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> Union[redis.Redis, RedisCluster]:
def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> redis.Redis | RedisCluster:
if use_clusters:
return RedisCluster.from_url(pubsub_url)
return redis.Redis.from_url(pubsub_url)
@ -256,23 +256,19 @@ def init_app(app: DifyApp):
redis_client.initialize(client)
app.extensions["redis"] = redis_client
pubsub_client = client
global _pubsub_redis_client
_pubsub_redis_client = client
if dify_config.normalized_pubsub_redis_url:
pubsub_client = _create_pubsub_client(
_pubsub_redis_client = _create_pubsub_client(
dify_config.normalized_pubsub_redis_url, dify_config.PUBSUB_REDIS_USE_CLUSTERS
)
pubsub_redis_client.initialize(pubsub_client)
def get_pubsub_redis_client() -> RedisClientWrapper:
return pubsub_redis_client
def get_pubsub_broadcast_channel() -> BroadcastChannelProtocol:
redis_conn = get_pubsub_redis_client()
assert _pubsub_redis_client is not None, "PubSub redis Client should be initialized here."
if dify_config.PUBSUB_REDIS_CHANNEL_TYPE == "sharded":
return ShardedRedisBroadcastChannel(redis_conn) # pyright: ignore[reportArgumentType]
return RedisBroadcastChannel(redis_conn) # pyright: ignore[reportArgumentType]
return ShardedRedisBroadcastChannel(_pubsub_redis_client)
return RedisBroadcastChannel(_pubsub_redis_client)
P = ParamSpec("P")

View File

@ -1,6 +1,6 @@
import functools
from collections.abc import Callable
from typing import Any, TypeVar, cast
from typing import ParamSpec, TypeVar, cast
from opentelemetry.trace import get_tracer
@ -8,7 +8,8 @@ from configs import dify_config
from extensions.otel.decorators.handler import SpanHandler
from extensions.otel.runtime import is_instrument_flag_enabled
T = TypeVar("T", bound=Callable[..., Any])
P = ParamSpec("P")
R = TypeVar("R")
_HANDLER_INSTANCES: dict[type[SpanHandler], SpanHandler] = {SpanHandler: SpanHandler()}
@ -20,7 +21,7 @@ def _get_handler_instance(handler_class: type[SpanHandler]) -> SpanHandler:
return _HANDLER_INSTANCES[handler_class]
def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T], T]:
def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[Callable[P, R]], Callable[P, R]]:
"""
Decorator that traces a function with an OpenTelemetry span.
@ -30,9 +31,9 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T],
:param handler_class: Optional handler class to use for this span. If None, uses the default SpanHandler.
"""
def decorator(func: T) -> T:
def decorator(func: Callable[P, R]) -> Callable[P, R]:
@functools.wraps(func)
def wrapper(*args: Any, **kwargs: Any) -> Any:
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
if not (dify_config.ENABLE_OTEL or is_instrument_flag_enabled()):
return func(*args, **kwargs)
@ -46,6 +47,6 @@ def trace_span(handler_class: type[SpanHandler] | None = None) -> Callable[[T],
kwargs=kwargs,
)
return cast(T, wrapper)
return cast(Callable[P, R], wrapper)
return decorator
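
For reference, a self-contained sketch of the ParamSpec pattern adopted here; unlike the old TypeVar bound to Callable[..., Any], it lets type checkers see the wrapped function's exact parameters and return type:

import functools
from collections.abc import Callable
from typing import ParamSpec, TypeVar, cast

P = ParamSpec("P")
R = TypeVar("R")

def passthrough(func: Callable[P, R]) -> Callable[P, R]:
    @functools.wraps(func)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        return func(*args, **kwargs)
    return cast(Callable[P, R], wrapper)

@passthrough
def add(a: int, b: int) -> int:
    return a + b

result: int = add(1, 2)  # checkers still see (a: int, b: int) -> int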

View File

@ -1,9 +1,11 @@
import inspect
from collections.abc import Callable, Mapping
from typing import Any
from typing import Any, TypeVar
from opentelemetry.trace import SpanKind, Status, StatusCode
R = TypeVar("R")
class SpanHandler:
"""
@ -31,9 +33,9 @@ class SpanHandler:
def _extract_arguments(
self,
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
) -> dict[str, Any] | None:
"""
Extract function arguments using inspect.signature.
@ -62,10 +64,10 @@ class SpanHandler:
def wrapper(
self,
tracer: Any,
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> Any:
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
) -> R:
"""
Fully control the wrapper behavior.

View File

@ -1,6 +1,6 @@
import logging
from collections.abc import Callable, Mapping
from typing import Any
from typing import Any, TypeVar
from opentelemetry.trace import SpanKind, Status, StatusCode
from opentelemetry.util.types import AttributeValue
@ -12,16 +12,19 @@ from models.model import Account
logger = logging.getLogger(__name__)
R = TypeVar("R")
class AppGenerateHandler(SpanHandler):
"""Span handler for ``AppGenerateService.generate``."""
def wrapper(
self,
tracer: Any,
wrapped: Callable[..., Any],
args: tuple[Any, ...],
kwargs: Mapping[str, Any],
) -> Any:
wrapped: Callable[..., R],
args: tuple[object, ...],
kwargs: Mapping[str, object],
) -> R:
try:
arguments = self._extract_arguments(wrapped, args, kwargs)
if not arguments:

View File

@ -152,7 +152,7 @@ class RedisSubscriptionBase(Subscription):
"""Iterator for consuming messages from the subscription."""
while not self._closed.is_set():
try:
item = self._queue.get(timeout=0.1)
item = self._queue.get(timeout=1)
except queue.Empty:
continue
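
Several hunks in this compare raise poll timeouts from 0.1s to 1s. The loop shape is the same everywhere; at 1s the idle loop wakes roughly ten times less often, which is where the CPU saving comes from. A minimal sketch:

import queue
import threading

def drain(q: queue.Queue, closed: threading.Event):
    # Blocking get with a 1s timeout: the loop still notices `closed`
    # within a second, but wakes ~10x less often than with timeout=0.1.
    while not closed.is_set():
        try:
            item = q.get(timeout=1)
        except queue.Empty:
            continue
        yield item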

View File

@ -1,7 +1,7 @@
from __future__ import annotations
from libs.broadcast_channel.channel import Producer, Subscriber, Subscription
from redis import Redis
from redis import Redis, RedisCluster
from ._subscription import RedisSubscriptionBase
@ -18,7 +18,7 @@ class BroadcastChannel:
def __init__(
self,
redis_client: Redis,
redis_client: Redis | RedisCluster,
):
self._client = redis_client
@ -27,7 +27,7 @@ class BroadcastChannel:
class Topic:
def __init__(self, redis_client: Redis, topic: str):
def __init__(self, redis_client: Redis | RedisCluster, topic: str):
self._client = redis_client
self._topic = topic

View File

@ -70,8 +70,9 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
# Since we have already filtered at the caller's site, we can safely set
# `ignore_subscribe_messages=False`.
if isinstance(self._client, RedisCluster):
# NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message`
# would use busy-looping to wait for incoming message, consuming excessive CPU quota.
# NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message` without
# specifying the `target_node` argument would use busy-looping to wait
# for incoming message, consuming excessive CPU quota.
#
# Here we specify the `target_node` to mitigate this problem.
node = self._client.get_node_from_key(self._topic)
@ -80,8 +81,10 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
timeout=1,
target_node=node,
)
else:
elif isinstance(self._client, Redis):
return self._pubsub.get_sharded_message(ignore_subscribe_messages=False, timeout=1) # type: ignore[attr-defined]
else:
raise AssertionError("client should be either Redis or RedisCluster.")
def _get_message_type(self) -> str:
return "smessage"

View File

@ -0,0 +1,59 @@
"""add unique constraint to tenant_default_models
Revision ID: fix_tenant_default_model_unique
Revises: 9d77545f524e
Create Date: 2026-01-19 15:07:00.000000
"""
from alembic import op
import sqlalchemy as sa
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = 'f55813ffe2c8'
down_revision = 'c3df22613c99'
branch_labels = None
depends_on = None
def upgrade():
# First, remove duplicate records keeping only the most recent one per (tenant_id, model_type)
# This is necessary before adding the unique constraint
conn = op.get_bind()
# Delete duplicates: keep the record with the latest updated_at for each (tenant_id, model_type)
# If updated_at is the same, keep the one with the largest id as tiebreaker
if _is_pg(conn):
# PostgreSQL: Use DISTINCT ON for efficient deduplication
conn.execute(sa.text("""
DELETE FROM tenant_default_models
WHERE id NOT IN (
SELECT DISTINCT ON (tenant_id, model_type) id
FROM tenant_default_models
ORDER BY tenant_id, model_type, updated_at DESC, id DESC
)
"""))
else:
# MySQL: Use self-join to find and delete duplicates
# Keep the record with latest updated_at (or largest id if updated_at is equal)
conn.execute(sa.text("""
DELETE t1 FROM tenant_default_models t1
INNER JOIN tenant_default_models t2
ON t1.tenant_id = t2.tenant_id
AND t1.model_type = t2.model_type
AND (t1.updated_at < t2.updated_at
OR (t1.updated_at = t2.updated_at AND t1.id < t2.id))
"""))
# Now add the unique constraint
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.create_unique_constraint('unique_tenant_default_model_type', ['tenant_id', 'model_type'])
def downgrade():
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
batch_op.drop_constraint('unique_tenant_default_model_type', type_='unique')
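
A hedged pre-flight check one might run before this migration, confirming no duplicate (tenant_id, model_type) pairs remain; the DSN is a placeholder:

from sqlalchemy import create_engine, text

engine = create_engine("postgresql://...")  # fill in the real DSN
with engine.connect() as conn:
    dupes = conn.execute(text("""
        SELECT tenant_id, model_type, COUNT(*) AS n
        FROM tenant_default_models
        GROUP BY tenant_id, model_type
        HAVING COUNT(*) > 1
    """)).all()
assert not dupes, f"{len(dupes)} duplicate (tenant_id, model_type) pairs remain"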

View File

@ -0,0 +1,39 @@
"""fix index to optimize message clean job performance
Revision ID: fce013ca180e
Revises: f55813ffe2c8
Create Date: 2026-02-11 15:49:17.603638
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'fce013ca180e'
down_revision = 'f55813ffe2c8'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('message_created_at_idx'))
with op.batch_alter_table('saved_messages', schema=None) as batch_op:
batch_op.create_index('saved_message_message_id_idx', ['message_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('saved_messages', schema=None) as batch_op:
batch_op.drop_index('saved_message_message_id_idx')
with op.batch_alter_table('messages', schema=None) as batch_op:
batch_op.create_index(batch_op.f('message_created_at_idx'), ['created_at'], unique=False)
# ### end Alembic commands ###

View File

@ -227,7 +227,7 @@ class App(Base):
with Session(db.engine) as session:
if api_provider_ids:
existing_api_providers = [
api_provider.id
str(api_provider.id)
for api_provider in session.execute(
text("SELECT id FROM tool_api_providers WHERE id IN :provider_ids"),
{"provider_ids": tuple(api_provider_ids)},
@ -1040,7 +1040,6 @@ class Message(Base):
Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"),
Index("message_account_idx", "app_id", "from_source", "from_account_id"),
Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
Index("message_created_at_idx", "created_at"),
Index("message_app_mode_idx", "app_mode"),
Index("message_created_at_id_idx", "created_at", "id"),
)

View File

@ -181,6 +181,7 @@ class TenantDefaultModel(TypeBase):
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="tenant_default_model_pkey"),
sa.Index("tenant_default_model_tenant_id_provider_type_idx", "tenant_id", "provider_name", "model_type"),
sa.UniqueConstraint("tenant_id", "model_type", name="unique_tenant_default_model_type"),
)
id: Mapped[str] = mapped_column(

View File

@ -16,6 +16,7 @@ class SavedMessage(TypeBase):
__table_args__ = (
sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
sa.Index("saved_message_message_id_idx", "message_id"),
)
id: Mapped[str] = mapped_column(

View File

@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.12.1"
version = "1.13.0"
requires-python = ">=3.11,<3.13"
dependencies = [
@ -23,7 +23,7 @@ dependencies = [
"gevent~=25.9.1",
"gmpy2~=2.2.1",
"google-api-core==2.18.0",
"google-api-python-client==2.90.0",
"google-api-python-client==2.189.0",
"google-auth==2.29.0",
"google-auth-httplib2==0.2.0",
"google-cloud-aiplatform==1.49.0",

View File

@ -1225,7 +1225,12 @@ class TenantService:
@staticmethod
def remove_member_from_tenant(tenant: Tenant, account: Account, operator: Account):
"""Remove member from tenant"""
"""Remove member from tenant.
If the removed member has ``AccountStatus.PENDING`` (invited but never
activated) and no remaining workspace memberships, the orphaned account
record is deleted as well.
"""
if operator.id == account.id:
raise CannotOperateSelfError("Cannot operate self.")
@ -1235,9 +1240,31 @@ class TenantService:
if not ta:
raise MemberNotInTenantError("Member not in tenant.")
# Capture identifiers before any deletions; attribute access on the ORM
# object may fail after commit() expires the instance.
account_id = account.id
account_email = account.email
db.session.delete(ta)
# Clean up orphaned pending accounts (invited but never activated)
should_delete_account = False
if account.status == AccountStatus.PENDING:
# autoflush flushes ta deletion before this query, so 0 means no remaining joins
remaining_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).count()
if remaining_joins == 0:
db.session.delete(account)
should_delete_account = True
db.session.commit()
if should_delete_account:
logger.info(
"Deleted orphaned pending account: account_id=%s, email=%s",
account_id,
account_email,
)
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(tenant.id)
@ -1245,13 +1272,13 @@ class TenantService:
from services.enterprise.account_deletion_sync import sync_workspace_member_removal
sync_success = sync_workspace_member_removal(
workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
workspace_id=tenant.id, member_id=account_id, source="workspace_member_removed"
)
if not sync_success:
logger.warning(
"Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
tenant.id,
account.id,
account_id,
)
@staticmethod

View File

@ -22,7 +22,7 @@ from libs.exception import BaseHTTPException
from models.human_input import RecipientType
from models.model import App, AppMode
from repositories.factory import DifyAPIRepositoryFactory
from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE, resume_app_execution
from tasks.app_generate.workflow_execute_task import resume_app_execution
class Form:
@ -230,7 +230,6 @@ class HumanInputService:
try:
resume_app_execution.apply_async(
kwargs={"payload": payload},
queue=WORKFLOW_BASED_APP_EXECUTION_QUEUE,
)
except Exception: # pragma: no cover
logger.exception("Failed to enqueue resume task for workflow run %s", workflow_run_id)

View File

@ -1,10 +1,13 @@
import datetime
import logging
import os
import random
import time
from collections.abc import Sequence
from typing import cast
from sqlalchemy import delete, select
import sqlalchemy as sa
from sqlalchemy import delete, select, tuple_
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session
@ -193,11 +196,15 @@ class MessagesCleanService:
self._end_before,
)
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
while True:
stats["batches"] += 1
batch_start = time.monotonic()
# Step 1: Fetch a batch of messages using cursor
with Session(db.engine, expire_on_commit=False) as session:
fetch_messages_start = time.monotonic()
msg_stmt = (
select(Message.id, Message.app_id, Message.created_at)
.where(Message.created_at < self._end_before)
@ -209,13 +216,13 @@ class MessagesCleanService:
msg_stmt = msg_stmt.where(Message.created_at >= self._start_from)
# Apply cursor condition: (created_at, id) > (last_created_at, last_message_id)
# This translates to:
# created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id)
if _cursor:
# Continuing from previous batch
msg_stmt = msg_stmt.where(
(Message.created_at > _cursor[0])
| ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1]))
tuple_(Message.created_at, Message.id)
> tuple_(
sa.literal(_cursor[0], type_=sa.DateTime()),
sa.literal(_cursor[1], type_=Message.id.type),
)
)
raw_messages = list(session.execute(msg_stmt).all())
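
The tuple_ rewrite above is standard keyset (cursor) pagination: one row-value comparison replaces the OR-of-ANDs predicate and pairs naturally with the (created_at, id) composite index added elsewhere in this compare. A minimal sketch with the same names (Message imported as elsewhere in this diff; row-value comparison requires PostgreSQL/MySQL):

import sqlalchemy as sa
from sqlalchemy import select, tuple_

from models.model import Message

def fetch_batch(session, cursor, end_before, batch_size=1000):
    stmt = (
        select(Message.id, Message.app_id, Message.created_at)
        .where(Message.created_at < end_before)
        .order_by(Message.created_at, Message.id)
        .limit(batch_size)
    )
    if cursor:
        # (created_at, id) > (:a, :b) expands to
        # created_at > :a OR (created_at = :a AND id > :b)
        stmt = stmt.where(
            tuple_(Message.created_at, Message.id)
            > tuple_(sa.literal(cursor[0]), sa.literal(cursor[1]))
        )
    return session.execute(stmt).all()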
@ -223,6 +230,12 @@ class MessagesCleanService:
SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
for msg_id, app_id, msg_created_at in raw_messages
]
logger.info(
"clean_messages (batch %s): fetched %s messages in %sms",
stats["batches"],
len(messages),
int((time.monotonic() - fetch_messages_start) * 1000),
)
# Track total messages fetched across all batches
stats["total_messages"] += len(messages)
@ -241,8 +254,16 @@ class MessagesCleanService:
logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
continue
fetch_apps_start = time.monotonic()
app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
apps = list(session.execute(app_stmt).all())
logger.info(
"clean_messages (batch %s): fetched %s apps for %s app_ids in %sms",
stats["batches"],
len(apps),
len(app_ids),
int((time.monotonic() - fetch_apps_start) * 1000),
)
if not apps:
logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
@ -252,7 +273,15 @@ class MessagesCleanService:
app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}
# Step 3: Delegate to policy to determine which messages to delete
policy_start = time.monotonic()
message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
logger.info(
"clean_messages (batch %s): policy selected %s/%s messages in %sms",
stats["batches"],
len(message_ids_to_delete),
len(messages),
int((time.monotonic() - policy_start) * 1000),
)
if not message_ids_to_delete:
logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
@ -263,14 +292,20 @@ class MessagesCleanService:
# Step 4: Batch delete messages and their relations
if not self._dry_run:
with Session(db.engine, expire_on_commit=False) as session:
delete_relations_start = time.monotonic()
# Delete related records first
self._batch_delete_message_relations(session, message_ids_to_delete)
delete_relations_ms = int((time.monotonic() - delete_relations_start) * 1000)
# Delete messages
delete_messages_start = time.monotonic()
delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
delete_result = cast(CursorResult, session.execute(delete_stmt))
messages_deleted = delete_result.rowcount
delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000)
commit_start = time.monotonic()
session.commit()
commit_ms = int((time.monotonic() - commit_start) * 1000)
stats["total_deleted"] += messages_deleted
@ -280,6 +315,19 @@ class MessagesCleanService:
len(messages),
messages_deleted,
)
logger.info(
"clean_messages (batch %s): relations %sms, messages %sms, commit %sms, batch total %sms",
stats["batches"],
delete_relations_ms,
delete_messages_ms,
commit_ms,
int((time.monotonic() - batch_start) * 1000),
)
# Random sleep between batches to avoid overwhelming the database
sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311
logger.info("clean_messages (batch %s): sleeping for %.2fms", stats["batches"], sleep_ms)
time.sleep(sleep_ms / 1000)
else:
# Log random sample of message IDs that would be deleted (up to 10)
sample_size = min(10, len(message_ids_to_delete))

View File

@ -1,5 +1,8 @@
import datetime
import logging
import os
import random
import time
from collections.abc import Iterable, Sequence
import click
@ -72,7 +75,12 @@ class WorkflowRunCleanup:
batch_index = 0
last_seen: tuple[datetime.datetime, str] | None = None
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
while True:
batch_start = time.monotonic()
fetch_start = time.monotonic()
run_rows = self.workflow_run_repo.get_runs_batch_by_time_range(
start_from=self.window_start,
end_before=self.window_end,
@ -80,12 +88,30 @@ class WorkflowRunCleanup:
batch_size=self.batch_size,
)
if not run_rows:
logger.info("workflow_run_cleanup (batch #%s): no more rows to process", batch_index + 1)
break
batch_index += 1
last_seen = (run_rows[-1].created_at, run_rows[-1].id)
logger.info(
"workflow_run_cleanup (batch #%s): fetched %s rows in %sms",
batch_index,
len(run_rows),
int((time.monotonic() - fetch_start) * 1000),
)
tenant_ids = {row.tenant_id for row in run_rows}
filter_start = time.monotonic()
free_tenants = self._filter_free_tenants(tenant_ids)
logger.info(
"workflow_run_cleanup (batch #%s): filtered %s free tenants from %s tenants in %sms",
batch_index,
len(free_tenants),
len(tenant_ids),
int((time.monotonic() - filter_start) * 1000),
)
free_runs = [row for row in run_rows if row.tenant_id in free_tenants]
paid_or_skipped = len(run_rows) - len(free_runs)
@ -104,11 +130,17 @@ class WorkflowRunCleanup:
total_runs_targeted += len(free_runs)
if self.dry_run:
count_start = time.monotonic()
batch_counts = self.workflow_run_repo.count_runs_with_related(
free_runs,
count_node_executions=self._count_node_executions,
count_trigger_logs=self._count_trigger_logs,
)
logger.info(
"workflow_run_cleanup (batch #%s, dry_run): counted related records in %sms",
batch_index,
int((time.monotonic() - count_start) * 1000),
)
if related_totals is not None:
for key in related_totals:
related_totals[key] += batch_counts.get(key, 0)
@ -120,14 +152,21 @@ class WorkflowRunCleanup:
fg="yellow",
)
)
logger.info(
"workflow_run_cleanup (batch #%s, dry_run): batch total %sms",
batch_index,
int((time.monotonic() - batch_start) * 1000),
)
continue
try:
delete_start = time.monotonic()
counts = self.workflow_run_repo.delete_runs_with_related(
free_runs,
delete_node_executions=self._delete_node_executions,
delete_trigger_logs=self._delete_trigger_logs,
)
delete_ms = int((time.monotonic() - delete_start) * 1000)
except Exception:
logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0])
raise
@ -143,6 +182,17 @@ class WorkflowRunCleanup:
fg="green",
)
)
logger.info(
"workflow_run_cleanup (batch #%s): delete %sms, batch total %sms",
batch_index,
delete_ms,
int((time.monotonic() - batch_start) * 1000),
)
# Random sleep between batches to avoid overwhelming the database
sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311
logger.info("workflow_run_cleanup (batch #%s): sleeping for %.2fms", batch_index, sleep_ms)
time.sleep(sleep_ms / 1000)
if self.dry_run:
if self.window_start:
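
Both cleanup jobs end each batch with the same jittered pause; factored out, the idea is simply:

import os
import random
import time

MAX_INTERVAL_MS = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))

def throttle_between_batches():
    # A random 0..MAX sleep de-synchronizes concurrent cleaners so batches
    # do not hit the database in lockstep.
    sleep_ms = random.uniform(0, MAX_INTERVAL_MS)
    time.sleep(sleep_ms / 1000)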

View File

@ -129,15 +129,15 @@ def build_workflow_event_stream(
return
try:
event = buffer_state.queue.get(timeout=0.1)
event = buffer_state.queue.get(timeout=1)
except queue.Empty:
current_time = time.time()
if current_time - last_msg_time > idle_timeout:
logger.debug(
"No workflow events received for %s seconds, keeping stream open",
"Idle timeout of %s seconds reached, closing workflow event stream.",
idle_timeout,
)
last_msg_time = current_time
return
if current_time - last_ping_time >= ping_interval:
yield StreamEvent.PING.value
last_ping_time = current_time
@ -405,7 +405,7 @@ def _start_buffering(subscription) -> BufferState:
dropped_count = 0
try:
while not buffer_state.stop_event.is_set():
msg = subscription.receive(timeout=0.1)
msg = subscription.receive(timeout=1)
if msg is None:
continue
event = _parse_event_message(msg)

View File

@ -1,6 +1,7 @@
from flask_login import current_user
from configs import dify_config
from enums.cloud_plan import CloudPlan
from extensions.ext_database import db
from models.account import Tenant, TenantAccountJoin, TenantAccountRole
from services.account_service import TenantService
@ -53,7 +54,12 @@ class WorkspaceService:
from services.credit_pool_service import CreditPoolService
paid_pool = CreditPoolService.get_pool(tenant_id=tenant.id, pool_type="paid")
if paid_pool:
# if the tenant is not on the sandbox plan and the paid pool is not full, use the paid pool
if (
feature.billing.subscription.plan != CloudPlan.SANDBOX
and paid_pool is not None
and (paid_pool.quota_limit == -1 or paid_pool.quota_limit > paid_pool.quota_used)
):
tenant_info["trial_credits"] = paid_pool.quota_limit
tenant_info["trial_credits_used"] = paid_pool.quota_used
else:

View File

@ -51,7 +51,7 @@ def _patch_redis_clients_on_loaded_modules():
continue
if hasattr(module, "redis_client"):
module.redis_client = redis_mock
if hasattr(module, "pubsub_redis_client"):
if hasattr(module, "_pubsub_redis_client"):
module.pubsub_redis_client = redis_mock
@ -72,7 +72,7 @@ def _patch_redis_clients():
with (
patch.object(ext_redis, "redis_client", redis_mock),
patch.object(ext_redis, "pubsub_redis_client", redis_mock),
patch.object(ext_redis, "_pubsub_redis_client", redis_mock),
):
_patch_redis_clients_on_loaded_modules()
yield

View File

@ -0,0 +1,34 @@
from datetime import datetime
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
from controllers.console.app.conversation import _get_conversation
def test_get_conversation_mark_read_keeps_updated_at_unchanged():
app_model = SimpleNamespace(id="app-id")
account = SimpleNamespace(id="account-id")
conversation = MagicMock()
conversation.id = "conversation-id"
with (
patch("controllers.console.app.conversation.current_account_with_tenant", return_value=(account, None)),
patch("controllers.console.app.conversation.naive_utc_now", return_value=datetime(2026, 2, 9, 0, 0, 0)),
patch("controllers.console.app.conversation.db.session") as mock_session,
):
mock_session.query.return_value.where.return_value.first.return_value = conversation
_get_conversation(app_model, "conversation-id")
statement = mock_session.execute.call_args[0][0]
compiled = statement.compile()
sql_text = str(compiled).lower()
compact_sql_text = sql_text.replace(" ", "")
params = compiled.params
assert "updated_at=current_timestamp" not in compact_sql_text
assert "updated_at=conversations.updated_at" in compact_sql_text
assert "read_at=:read_at" in compact_sql_text
assert "read_account_id=:read_account_id" in compact_sql_text
assert params["read_at"] == datetime(2026, 2, 9, 0, 0, 0)
assert params["read_account_id"] == "account-id"

View File

@ -1,92 +1,286 @@
import builtins
"""Tests for remote file upload API endpoints using Flask-RESTX."""
import contextlib
from datetime import datetime
from types import SimpleNamespace
from unittest.mock import patch
from unittest.mock import Mock, patch
import httpx
import pytest
from flask import Flask
from flask.views import MethodView
from extensions import ext_fastopenapi
if not hasattr(builtins, "MethodView"):
builtins.MethodView = MethodView # type: ignore[attr-defined]
from flask import Flask, g
@pytest.fixture
def app() -> Flask:
"""Create Flask app for testing."""
app = Flask(__name__)
app.config["TESTING"] = True
app.config["SECRET_KEY"] = "test-secret-key"
return app
def test_console_remote_files_fastopenapi_get_info(app: Flask):
ext_fastopenapi.init_app(app)
@pytest.fixture
def client(app):
"""Create test client with console blueprint registered."""
from controllers.console import bp
response = httpx.Response(
200,
request=httpx.Request("HEAD", "http://example.com/file.txt"),
headers={"Content-Type": "text/plain", "Content-Length": "10"},
)
with patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response):
client = app.test_client()
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
resp = client.get(f"/console/api/remote-files/{encoded_url}")
assert resp.status_code == 200
assert resp.get_json() == {"file_type": "text/plain", "file_length": 10}
app.register_blueprint(bp)
return app.test_client()
def test_console_remote_files_fastopenapi_upload(app: Flask):
ext_fastopenapi.init_app(app)
@pytest.fixture
def mock_account():
"""Create a mock account for testing."""
from models import Account
head_response = httpx.Response(
200,
request=httpx.Request("GET", "http://example.com/file.txt"),
content=b"hello",
)
file_info = SimpleNamespace(
extension="txt",
size=5,
filename="file.txt",
mimetype="text/plain",
)
uploaded = SimpleNamespace(
id="file-id",
name="file.txt",
size=5,
extension="txt",
mime_type="text/plain",
created_by="user-id",
created_at=datetime(2024, 1, 1),
)
account = Mock(spec=Account)
account.id = "test-account-id"
account.current_tenant_id = "test-tenant-id"
return account
with (
patch("controllers.console.remote_files.db", new=SimpleNamespace(engine=object())),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
patch("controllers.console.remote_files.helpers.guess_file_info_from_response", return_value=file_info),
patch("controllers.console.remote_files.FileService.is_file_size_within_limit", return_value=True),
patch("controllers.console.remote_files.FileService.__init__", return_value=None),
patch("controllers.console.remote_files.current_account_with_tenant", return_value=(object(), "tenant-id")),
patch("controllers.console.remote_files.FileService.upload_file", return_value=uploaded),
patch("controllers.console.remote_files.file_helpers.get_signed_file_url", return_value="signed-url"),
):
client = app.test_client()
resp = client.post(
"/console/api/remote-files/upload",
json={"url": "http://example.com/file.txt"},
@pytest.fixture
def auth_ctx(app, mock_account):
"""Context manager to set auth/tenant context in flask.g for a request."""
@contextlib.contextmanager
def _ctx():
with app.test_request_context():
g._login_user = mock_account
g._current_tenant = mock_account.current_tenant_id
yield
return _ctx
class TestGetRemoteFileInfo:
"""Test GET /console/api/remote-files/<path:url> endpoint."""
def test_get_remote_file_info_success(self, app, client, mock_account):
"""Test successful retrieval of remote file info."""
response = httpx.Response(
200,
request=httpx.Request("HEAD", "http://example.com/file.txt"),
headers={"Content-Type": "text/plain", "Content-Length": "1024"},
)
assert resp.status_code == 201
assert resp.get_json() == {
"id": "file-id",
"name": "file.txt",
"size": 5,
"extension": "txt",
"url": "signed-url",
"mime_type": "text/plain",
"created_by": "user-id",
"created_at": int(uploaded.created_at.timestamp()),
}
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response),
patch("libs.login.check_csrf_token", return_value=None),
):
with app.test_request_context():
g._login_user = mock_account
g._current_tenant = mock_account.current_tenant_id
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
resp = client.get(f"/console/api/remote-files/{encoded_url}")
assert resp.status_code == 200
data = resp.get_json()
assert data["file_type"] == "text/plain"
assert data["file_length"] == 1024
def test_get_remote_file_info_fallback_to_get_on_head_failure(self, app, client, mock_account):
"""Test fallback to GET when HEAD returns non-200 status."""
head_response = httpx.Response(
404,
request=httpx.Request("HEAD", "http://example.com/file.pdf"),
)
get_response = httpx.Response(
200,
request=httpx.Request("GET", "http://example.com/file.pdf"),
headers={"Content-Type": "application/pdf", "Content-Length": "2048"},
)
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_response),
patch("libs.login.check_csrf_token", return_value=None),
):
with app.test_request_context():
g._login_user = mock_account
g._current_tenant = mock_account.current_tenant_id
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.pdf"
resp = client.get(f"/console/api/remote-files/{encoded_url}")
assert resp.status_code == 200
data = resp.get_json()
assert data["file_type"] == "application/pdf"
assert data["file_length"] == 2048
class TestRemoteFileUpload:
"""Test POST /console/api/remote-files/upload endpoint."""
@pytest.mark.parametrize(
("head_status", "use_get"),
[
(200, False), # HEAD succeeds
(405, True), # HEAD fails -> fallback GET
],
)
def test_upload_remote_file_success_paths(self, client, mock_account, auth_ctx, head_status, use_get):
url = "http://example.com/file.pdf"
head_resp = httpx.Response(
head_status,
request=httpx.Request("HEAD", url),
headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
)
get_resp = httpx.Response(
200,
request=httpx.Request("GET", url),
headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
content=b"file content",
)
file_info = SimpleNamespace(
extension="pdf",
size=1024,
filename="file.pdf",
mimetype="application/pdf",
)
uploaded_file = SimpleNamespace(
id="uploaded-file-id",
name="file.pdf",
size=1024,
extension="pdf",
mime_type="application/pdf",
created_by="test-account-id",
created_at=datetime(2024, 1, 1, 12, 0, 0),
)
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp) as p_head,
patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_resp) as p_get,
patch(
"controllers.console.remote_files.helpers.guess_file_info_from_response",
return_value=file_info,
),
patch(
"controllers.console.remote_files.FileService.is_file_size_within_limit",
return_value=True,
),
patch("controllers.console.remote_files.db", spec=["engine"]),
patch("controllers.console.remote_files.FileService") as mock_file_service,
patch(
"controllers.console.remote_files.file_helpers.get_signed_file_url",
return_value="http://example.com/signed-url",
),
patch("libs.login.check_csrf_token", return_value=None),
):
mock_file_service.return_value.upload_file.return_value = uploaded_file
with auth_ctx():
resp = client.post(
"/console/api/remote-files/upload",
json={"url": url},
)
assert resp.status_code == 201
p_head.assert_called_once()
# GET is used either for fallback (HEAD fails) or to fetch content after HEAD succeeds
p_get.assert_called_once()
mock_file_service.return_value.upload_file.assert_called_once()
data = resp.get_json()
assert data["id"] == "uploaded-file-id"
assert data["name"] == "file.pdf"
assert data["size"] == 1024
assert data["extension"] == "pdf"
assert data["url"] == "http://example.com/signed-url"
assert data["mime_type"] == "application/pdf"
assert data["created_by"] == "test-account-id"
@pytest.mark.parametrize(
("size_ok", "raises", "expected_status", "expected_msg"),
[
# When size check fails in controller, API returns 413 with message "File size exceeded..."
(False, None, 413, "file size exceeded"),
# When service raises unsupported type, controller maps to 415 with message "File type not allowed."
(True, "unsupported", 415, "file type not allowed"),
],
)
def test_upload_remote_file_errors(
self, client, mock_account, auth_ctx, size_ok, raises, expected_status, expected_msg
):
url = "http://example.com/x.pdf"
head_resp = httpx.Response(
200,
request=httpx.Request("HEAD", url),
headers={"Content-Type": "application/pdf", "Content-Length": "9"},
)
file_info = SimpleNamespace(extension="pdf", size=9, filename="x.pdf", mimetype="application/pdf")
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp),
patch(
"controllers.console.remote_files.helpers.guess_file_info_from_response",
return_value=file_info,
),
patch(
"controllers.console.remote_files.FileService.is_file_size_within_limit",
return_value=size_ok,
),
patch("controllers.console.remote_files.db", spec=["engine"]),
patch("libs.login.check_csrf_token", return_value=None),
):
if raises == "unsupported":
from services.errors.file import UnsupportedFileTypeError
with patch("controllers.console.remote_files.FileService") as mock_file_service:
mock_file_service.return_value.upload_file.side_effect = UnsupportedFileTypeError("bad")
with auth_ctx():
resp = client.post(
"/console/api/remote-files/upload",
json={"url": url},
)
else:
with auth_ctx():
resp = client.post(
"/console/api/remote-files/upload",
json={"url": url},
)
assert resp.status_code == expected_status
data = resp.get_json()
msg = (data.get("error") or {}).get("message") or data.get("message", "")
assert expected_msg in msg.lower()
def test_upload_remote_file_fetch_failure(self, client, mock_account, auth_ctx):
"""Test upload when fetching of remote file fails."""
with (
patch(
"controllers.console.remote_files.current_account_with_tenant",
return_value=(mock_account, "test-tenant-id"),
),
patch(
"controllers.console.remote_files.ssrf_proxy.head",
side_effect=httpx.RequestError("Connection failed"),
),
patch("libs.login.check_csrf_token", return_value=None),
):
with auth_ctx():
resp = client.post(
"/console/api/remote-files/upload",
json={"url": "http://unreachable.com/file.pdf"},
)
assert resp.status_code == 400
data = resp.get_json()
msg = (data.get("error") or {}).get("message") or data.get("message", "")
assert "failed to fetch" in msg.lower()

View File

@ -25,15 +25,19 @@ class TestMessageCycleManagerOptimization:
task_state = Mock()
return MessageCycleManager(application_generate_entity=mock_application_generate_entity, task_state=task_state)
def test_get_message_event_type_with_message_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE_FILE when message has files."""
def test_get_message_event_type_with_assistant_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE_FILE when message has assistant-generated files.
This ensures that AI-generated images (belongs_to='assistant') trigger the MESSAGE_FILE event,
allowing the frontend to properly display generated image files with url field.
"""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
# Setup mock session and message file
mock_session = Mock()
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
mock_message_file = Mock()
# Current implementation uses session.scalar(select(...))
mock_message_file.belongs_to = "assistant"
mock_session.scalar.return_value = mock_message_file
# Execute
@ -44,6 +48,31 @@ class TestMessageCycleManagerOptimization:
assert result == StreamEvent.MESSAGE_FILE
mock_session.scalar.assert_called_once()
def test_get_message_event_type_with_user_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE when message only has user-uploaded files.
This is a regression test for the issue where user-uploaded images (belongs_to='user')
caused the LLM text response to be incorrectly tagged with MESSAGE_FILE event,
resulting in broken images in the chat UI. The query filters for belongs_to='assistant',
so when only user files exist, the database query returns None, resulting in MESSAGE event type.
"""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
# Setup mock session and message file
mock_session = Mock()
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
# When querying for assistant files with only user files present, return None
# (simulates database query with belongs_to='assistant' filter returning no results)
mock_session.scalar.return_value = None
# Execute
with current_app.app_context():
result = message_cycle_manager.get_message_event_type("test-message-id")
# Assert
assert result == StreamEvent.MESSAGE
mock_session.scalar.assert_called_once()
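# For reference, a minimal sketch of the query shape these mocks assume;
# the model and attribute names are inferred from the assertions, not
# taken from the implementation:
#
#   stmt = select(MessageFile).where(
#       MessageFile.message_id == message_id,
#       MessageFile.belongs_to == "assistant",
#   )
#   file = session.scalar(stmt)
#   return StreamEvent.MESSAGE_FILE if file else StreamEvent.MESSAGE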
def test_get_message_event_type_without_message_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE when message has no files."""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
@ -69,7 +98,7 @@ class TestMessageCycleManagerOptimization:
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
mock_message_file = Mock()
# Current implementation uses session.scalar(select(...))
mock_message_file.belongs_to = "assistant"
mock_session.scalar.return_value = mock_message_file
# Execute: compute event type once, then pass to message_to_stream_response

View File

@ -496,6 +496,9 @@ class TestSchemaResolverClass:
avg_time_no_cache = sum(results1) / len(results1)
# Second run (with cache) - run multiple times
# Warm up cache first
resolve_dify_schema_refs(schema)
results2 = []
for _ in range(3):
start = time.perf_counter()

View File

@ -0,0 +1,211 @@
from __future__ import annotations
from collections.abc import Generator
from dataclasses import dataclass
from typing import Any, cast
from core.app.entities.app_invoke_entities import InvokeFrom
from core.tools.__base.tool import Tool
from core.tools.__base.tool_runtime import ToolRuntime
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_entities import ToolEntity, ToolIdentity, ToolInvokeMessage, ToolProviderType
class DummyCastType:
def cast_value(self, value: Any) -> str:
return f"cast:{value}"
@dataclass
class DummyParameter:
name: str
type: DummyCastType
form: str = "llm"
required: bool = False
default: Any = None
options: list[Any] | None = None
llm_description: str | None = None
class DummyTool(Tool):
def __init__(self, entity: ToolEntity, runtime: ToolRuntime):
super().__init__(entity=entity, runtime=runtime)
self.result: ToolInvokeMessage | list[ToolInvokeMessage] | Generator[ToolInvokeMessage, None, None] = (
self.create_text_message("default")
)
self.runtime_parameter_overrides: list[Any] | None = None
self.last_invocation: dict[str, Any] | None = None
def tool_provider_type(self) -> ToolProviderType:
return ToolProviderType.BUILT_IN
def _invoke(
self,
user_id: str,
tool_parameters: dict[str, Any],
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
) -> ToolInvokeMessage | list[ToolInvokeMessage] | Generator[ToolInvokeMessage, None, None]:
self.last_invocation = {
"user_id": user_id,
"tool_parameters": tool_parameters,
"conversation_id": conversation_id,
"app_id": app_id,
"message_id": message_id,
}
return self.result
def get_runtime_parameters(
self,
conversation_id: str | None = None,
app_id: str | None = None,
message_id: str | None = None,
):
if self.runtime_parameter_overrides is not None:
return self.runtime_parameter_overrides
return super().get_runtime_parameters(
conversation_id=conversation_id,
app_id=app_id,
message_id=message_id,
)
def _build_tool(runtime: ToolRuntime | None = None) -> DummyTool:
entity = ToolEntity(
identity=ToolIdentity(author="test", name="dummy", label=I18nObject(en_US="dummy"), provider="test"),
parameters=[],
description=None,
has_runtime_parameters=False,
)
runtime = runtime or ToolRuntime(tenant_id="tenant-1", invoke_from=InvokeFrom.DEBUGGER, runtime_parameters={})
return DummyTool(entity=entity, runtime=runtime)
def test_invoke_supports_single_message_and_parameter_casting():
runtime = ToolRuntime(
tenant_id="tenant-1",
invoke_from=InvokeFrom.DEBUGGER,
runtime_parameters={"from_runtime": "runtime-value"},
)
tool = _build_tool(runtime)
tool.entity.parameters = cast(
Any,
[
DummyParameter(name="unused", type=DummyCastType()),
DummyParameter(name="age", type=DummyCastType()),
],
)
tool.result = tool.create_text_message("ok")
messages = list(
tool.invoke(
user_id="user-1",
tool_parameters={"age": "18", "raw": "keep"},
conversation_id="conv-1",
app_id="app-1",
message_id="msg-1",
)
)
assert len(messages) == 1
assert messages[0].message.text == "ok"
assert tool.last_invocation == {
"user_id": "user-1",
"tool_parameters": {"age": "cast:18", "raw": "keep", "from_runtime": "runtime-value"},
"conversation_id": "conv-1",
"app_id": "app-1",
"message_id": "msg-1",
}
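# A rough sketch of the parameter handling the assertion above relies on
# (assumed from the expected dict, not from the real Tool.invoke body):
# declared parameters are cast, unknown keys pass through unchanged, and
# runtime parameters are merged in last:
#
#   merged = dict(tool_parameters)
#   for param in self.entity.parameters:
#       if param.name in merged:
#           merged[param.name] = param.type.cast_value(merged[param.name])
#   merged.update(self.runtime.runtime_parameters)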
def test_invoke_supports_list_and_generator_results():
tool = _build_tool()
tool.result = [tool.create_text_message("a"), tool.create_text_message("b")]
list_messages = list(tool.invoke(user_id="user-1", tool_parameters={}))
assert [msg.message.text for msg in list_messages] == ["a", "b"]
def _message_generator() -> Generator[ToolInvokeMessage, None, None]:
yield tool.create_text_message("g1")
yield tool.create_text_message("g2")
tool.result = _message_generator()
generated_messages = list(tool.invoke(user_id="user-2", tool_parameters={}))
assert [msg.message.text for msg in generated_messages] == ["g1", "g2"]
def test_fork_tool_runtime_returns_new_tool_with_copied_entity():
tool = _build_tool()
new_runtime = ToolRuntime(tenant_id="tenant-2", invoke_from=InvokeFrom.EXPLORE, runtime_parameters={})
forked = tool.fork_tool_runtime(new_runtime)
assert isinstance(forked, DummyTool)
assert forked is not tool
assert forked.runtime == new_runtime
assert forked.entity == tool.entity
assert forked.entity is not tool.entity
def test_get_runtime_parameters_and_merge_runtime_parameters():
tool = _build_tool()
original = DummyParameter(name="temperature", type=DummyCastType(), form="schema", required=True, default="0.7")
tool.entity.parameters = cast(Any, [original])
default_runtime_parameters = tool.get_runtime_parameters()
assert default_runtime_parameters == [original]
override = DummyParameter(name="temperature", type=DummyCastType(), form="llm", required=False, default="0.5")
appended = DummyParameter(name="new_param", type=DummyCastType(), form="form", required=False, default="x")
tool.runtime_parameter_overrides = [override, appended]
merged = tool.get_merged_runtime_parameters()
assert len(merged) == 2
assert merged[0].name == "temperature"
assert merged[0].form == "llm"
assert merged[0].required is False
assert merged[0].default == "0.5"
assert merged[1].name == "new_param"
def test_message_factory_helpers():
tool = _build_tool()
image_message = tool.create_image_message("https://example.com/image.png")
assert image_message.type == ToolInvokeMessage.MessageType.IMAGE
assert image_message.message.text == "https://example.com/image.png"
file_obj = object()
file_message = tool.create_file_message(file_obj) # type: ignore[arg-type]
assert file_message.type == ToolInvokeMessage.MessageType.FILE
assert file_message.message.file_marker == "file_marker"
assert file_message.meta == {"file": file_obj}
link_message = tool.create_link_message("https://example.com")
assert link_message.type == ToolInvokeMessage.MessageType.LINK
assert link_message.message.text == "https://example.com"
text_message = tool.create_text_message("hello")
assert text_message.type == ToolInvokeMessage.MessageType.TEXT
assert text_message.message.text == "hello"
blob_message = tool.create_blob_message(b"blob", meta={"source": "unit-test"})
assert blob_message.type == ToolInvokeMessage.MessageType.BLOB
assert blob_message.message.blob == b"blob"
assert blob_message.meta == {"source": "unit-test"}
json_message = tool.create_json_message({"k": "v"}, suppress_output=True)
assert json_message.type == ToolInvokeMessage.MessageType.JSON
assert json_message.message.json_object == {"k": "v"}
assert json_message.message.suppress_output is True
variable_message = tool.create_variable_message("answer", 42, stream=False)
assert variable_message.type == ToolInvokeMessage.MessageType.VARIABLE
assert variable_message.message.variable_name == "answer"
assert variable_message.message.variable_value == 42
assert variable_message.message.stream is False
def test_base_abstract_invoke_placeholder_returns_none():
tool = _build_tool()
assert Tool._invoke(tool, user_id="u", tool_parameters={}) is None

View File

@ -255,6 +255,32 @@ def test_create_variable_message():
assert message.message.stream is False
def test_create_file_message_should_include_file_marker():
entity = ToolEntity(
identity=ToolIdentity(author="test", name="test tool", label=I18nObject(en_US="test tool"), provider="test"),
parameters=[],
description=None,
has_runtime_parameters=False,
)
runtime = ToolRuntime(tenant_id="test_tool", invoke_from=InvokeFrom.EXPLORE)
tool = WorkflowTool(
workflow_app_id="",
workflow_as_tool_id="",
version="1",
workflow_entities={},
workflow_call_depth=1,
entity=entity,
runtime=runtime,
)
file_obj = object()
message = tool.create_file_message(file_obj) # type: ignore[arg-type]
assert message.type == ToolInvokeMessage.MessageType.FILE
assert message.message.file_marker == "file_marker"
assert message.meta == {"file": file_obj}
def test_resolve_user_from_database_falls_back_to_end_user(monkeypatch: pytest.MonkeyPatch):
"""Ensure worker context can resolve EndUser when Account is missing."""

View File

@ -198,6 +198,15 @@ class SubscriptionTestCase:
description: str = ""
class FakeRedisClient:
"""Minimal fake Redis client for unit tests."""
def __init__(self) -> None:
self.publish = MagicMock()
self.spublish = MagicMock()
self.pubsub = MagicMock(return_value=MagicMock())
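# The autouse patch_sharded_redis_type fixtures below replace the sharded
# channel module's Redis type with this fake, presumably so isinstance
# checks against Redis accept it; that reading is inferred from the
# fixtures, since the channel source is not part of this diff.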
class TestRedisSubscription:
"""Test cases for the _RedisSubscription class."""
@ -619,10 +628,13 @@ class TestRedisSubscription:
class TestRedisShardedSubscription:
"""Test cases for the _RedisShardedSubscription class."""
@pytest.fixture(autouse=True)
def patch_sharded_redis_type(self, monkeypatch):
monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)
@pytest.fixture
def mock_redis_client(self) -> MagicMock:
client = MagicMock()
return client
def mock_redis_client(self) -> FakeRedisClient:
return FakeRedisClient()
@pytest.fixture
def mock_pubsub(self) -> MagicMock:
@ -636,7 +648,7 @@ class TestRedisShardedSubscription:
@pytest.fixture
def sharded_subscription(
self, mock_pubsub: MagicMock, mock_redis_client: MagicMock
self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
) -> Generator[_RedisShardedSubscription, None, None]:
"""Create a _RedisShardedSubscription instance for testing."""
subscription = _RedisShardedSubscription(
@ -657,7 +669,7 @@ class TestRedisShardedSubscription:
# ==================== Lifecycle Tests ====================
def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
"""Test that sharded subscription is properly initialized."""
subscription = _RedisShardedSubscription(
client=mock_redis_client,
@ -970,7 +982,7 @@ class TestRedisShardedSubscription:
],
)
def test_sharded_subscription_scenarios(
self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: MagicMock
self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
):
"""Test various sharded subscription scenarios using table-driven approach."""
subscription = _RedisShardedSubscription(
@ -1058,7 +1070,7 @@ class TestRedisShardedSubscription:
# Close should still work
sharded_subscription.close() # Should not raise
def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
"""Test various sharded channel name formats."""
channel_names = [
"simple",
@ -1120,10 +1132,13 @@ class TestRedisSubscriptionCommon:
"""Parameterized fixture providing subscription type and class."""
return request.param
@pytest.fixture(autouse=True)
def patch_sharded_redis_type(self, monkeypatch):
monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)
@pytest.fixture
def mock_redis_client(self) -> MagicMock:
client = MagicMock()
return client
def mock_redis_client(self) -> FakeRedisClient:
return FakeRedisClient()
@pytest.fixture
def mock_pubsub(self) -> MagicMock:
@ -1140,7 +1155,7 @@ class TestRedisSubscriptionCommon:
return pubsub
@pytest.fixture
def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
"""Create a subscription instance based on parameterized type."""
subscription_type, subscription_class = subscription_params
topic_name = f"test-{subscription_type}-topic"

View File

@ -698,6 +698,132 @@ class TestTenantService:
self._assert_database_operations_called(mock_db_dependencies["db"])
# ==================== Member Removal Tests ====================
def test_remove_pending_member_deletes_orphaned_account(self):
"""Test that removing a pending member with no other workspaces deletes the account."""
# Arrange
mock_tenant = MagicMock()
mock_tenant.id = "tenant-456"
mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
mock_pending_member = TestAccountAssociatedDataFactory.create_account_mock(
account_id="pending-user-789", email="pending@example.com", status=AccountStatus.PENDING
)
mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="pending-user-789", role="normal"
)
with patch("services.account_service.db") as mock_db:
mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
query_mock_count = MagicMock()
query_mock_count.filter_by.return_value.count.return_value = 0
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
# Act
TenantService.remove_member_from_tenant(mock_tenant, mock_pending_member, mock_operator)
# Assert: enterprise sync still receives the correct member ID
mock_sync.assert_called_once_with(
workspace_id="tenant-456",
member_id="pending-user-789",
source="workspace_member_removed",
)
# Assert: both join record and account should be deleted
mock_db.session.delete.assert_any_call(mock_ta)
mock_db.session.delete.assert_any_call(mock_pending_member)
assert mock_db.session.delete.call_count == 2
def test_remove_pending_member_keeps_account_with_other_workspaces(self):
"""Test that removing a pending member who belongs to other workspaces preserves the account."""
# Arrange
mock_tenant = MagicMock()
mock_tenant.id = "tenant-456"
mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
mock_pending_member = TestAccountAssociatedDataFactory.create_account_mock(
account_id="pending-user-789", email="pending@example.com", status=AccountStatus.PENDING
)
mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="pending-user-789", role="normal"
)
with patch("services.account_service.db") as mock_db:
mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
# Remaining join count = 1 (still in another workspace)
query_mock_count = MagicMock()
query_mock_count.filter_by.return_value.count.return_value = 1
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta, query_mock_count]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
# Act
TenantService.remove_member_from_tenant(mock_tenant, mock_pending_member, mock_operator)
# Assert: only the join record should be deleted, not the account
mock_db.session.delete.assert_called_once_with(mock_ta)
def test_remove_active_member_preserves_account(self):
"""Test that removing an active member never deletes the account, even with no other workspaces."""
# Arrange
mock_tenant = MagicMock()
mock_tenant.id = "tenant-456"
mock_operator = TestAccountAssociatedDataFactory.create_account_mock(account_id="operator-123", role="owner")
mock_active_member = TestAccountAssociatedDataFactory.create_account_mock(
account_id="active-user-789", email="active@example.com", status=AccountStatus.ACTIVE
)
mock_ta = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="active-user-789", role="normal"
)
with patch("services.account_service.db") as mock_db:
mock_operator_join = TestAccountAssociatedDataFactory.create_tenant_join_mock(
tenant_id="tenant-456", account_id="operator-123", role="owner"
)
query_mock_permission = MagicMock()
query_mock_permission.filter_by.return_value.first.return_value = mock_operator_join
query_mock_ta = MagicMock()
query_mock_ta.filter_by.return_value.first.return_value = mock_ta
mock_db.session.query.side_effect = [query_mock_permission, query_mock_ta]
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
# Act
TenantService.remove_member_from_tenant(mock_tenant, mock_active_member, mock_operator)
# Assert: only the join record should be deleted
mock_db.session.delete.assert_called_once_with(mock_ta)
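# Sketch of the removal logic the three tests above exercise, reconstructed
# from the assertions (names here are assumptions, not the service source):
#
#   db.session.delete(join_record)
#   if member.status == AccountStatus.PENDING:
#       remaining = db.session.query(TenantAccountJoin).filter_by(
#           account_id=member.id
#       ).count()
#       if remaining == 0:
#           db.session.delete(member)  # orphaned pending account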
# ==================== Tenant Switching Tests ====================
def test_switch_tenant_success(self):

View File

@ -17,7 +17,6 @@ from core.workflow.nodes.human_input.entities import (
from core.workflow.nodes.human_input.enums import FormInputType, HumanInputFormKind, HumanInputFormStatus
from models.human_input import RecipientType
from services.human_input_service import Form, FormExpiredError, HumanInputService, InvalidFormDataError
from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE
@pytest.fixture
@ -88,7 +87,6 @@ def test_enqueue_resume_dispatches_task_for_workflow(mocker, mock_session_factor
resume_task.apply_async.assert_called_once()
call_kwargs = resume_task.apply_async.call_args.kwargs
assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"
@ -130,7 +128,6 @@ def test_enqueue_resume_dispatches_task_for_advanced_chat(mocker, mock_session_f
resume_task.apply_async.assert_called_once()
call_kwargs = resume_task.apply_async.call_args.kwargs
assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"

api/uv.lock generated
View File

@ -1237,49 +1237,47 @@ wheels = [
[[package]]
name = "cryptography"
version = "46.0.3"
version = "46.0.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
{ url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
{ url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
{ url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
{ url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
{ url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
{ url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
{ url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
{ url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
{ url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
{ url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
{ url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
{ url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
{ url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
{ url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
{ url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
{ url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
{ url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
{ url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
{ url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
{ url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
{ url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
{ url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
{ url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
{ url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
{ url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
{ url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
{ url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
{ url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
{ url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
{ url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" },
{ url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" },
{ url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" },
{ url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" },
{ url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" },
{ url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" },
{ url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
{ url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
{ url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
{ url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
{ url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
{ url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
{ url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
{ url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
{ url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
{ url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
{ url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
{ url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
{ url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
{ url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" },
{ url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" },
{ url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" },
{ url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" },
{ url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" },
{ url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" },
]
[[package]]
@ -1368,7 +1366,7 @@ wheels = [
[[package]]
name = "dify-api"
version = "1.12.1"
version = "1.13.0"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },
@ -1594,7 +1592,7 @@ requires-dist = [
{ name = "gevent", specifier = "~=25.9.1" },
{ name = "gmpy2", specifier = "~=2.2.1" },
{ name = "google-api-core", specifier = "==2.18.0" },
{ name = "google-api-python-client", specifier = "==2.90.0" },
{ name = "google-api-python-client", specifier = "==2.189.0" },
{ name = "google-auth", specifier = "==2.29.0" },
{ name = "google-auth-httplib2", specifier = "==0.2.0" },
{ name = "google-cloud-aiplatform", specifier = "==1.49.0" },
@ -2306,7 +2304,7 @@ grpc = [
[[package]]
name = "google-api-python-client"
version = "2.90.0"
version = "2.189.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-api-core" },
@ -2315,9 +2313,9 @@ dependencies = [
{ name = "httplib2" },
{ name = "uritemplate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/f8/0783aeca3410ee053d4dd1fccafd85197847b8f84dd038e036634605d083/google_api_python_client-2.189.0.tar.gz", hash = "sha256:45f2d8559b5c895dde6ad3fb33de025f5cb2c197fa5862f18df7f5295a172741", size = 13979470, upload-time = "2026-02-03T19:24:55.432Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" },
{ url = "https://files.pythonhosted.org/packages/04/44/3677ff27998214f2fa7957359da48da378a0ffff1bd0bdaba42e752bc13e/google_api_python_client-2.189.0-py3-none-any.whl", hash = "sha256:a258c09660a49c6159173f8bbece171278e917e104a11f0640b34751b79c8a1a", size = 14547633, upload-time = "2026-02-03T19:24:52.845Z" },
]
[[package]]

View File

@ -106,10 +106,10 @@ if [[ -z "${QUEUES}" ]]; then
# Configure queues based on edition
if [[ "${EDITION}" == "CLOUD" ]]; then
# Cloud edition: separate queues for dataset and trigger tasks
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
else
# Community edition (SELF_HOSTED): dataset and workflow have separate queues
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
fi
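# Both branches now include workflow_based_app_execution, the queue that the
# workflow/advanced-chat resume tasks are dispatched to (see the human-input
# service tests), so a default worker picks up resumed app runs.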
echo "No queues specified, using edition-based defaults: ${QUEUES}"

View File

@ -62,6 +62,9 @@ LANG=C.UTF-8
LC_ALL=C.UTF-8
PYTHONIOENCODING=utf-8
# Set UV cache directory to avoid permission issues with a non-existent home directory
UV_CACHE_DIR=/tmp/.uv-cache
# ------------------------------
# Server Configuration
# ------------------------------
@ -384,6 +387,8 @@ CELERY_USE_SENTINEL=false
CELERY_SENTINEL_MASTER_NAME=
CELERY_SENTINEL_PASSWORD=
CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
# e.g. {"tasks.add": {"rate_limit": "10/s"}}
CELERY_TASK_ANNOTATIONS=null
# ------------------------------
# CORS Configuration
@ -1518,6 +1523,7 @@ AMPLITUDE_API_KEY=
# Sandbox expired records cleanup configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30

View File

@ -21,7 +21,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -63,7 +63,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -102,7 +102,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -132,7 +132,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.12.1
image: langgenius/dify-web:1.13.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@ -16,6 +16,7 @@ x-shared-env: &shared-api-worker-env
LANG: ${LANG:-C.UTF-8}
LC_ALL: ${LC_ALL:-C.UTF-8}
PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8}
UV_CACHE_DIR: ${UV_CACHE_DIR:-/tmp/.uv-cache}
LOG_LEVEL: ${LOG_LEVEL:-INFO}
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
@ -105,6 +106,7 @@ x-shared-env: &shared-api-worker-env
CELERY_SENTINEL_MASTER_NAME: ${CELERY_SENTINEL_MASTER_NAME:-}
CELERY_SENTINEL_PASSWORD: ${CELERY_SENTINEL_PASSWORD:-}
CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1}
CELERY_TASK_ANNOTATIONS: ${CELERY_TASK_ANNOTATIONS:-null}
WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*}
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
@ -682,6 +684,7 @@ x-shared-env: &shared-api-worker-env
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
PUBSUB_REDIS_URL: ${PUBSUB_REDIS_URL:-}
PUBSUB_REDIS_CHANNEL_TYPE: ${PUBSUB_REDIS_CHANNEL_TYPE:-pubsub}
@ -712,7 +715,7 @@ services:
# API service
api:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -754,7 +757,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -793,7 +796,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -823,7 +826,7 @@ services:
# Frontend web application.
web:
image: langgenius/dify-web:1.12.1
image: langgenius/dify-web:1.13.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

View File

@ -10,7 +10,7 @@ importers:
dependencies:
axios:
specifier: ^1.13.2
version: 1.13.2
version: 1.13.5
devDependencies:
'@eslint/js':
specifier: ^9.39.2
@ -544,8 +544,8 @@ packages:
asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
axios@1.13.2:
resolution: {integrity: sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==}
axios@1.13.5:
resolution: {integrity: sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==}
balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
@ -1677,7 +1677,7 @@ snapshots:
asynckit@0.4.0: {}
axios@1.13.2:
axios@1.13.5:
dependencies:
follow-redirects: 1.15.11
form-data: 4.0.5

View File

@ -1,261 +0,0 @@
/**
* MAX_PARALLEL_LIMIT Configuration Bug Test
*
* This test reproduces and verifies the fix for issue #23083:
* MAX_PARALLEL_LIMIT environment variable does not take effect in iteration panel
*/
import { render, screen } from '@testing-library/react'
import * as React from 'react'
// Mock environment variables before importing constants
const originalEnv = process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
// Test with different environment values
function setupEnvironment(value?: string) {
if (value)
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = value
else
delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
// Clear module cache to force re-evaluation
vi.resetModules()
}
function restoreEnvironment() {
if (originalEnv)
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = originalEnv
else
delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
vi.resetModules()
}
// Mock i18next with proper implementation
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => {
if (key.includes('MaxParallelismTitle'))
return 'Max Parallelism'
if (key.includes('MaxParallelismDesc'))
return 'Maximum number of parallel executions'
if (key.includes('parallelMode'))
return 'Parallel Mode'
if (key.includes('parallelPanelDesc'))
return 'Enable parallel execution'
if (key.includes('errorResponseMethod'))
return 'Error Response Method'
return key
},
}),
initReactI18next: {
type: '3rdParty',
init: vi.fn(),
},
}))
// Mock i18next module completely to prevent initialization issues
vi.mock('i18next', () => ({
use: vi.fn().mockReturnThis(),
init: vi.fn().mockReturnThis(),
t: vi.fn(key => key),
isInitialized: true,
}))
// Mock the useConfig hook
vi.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
default: () => ({
inputs: {
is_parallel: true,
parallel_nums: 5,
error_handle_mode: 'terminated',
},
changeParallel: vi.fn(),
changeParallelNums: vi.fn(),
changeErrorHandleMode: vi.fn(),
}),
}))
// Mock other components
vi.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => ({
default: function MockVarReferencePicker() {
return <div data-testid="var-reference-picker">VarReferencePicker</div>
},
}))
vi.mock('@/app/components/workflow/nodes/_base/components/split', () => ({
default: function MockSplit() {
return <div data-testid="split">Split</div>
},
}))
vi.mock('@/app/components/workflow/nodes/_base/components/field', () => ({
default: function MockField({ title, children }: { title: string, children: React.ReactNode }) {
return (
<div data-testid="field">
<label>{title}</label>
{children}
</div>
)
},
}))
const getParallelControls = () => ({
numberInput: screen.getByRole('spinbutton'),
slider: screen.getByRole('slider'),
})
describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
const mockNodeData = {
id: 'test-iteration-node',
type: 'iteration' as const,
data: {
title: 'Test Iteration',
desc: 'Test iteration node',
iterator_selector: ['test'],
output_selector: ['output'],
is_parallel: true,
parallel_nums: 5,
error_handle_mode: 'terminated' as const,
},
}
beforeEach(() => {
vi.clearAllMocks()
})
afterEach(() => {
restoreEnvironment()
})
afterAll(() => {
restoreEnvironment()
})
describe('Environment Variable Parsing', () => {
it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', async () => {
setupEnvironment('25')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
expect(MAX_PARALLEL_LIMIT).toBe(25)
})
it('should fall back to default when environment variable is not set', async () => {
setupEnvironment() // No environment variable
const { MAX_PARALLEL_LIMIT } = await import('@/config')
expect(MAX_PARALLEL_LIMIT).toBe(10)
})
it('should handle invalid environment variable values', async () => {
setupEnvironment('invalid')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
// Should fall back to default when parsing fails
expect(MAX_PARALLEL_LIMIT).toBe(10)
})
it('should handle empty environment variable', async () => {
setupEnvironment('')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
// Should fall back to default when empty
expect(MAX_PARALLEL_LIMIT).toBe(10)
})
// Edge cases for boundary values
it('should fall back to default when env is 0 or negative', async () => {
setupEnvironment('0')
let { MAX_PARALLEL_LIMIT } = await import('@/config')
expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
setupEnvironment('-5')
;({ MAX_PARALLEL_LIMIT } = await import('@/config'))
expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
})
it('should handle float numbers by parseInt behavior', async () => {
setupEnvironment('12.7')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
// parseInt truncates to integer
expect(MAX_PARALLEL_LIMIT).toBe(12)
})
})
describe('UI Component Integration (Main Fix Verification)', () => {
it('should render iteration panel with environment-configured max value', async () => {
// Set environment variable to a different value
setupEnvironment('30')
// Import Panel after setting environment
const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
const { MAX_PARALLEL_LIMIT } = await import('@/config')
render(
<Panel
id="test-node"
// @ts-expect-error key type mismatch
data={mockNodeData.data}
/>,
)
// Behavior-focused assertion: UI max should equal MAX_PARALLEL_LIMIT
const { numberInput, slider } = getParallelControls()
expect(numberInput).toHaveAttribute('max', String(MAX_PARALLEL_LIMIT))
expect(slider).toHaveAttribute('aria-valuemax', String(MAX_PARALLEL_LIMIT))
// Verify the actual values
expect(MAX_PARALLEL_LIMIT).toBe(30)
expect(numberInput.getAttribute('max')).toBe('30')
expect(slider.getAttribute('aria-valuemax')).toBe('30')
})
it('should maintain UI consistency with different environment values', async () => {
setupEnvironment('15')
const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
const { MAX_PARALLEL_LIMIT } = await import('@/config')
render(
<Panel
id="test-node"
// @ts-expect-error key type mismatch
data={mockNodeData.data}
/>,
)
// Both input and slider should use the same max value from MAX_PARALLEL_LIMIT
const { numberInput, slider } = getParallelControls()
expect(numberInput.getAttribute('max')).toBe(slider.getAttribute('aria-valuemax'))
expect(numberInput.getAttribute('max')).toBe(String(MAX_PARALLEL_LIMIT))
})
})
describe('Legacy Constant Verification (For Transition Period)', () => {
// Marked as transition/deprecation tests
it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', async () => {
const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
expect(typeof MAX_ITERATION_PARALLEL_NUM).toBe('number')
expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) // Hardcoded legacy value
})
it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', async () => {
setupEnvironment('50')
const { MAX_PARALLEL_LIMIT } = await import('@/config')
const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
// MAX_PARALLEL_LIMIT is configurable, MAX_ITERATION_PARALLEL_NUM is not
expect(MAX_PARALLEL_LIMIT).toBe(50)
expect(MAX_ITERATION_PARALLEL_NUM).toBe(10)
expect(MAX_PARALLEL_LIMIT).not.toBe(MAX_ITERATION_PARALLEL_NUM)
})
})
describe('Constants Validation', () => {
it('should validate that required constants exist and have correct types', async () => {
const { MAX_PARALLEL_LIMIT } = await import('@/config')
const { MIN_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
expect(typeof MAX_PARALLEL_LIMIT).toBe('number')
expect(typeof MIN_ITERATION_PARALLEL_NUM).toBe('number')
expect(MAX_PARALLEL_LIMIT).toBeGreaterThanOrEqual(MIN_ITERATION_PARALLEL_NUM)
})
})
})
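For reference, the parsing behavior these deleted tests exercised fits in a few lines. This is a minimal sketch of what `@/config` is assumed to do — the constant name, the default of 10, and the parseInt semantics come from the assertions above; the actual implementation may differ:

// Hypothetical sketch of MAX_PARALLEL_LIMIT derivation, inferred from the tests above.
const parsed = Number.parseInt(process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT ?? '', 10)
// NaN (unset/empty/invalid) and non-positive values fall back to the default of 10;
// floats like '12.7' are truncated by parseInt.
export const MAX_PARALLEL_LIMIT = Number.isNaN(parsed) || parsed <= 0 ? 10 : parsed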

View File

@ -3,7 +3,6 @@ import type { CSSProperties, ReactNode } from 'react'
import { cva } from 'class-variance-authority'
import * as React from 'react'
import { cn } from '@/utils/classnames'
import './index.css'
enum BadgeState {
Warning = 'warning',

View File

@ -8,6 +8,7 @@ import { UserActionButtonType } from '@/app/components/workflow/nodes/human-inpu
import 'dayjs/locale/en'
import 'dayjs/locale/zh-cn'
import 'dayjs/locale/ja'
import 'dayjs/locale/nl'
dayjs.extend(utc)
dayjs.extend(relativeTime)
@ -45,6 +46,7 @@ const localeMap: Record<string, string> = {
'en-US': 'en',
'zh-Hans': 'zh-cn',
'ja-JP': 'ja',
'nl-NL': 'nl',
}
export const getRelativeTime = (

View File

@ -98,7 +98,9 @@ const VoiceParamConfig = ({
className="h-full w-full cursor-pointer rounded-lg border-0 bg-components-input-bg-normal py-1.5 pl-3 pr-10 focus-visible:bg-state-base-hover focus-visible:outline-none group-hover:bg-state-base-hover sm:text-sm sm:leading-6"
>
<span className={cn('block truncate text-left text-text-secondary', !languageItem?.name && 'text-text-tertiary')}>
{languageItem?.name ? t(`voice.language.${replace(languageItem?.value, '-', '')}`, { ns: 'common' }) : localLanguagePlaceholder}
{languageItem?.name
? t(`voice.language.${replace(languageItem?.value ?? '', '-', '')}`, languageItem?.name, { ns: 'common' as const })
: localLanguagePlaceholder}
</span>
<span className="pointer-events-none absolute inset-y-0 right-0 flex items-center pr-2">
<ChevronDownIcon
@ -129,7 +131,7 @@ const VoiceParamConfig = ({
<span
className={cn('block', selected && 'font-normal')}
>
{t(`voice.language.${replace((item.value), '-', '')}`, { ns: 'common' })}
{t(`voice.language.${replace((item.value), '-', '')}`, item.name, { ns: 'common' as const })}
</span>
{(selected || item.value === text2speech?.language) && (
<span

View File

@ -1,5 +1,5 @@
import type { RemixiconComponentType } from '@remixicon/react'
import { z } from 'zod'
import * as z from 'zod'
export const InputTypeEnum = z.enum([
'text-input',

View File

@ -1,6 +1,6 @@
import type { ZodNumber, ZodSchema, ZodString } from 'zod'
import type { BaseConfiguration } from './types'
import { z } from 'zod'
import * as z from 'zod'
import { BaseFieldType } from './types'
export const generateZodSchema = (fields: BaseConfiguration[]) => {

View File

@ -1,4 +1,4 @@
import { z } from 'zod'
import * as z from 'zod'
const ContactMethod = z.union([
z.literal('email'),
@ -22,10 +22,10 @@ export const UserSchema = z.object({
.min(3, 'Surname must be at least 3 characters long')
.regex(/^[A-Z]/, 'Surname must start with a capital letter'),
isAcceptingTerms: z.boolean().refine(val => val, {
message: 'You must accept the terms and conditions',
error: 'You must accept the terms and conditions',
}),
contact: z.object({
email: z.string().email('Invalid email address'),
email: z.email('Invalid email address'),
phone: z.string().optional(),
preferredContactMethod: ContactMethod,
}),

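Taken together, the zod v4 migration in this and the surrounding files amounts to three mechanical changes: namespace imports, top-level string formats, and `error` replacing `message` in issue customization. A minimal sketch (field names here are illustrative, not from the codebase):

import * as z from 'zod' // v3 style was: import { z } from 'zod'

const ExampleSchema = z.object({
  email: z.email('Invalid email address'), // v3: z.string().email(...)
  website: z.url('Must be valid URL'), // v3: z.string().url(...)
  accepted: z.boolean().refine(val => val, {
    error: 'You must accept the terms', // v3: message: '...'
  }),
})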
View File

@ -1,6 +1,6 @@
import type { ZodSchema, ZodString } from 'zod'
import type { InputFieldConfiguration } from './types'
import { z } from 'zod'
import * as z from 'zod'
import { SupportedFileTypes, TransferMethod } from '@/app/components/rag-pipeline/components/panel/input-field/editor/form/schema'
import { InputFieldType } from './types'

View File

@ -204,23 +204,10 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}
}
catch {
try {
// eslint-disable-next-line no-new-func
const result = new Function(`return ${trimmedContent}`)()
if (typeof result === 'object' && result !== null) {
setFinalChartOption(result)
setChartState('success')
processedRef.current = true
return
}
}
catch {
// If we have a complete JSON structure but it doesn't parse,
// it's likely an error rather than incomplete data
setChartState('error')
processedRef.current = true
return
}
// Avoid executing arbitrary code; require valid JSON for chart options.
setChartState('error')
processedRef.current = true
return
}
}
@ -249,19 +236,9 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
}
}
catch {
try {
// eslint-disable-next-line no-new-func
const result = new Function(`return ${trimmedContent}`)()
if (typeof result === 'object' && result !== null) {
setFinalChartOption(result)
isValidOption = true
}
}
catch {
// Both parsing methods failed, but content looks complete
setChartState('error')
processedRef.current = true
}
// Only accept JSON to avoid executing arbitrary code from the message.
setChartState('error')
processedRef.current = true
}
if (isValidOption) {

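The removed fallback evaluated model-generated text with `new Function`, which is arbitrary code execution. After the fix, only strict JSON is accepted as a chart option. A minimal sketch of the resulting control flow, using the names from the component above:

// JSON-only parsing after the fix: no eval fallback for non-JSON content.
try {
  const option = JSON.parse(trimmedContent)
  if (typeof option === 'object' && option !== null) {
    setFinalChartOption(option)
    setChartState('success')
  }
}
catch {
  // Anything that is not valid JSON is treated as an error, never executed.
  setChartState('error')
}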
View File

@ -2,6 +2,7 @@
import type { FC } from 'react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import { env } from '@/env'
import ParamItem from '.'
type Props = {
@ -11,12 +12,7 @@ type Props = {
enable: boolean
}
const maxTopK = (() => {
const configValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10)
if (configValue && !isNaN(configValue))
return configValue
return 10
})()
const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE
const VALUE_LIMIT = {
default: 2,
step: 1,

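This and several files below replace `document.body` data-attribute lookups with a typed `env` module. The module's definition is not part of this diff; a plausible sketch of its shape, assuming zod v4 and reusing the defaults visible in the removed code (10 for top-k, 4000 for segmentation tokens):

import * as z from 'zod'

// Hypothetical sketch of @/env; the real module may validate more variables.
const EnvSchema = z.object({
  NEXT_PUBLIC_TOP_K_MAX_VALUE: z.coerce.number().default(10),
  NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: z.coerce.number().default(4000),
  NEXT_PUBLIC_SITE_ABOUT: z.string().optional(),
  NEXT_PUBLIC_BASE_PATH: z.string().default(''),
})

export const env = EnvSchema.parse({
  NEXT_PUBLIC_TOP_K_MAX_VALUE: process.env.NEXT_PUBLIC_TOP_K_MAX_VALUE,
  NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH:
    process.env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH,
  NEXT_PUBLIC_SITE_ABOUT: process.env.NEXT_PUBLIC_SITE_ABOUT,
  NEXT_PUBLIC_BASE_PATH: process.env.NEXT_PUBLIC_BASE_PATH,
})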
View File

@ -4,7 +4,6 @@ import { cva } from 'class-variance-authority'
import * as React from 'react'
import { Highlight } from '@/app/components/base/icons/src/public/common'
import { cn } from '@/utils/classnames'
import './index.css'
const PremiumBadgeVariants = cva(
'premium-badge',

View File

@ -1,6 +1,6 @@
import { render, screen } from '@testing-library/react'
import { noop } from 'es-toolkit/function'
import { z } from 'zod'
import * as z from 'zod'
import withValidation from '.'
describe('withValidation HOC', () => {

View File

@ -1,5 +1,5 @@
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import { z } from 'zod'
import * as z from 'zod'
import withValidation from '.'
// Sample components to wrap with validation
@ -65,7 +65,7 @@ const ProductCard = ({ name, price, category, inStock }: ProductCardProps) => {
// Create validated versions
const userSchema = z.object({
name: z.string().min(1, 'Name is required'),
email: z.string().email('Invalid email'),
email: z.email('Invalid email'),
age: z.number().min(0).max(150),
})
@ -371,7 +371,7 @@ export const ConfigurationValidation: Story = {
)
const configSchema = z.object({
apiUrl: z.string().url('Must be valid URL'),
apiUrl: z.url('Must be valid URL'),
timeout: z.number().min(0).max(30000),
retries: z.number().min(0).max(5),
debug: z.boolean(),
@ -430,7 +430,7 @@ export const UsageDocumentation: Story = {
<div>
<h4 className="mb-2 text-sm font-semibold text-gray-900">Usage Example</h4>
<pre className="overflow-x-auto rounded-lg bg-gray-900 p-4 text-xs text-gray-100">
{`import { z } from 'zod'
{`import * as z from 'zod'
import withValidation from './withValidation'
// Define your component

View File

@ -5,6 +5,7 @@ import { useTranslation } from 'react-i18next'
import Input from '@/app/components/base/input'
import { InputNumber } from '@/app/components/base/input-number'
import Tooltip from '@/app/components/base/tooltip'
import { env } from '@/env'
const TextLabel: FC<PropsWithChildren> = (props) => {
return <label className="text-xs font-semibold leading-none text-text-secondary">{props.children}</label>
@ -46,7 +47,7 @@ export const DelimiterInput: FC<InputProps & { tooltip?: string }> = (props) =>
}
export const MaxLengthInput: FC<InputNumberProps> = (props) => {
const maxValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000', 10)
const maxValue = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
const { t } = useTranslation()
return (

View File

@ -1,5 +1,6 @@
import type { ParentMode, PreProcessingRule, ProcessRule, Rules, SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
import { useCallback, useRef, useState } from 'react'
import { env } from '@/env'
import { ChunkingMode, ProcessMode } from '@/models/datasets'
import escape from './escape'
import unescape from './unescape'
@ -8,10 +9,7 @@ import unescape from './unescape'
export const DEFAULT_SEGMENT_IDENTIFIER = '\\n\\n'
export const DEFAULT_MAXIMUM_CHUNK_LENGTH = 1024
export const DEFAULT_OVERLAP = 50
export const MAXIMUM_CHUNK_TOKEN_LENGTH = Number.parseInt(
globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000',
10,
)
export const MAXIMUM_CHUNK_TOKEN_LENGTH = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
export type ParentChildConfig = {
chunkForContext: ParentMode

View File

@ -1,7 +1,7 @@
import type { BaseConfiguration } from '@/app/components/base/form/form-scenarios/base/types'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import { z } from 'zod'
import * as z from 'zod'
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
import Toast from '@/app/components/base/toast'
import Actions from './actions'
@ -53,7 +53,7 @@ const createFailingSchema = () => {
issues: [{ path: ['field1'], message: 'is required' }],
},
}),
} as unknown as z.ZodSchema
} as unknown as z.ZodType
}
// ==========================================

View File

@ -0,0 +1,129 @@
'use client'
import type { FC } from 'react'
import type { DocType } from '@/models/datasets'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Radio from '@/app/components/base/radio'
import Tooltip from '@/app/components/base/tooltip'
import { useMetadataMap } from '@/hooks/use-metadata'
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
import { cn } from '@/utils/classnames'
import s from '../style.module.css'
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
return <div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
}
const IconButton: FC<{ type: DocType, isChecked: boolean }> = ({ type, isChecked = false }) => {
const metadataMap = useMetadataMap()
return (
<Tooltip popupContent={metadataMap[type].text}>
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
<TypeIcon
iconName={metadataMap[type].iconName || ''}
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
/>
</button>
</Tooltip>
)
}
type DocTypeSelectorProps = {
docType: DocType | ''
documentType?: DocType | ''
tempDocType: DocType | ''
onTempDocTypeChange: (type: DocType | '') => void
onConfirm: () => void
onCancel: () => void
}
const DocTypeSelector: FC<DocTypeSelectorProps> = ({
docType,
documentType,
tempDocType,
onTempDocTypeChange,
onConfirm,
onCancel,
}) => {
const { t } = useTranslation()
const isFirstTime = !docType && !documentType
const currValue = tempDocType ?? documentType
return (
<>
{isFirstTime && (
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
)}
<div className={s.operationWrapper}>
{isFirstTime && (
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
)}
{documentType && (
<>
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
</>
)}
<Radio.Group value={currValue ?? ''} onChange={onTempDocTypeChange} className={s.radioGroup}>
{CUSTOMIZABLE_DOC_TYPES.map(type => (
<Radio key={type} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
<IconButton type={type} isChecked={currValue === type} />
</Radio>
))}
</Radio.Group>
{isFirstTime && (
<Button variant="primary" onClick={onConfirm} disabled={!tempDocType}>
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
</Button>
)}
{documentType && (
<div className={s.opBtnWrapper}>
<Button onClick={onConfirm} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">
{t('operation.save', { ns: 'common' })}
</Button>
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>
{t('operation.cancel', { ns: 'common' })}
</Button>
</div>
)}
</div>
</>
)
}
type DocumentTypeDisplayProps = {
displayType: DocType | ''
showChangeLink?: boolean
onChangeClick?: () => void
}
export const DocumentTypeDisplay: FC<DocumentTypeDisplayProps> = ({
displayType,
showChangeLink = false,
onChangeClick,
}) => {
const { t } = useTranslation()
const metadataMap = useMetadataMap()
const effectiveType = displayType || 'book'
return (
<div className={s.documentTypeShow}>
{(displayType || !showChangeLink) && (
<>
<TypeIcon iconName={metadataMap[effectiveType]?.iconName || ''} className={s.iconShow} />
{metadataMap[effectiveType].text}
{showChangeLink && (
<div className="ml-1 inline-flex items-center gap-1">
·
<div onClick={onChangeClick} className="cursor-pointer hover:text-text-accent">
{t('operation.change', { ns: 'common' })}
</div>
</div>
)}
</>
)}
</div>
)
}
export default DocTypeSelector

View File

@ -0,0 +1,89 @@
'use client'
import type { FC, ReactNode } from 'react'
import type { inputType } from '@/hooks/use-metadata'
import { useTranslation } from 'react-i18next'
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
import Input from '@/app/components/base/input'
import { SimpleSelect } from '@/app/components/base/select'
import { getTextWidthWithCanvas } from '@/utils'
import { cn } from '@/utils/classnames'
import s from '../style.module.css'
type FieldInfoProps = {
label: string
value?: string
valueIcon?: ReactNode
displayedValue?: string
defaultValue?: string
showEdit?: boolean
inputType?: inputType
selectOptions?: Array<{ value: string, name: string }>
onUpdate?: (v: string) => void
}
const FieldInfo: FC<FieldInfoProps> = ({
label,
value = '',
valueIcon,
displayedValue = '',
defaultValue,
showEdit = false,
inputType = 'input',
selectOptions = [],
onUpdate,
}) => {
const { t } = useTranslation()
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
const editAlignTop = showEdit && inputType === 'textarea'
const readAlignTop = !showEdit && textNeedWrap
const renderContent = () => {
if (!showEdit)
return displayedValue
if (inputType === 'select') {
return (
<SimpleSelect
onSelect={({ value }) => onUpdate?.(value as string)}
items={selectOptions}
defaultValue={value}
className={s.select}
wrapperClassName={s.selectWrapper}
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
if (inputType === 'textarea') {
return (
<AutoHeightTextarea
onChange={e => onUpdate?.(e.target.value)}
value={value}
className={s.textArea}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<Input
onChange={e => onUpdate?.(e.target.value)}
value={value}
defaultValue={defaultValue}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
<div className="flex grow items-center gap-1 text-text-secondary">
{valueIcon}
{renderContent()}
</div>
</div>
)
}
export default FieldInfo

View File

@ -0,0 +1,88 @@
'use client'
import type { FC } from 'react'
import type { metadataType } from '@/hooks/use-metadata'
import type { FullDocumentDetail } from '@/models/datasets'
import { get } from 'es-toolkit/compat'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import FieldInfo from './field-info'
const map2Options = (map: Record<string, string>) => {
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}
function useCategoryMapResolver(mainField: metadataType | '') {
const languageMap = useLanguages()
const bookCategoryMap = useBookCategories()
const personalDocCategoryMap = usePersonalDocCategories()
const businessDocCategoryMap = useBusinessDocCategories()
return (field: string): Record<string, string> => {
if (field === 'language')
return languageMap
if (field === 'category' && mainField === 'book')
return bookCategoryMap
if (field === 'document_type') {
if (mainField === 'personal_document')
return personalDocCategoryMap
if (mainField === 'business_document')
return businessDocCategoryMap
}
return {}
}
}
type MetadataFieldListProps = {
mainField: metadataType | ''
canEdit?: boolean
metadata?: Record<string, string>
docDetail?: FullDocumentDetail
onFieldUpdate?: (field: string, value: string) => void
}
const MetadataFieldList: FC<MetadataFieldListProps> = ({
mainField,
canEdit = false,
metadata,
docDetail,
onFieldUpdate,
}) => {
const metadataMap = useMetadataMap()
const getCategoryMap = useCategoryMapResolver(mainField)
if (!mainField)
return null
const fieldMap = metadataMap[mainField]?.subFieldsMap
const isFixedField = ['originInfo', 'technicalParameters'].includes(mainField)
const sourceData = isFixedField ? docDetail : metadata
const getDisplayValue = (field: string) => {
const val = get(sourceData, field, '')
if (!val && val !== 0)
return '-'
if (fieldMap[field]?.inputType === 'select')
return getCategoryMap(field)[val]
if (fieldMap[field]?.render)
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
return val
}
return (
<div className="flex flex-col gap-1">
{Object.keys(fieldMap).map(field => (
<FieldInfo
key={fieldMap[field]?.label}
label={fieldMap[field]?.label}
displayedValue={getDisplayValue(field)}
value={get(sourceData, field, '')}
inputType={fieldMap[field]?.inputType || 'input'}
showEdit={canEdit}
onUpdate={val => onFieldUpdate?.(field, val)}
selectOptions={map2Options(getCategoryMap(field))}
/>
))}
</div>
)
}
export default MetadataFieldList

View File

@ -0,0 +1,137 @@
'use client'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe } from '@/utils'
import { useDocumentContext } from '../../context'
type MetadataState = {
documentType?: DocType | ''
metadata: Record<string, string>
}
/**
* Normalize raw doc_type: treat 'others' as empty string.
*/
const normalizeDocType = (rawDocType: string): DocType | '' => {
return rawDocType === 'others' ? '' : rawDocType as DocType | ''
}
type UseMetadataStateOptions = {
docDetail?: FullDocumentDetail
onUpdate?: () => void
}
export function useMetadataState({ docDetail, onUpdate }: UseMetadataStateOptions) {
const { doc_metadata = {} } = docDetail || {}
const rawDocType = docDetail?.doc_type ?? ''
const docType = normalizeDocType(rawDocType)
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)
// If no documentType yet, start in editing + showDocTypes mode
const [editStatus, setEditStatus] = useState(!docType)
const [metadataParams, setMetadataParams] = useState<MetadataState>(
docType
? { documentType: docType, metadata: (doc_metadata || {}) as Record<string, string> }
: { metadata: {} },
)
const [showDocTypes, setShowDocTypes] = useState(!docType)
const [tempDocType, setTempDocType] = useState<DocType | ''>('')
const [saveLoading, setSaveLoading] = useState(false)
// Sync local state when the upstream docDetail changes (e.g. after save or navigation).
// These setters are intentionally called together to batch-reset multiple pieces
// of derived editing state that cannot be expressed as pure derived values.
useEffect(() => {
if (docDetail?.doc_type) {
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setEditStatus(false)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setShowDocTypes(false)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setTempDocType(docType)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setMetadataParams({
documentType: docType,
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
})
}
}, [docDetail?.doc_type, docDetail?.doc_metadata, docType])
const confirmDocType = () => {
if (!tempDocType)
return
setMetadataParams({
documentType: tempDocType,
// Clear metadata when switching to a different doc type
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {},
})
setEditStatus(true)
setShowDocTypes(false)
}
const cancelDocType = () => {
setTempDocType(metadataParams.documentType ?? '')
setEditStatus(true)
setShowDocTypes(false)
}
const enableEdit = () => {
setEditStatus(true)
}
const cancelEdit = () => {
setMetadataParams({ documentType: docType || '', metadata: { ...(docDetail?.doc_metadata || {}) } })
setEditStatus(!docType)
if (!docType)
setShowDocTypes(true)
}
const saveMetadata = async () => {
setSaveLoading(true)
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
datasetId,
documentId,
body: {
doc_type: metadataParams.documentType || docType || '',
doc_metadata: metadataParams.metadata,
},
}) as Promise<CommonResponse>)
if (!e)
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
else
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
onUpdate?.()
setEditStatus(false)
setSaveLoading(false)
}
const updateMetadataField = (field: string, value: string) => {
setMetadataParams(prev => ({ ...prev, metadata: { ...prev.metadata, [field]: value } }))
}
return {
docType,
editStatus,
showDocTypes,
tempDocType,
saveLoading,
metadataParams,
setTempDocType,
setShowDocTypes,
confirmDocType,
cancelDocType,
enableEdit,
cancelEdit,
saveMetadata,
updateMetadataField,
}
}
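The hook centralizes the edit/selector state machine that previously lived inline in the Metadata component (see the refactor below). A minimal usage sketch with @testing-library/react's renderHook — the toast context, document context, and i18n providers are omitted, so treat this as illustrative only:

import { act, renderHook } from '@testing-library/react'
import { useMetadataState } from './use-metadata-state'

const docDetail = { doc_type: 'book', doc_metadata: { title: 'T' } } as any // minimal stand-in
const { result } = renderHook(() => useMetadataState({ docDetail, onUpdate: () => {} }))

act(() => result.current.setTempDocType('paper'))
act(() => result.current.confirmDocType())
// Switching to a different doc type clears metadata and returns to edit mode:
// metadataParams becomes { documentType: 'paper', metadata: {} }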

View File

@ -1,7 +1,6 @@
import type { FullDocumentDetail } from '@/models/datasets'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import Metadata, { FieldInfo } from './index'
// Mock document context
@ -121,7 +120,6 @@ vi.mock('@/hooks/use-metadata', () => ({
}),
}))
// Mock getTextWidthWithCanvas
vi.mock('@/utils', () => ({
asyncRunSafe: async (promise: Promise<unknown>) => {
try {
@ -135,33 +133,32 @@ vi.mock('@/utils', () => ({
getTextWidthWithCanvas: () => 100,
}))
const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
id: 'doc-1',
name: 'Test Document',
doc_type: 'book',
doc_metadata: {
title: 'Test Book',
author: 'Test Author',
language: 'en',
},
data_source_type: 'upload_file',
segment_count: 10,
hit_count: 5,
...overrides,
} as FullDocumentDetail)
describe('Metadata', () => {
beforeEach(() => {
vi.clearAllMocks()
})
const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
id: 'doc-1',
name: 'Test Document',
doc_type: 'book',
doc_metadata: {
title: 'Test Book',
author: 'Test Author',
language: 'en',
},
data_source_type: 'upload_file',
segment_count: 10,
hit_count: 5,
...overrides,
} as FullDocumentDetail)
const defaultProps = {
docDetail: createMockDocDetail(),
loading: false,
onUpdate: vi.fn(),
}
// Rendering tests
describe('Rendering', () => {
it('should render without crashing', () => {
// Arrange & Act
@ -191,7 +188,7 @@ describe('Metadata', () => {
// Arrange & Act
render(<Metadata {...defaultProps} loading={true} />)
// Assert - Loading component should be rendered
// Assert - Loading component should be rendered, title should not
expect(screen.queryByText(/metadata\.title/i)).not.toBeInTheDocument()
})
@ -204,7 +201,7 @@ describe('Metadata', () => {
})
})
// Edit mode tests
// Edit mode (tests useMetadataState hook integration)
describe('Edit Mode', () => {
it('should enter edit mode when edit button is clicked', () => {
// Arrange
@ -303,7 +300,7 @@ describe('Metadata', () => {
})
})
// Document type selection
// Document type selection (tests DocTypeSelector sub-component integration)
describe('Document Type Selection', () => {
it('should show doc type selection when no doc_type exists', () => {
// Arrange
@ -353,13 +350,13 @@ describe('Metadata', () => {
})
})
// Origin info and technical parameters
// Fixed fields (tests MetadataFieldList sub-component integration)
describe('Fixed Fields', () => {
it('should render origin info fields', () => {
// Arrange & Act
render(<Metadata {...defaultProps} />)
// Assert - Origin info fields should be displayed
// Assert
expect(screen.getByText('Data Source Type')).toBeInTheDocument()
})
@ -382,7 +379,7 @@ describe('Metadata', () => {
// Act
const { container } = render(<Metadata {...defaultProps} docDetail={docDetail} />)
// Assert - should render without crashing
// Assert
expect(container.firstChild).toBeInTheDocument()
})
@ -390,7 +387,7 @@ describe('Metadata', () => {
// Arrange & Act
const { container } = render(<Metadata {...defaultProps} docDetail={undefined} loading={false} />)
// Assert - should render without crashing
// Assert
expect(container.firstChild).toBeInTheDocument()
})
@ -425,7 +422,6 @@ describe('Metadata', () => {
})
})
// FieldInfo component tests
describe('FieldInfo', () => {
beforeEach(() => {
vi.clearAllMocks()
@ -543,3 +539,149 @@ describe('FieldInfo', () => {
})
})
})
// --- useMetadataState hook coverage tests (via component interactions) ---
describe('useMetadataState coverage', () => {
beforeEach(() => {
vi.clearAllMocks()
})
const defaultProps = {
docDetail: createMockDocDetail(),
loading: false,
onUpdate: vi.fn(),
}
describe('cancelDocType', () => {
it('should cancel doc type change and return to edit mode', () => {
// Arrange
render(<Metadata {...defaultProps} />)
// Enter edit mode → click change to open doc type selector
fireEvent.click(screen.getByText(/operation\.edit/i))
fireEvent.click(screen.getByText(/operation\.change/i))
// Now in doc type selector mode — should show cancel button
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
// Act — cancel the doc type change
fireEvent.click(screen.getByText(/operation\.cancel/i))
// Assert — should be back to edit mode (cancel + save buttons visible)
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
})
})
describe('confirmDocType', () => {
it('should confirm same doc type and return to edit mode keeping metadata', () => {
// Arrange — useEffect syncs tempDocType='book' from docDetail
render(<Metadata {...defaultProps} />)
// Enter edit mode → click change to open doc type selector
fireEvent.click(screen.getByText(/operation\.edit/i))
fireEvent.click(screen.getByText(/operation\.change/i))
// DocTypeSelector shows save/cancel buttons
expect(screen.getByText(/metadata\.docTypeChangeTitle/i)).toBeInTheDocument()
// Act — click save to confirm same doc type (tempDocType='book')
fireEvent.click(screen.getByText(/operation\.save/i))
// Assert — should return to edit mode with metadata fields visible
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
})
})
describe('cancelEdit when no docType', () => {
it('should show doc type selection when cancel is clicked with doc_type others', () => {
// Arrange — doc with 'others' type normalizes to '' internally.
// The useEffect sees doc_type='others' (truthy) and syncs state,
// so the component initially shows view mode. Enter edit → cancel to trigger cancelEdit.
const docDetail = createMockDocDetail({ doc_type: 'others' })
render(<Metadata {...defaultProps} docDetail={docDetail} />)
// 'others' is normalized to '' → useEffect fires (doc_type truthy) → view mode
// The rendered type uses default 'book' fallback for display
expect(screen.getByText(/operation\.edit/i)).toBeInTheDocument()
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
// Act — cancel edit; internally docType is '' so cancelEdit goes to showDocTypes
fireEvent.click(screen.getByText(/operation\.cancel/i))
// Assert — should show doc type selection since normalized docType was ''
expect(screen.getByText(/metadata\.docTypeSelectTitle/i)).toBeInTheDocument()
})
})
describe('updateMetadataField', () => {
it('should update metadata field value via input', () => {
// Arrange
render(<Metadata {...defaultProps} />)
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
// Act — find an input and change its value (Title field)
const inputs = screen.getAllByRole('textbox')
expect(inputs.length).toBeGreaterThan(0)
fireEvent.change(inputs[0], { target: { value: 'Updated Title' } })
// Assert — the input should have the new value
expect(inputs[0]).toHaveValue('Updated Title')
})
})
describe('saveMetadata calls modifyDocMetadata with correct body', () => {
it('should pass doc_type and doc_metadata in save request', async () => {
// Arrange
mockModifyDocMetadata.mockResolvedValueOnce({})
render(<Metadata {...defaultProps} />)
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
// Act — save
fireEvent.click(screen.getByText(/operation\.save/i))
// Assert
await waitFor(() => {
expect(mockModifyDocMetadata).toHaveBeenCalledWith(
expect.objectContaining({
datasetId: 'test-dataset-id',
documentId: 'test-document-id',
body: expect.objectContaining({
doc_type: 'book',
}),
}),
)
})
})
})
describe('useEffect sync', () => {
it('should handle doc_metadata being null in effect sync', () => {
// Arrange — first render with null metadata
const { rerender } = render(
<Metadata
{...defaultProps}
docDetail={createMockDocDetail({ doc_metadata: null })}
/>,
)
// Act — rerender with a different doc_type to trigger useEffect sync
rerender(
<Metadata
{...defaultProps}
docDetail={createMockDocDetail({ doc_type: 'paper', doc_metadata: null })}
/>,
)
// Assert — should render without crashing, showing Paper type
expect(screen.getByText('Paper')).toBeInTheDocument()
})
})
})

View File

@ -1,422 +1,124 @@
'use client'
import type { FC, ReactNode } from 'react'
import type { inputType, metadataType } from '@/hooks/use-metadata'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import type { FC } from 'react'
import type { FullDocumentDetail } from '@/models/datasets'
import { PencilIcon } from '@heroicons/react/24/outline'
import { get } from 'es-toolkit/compat'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
import Button from '@/app/components/base/button'
import Divider from '@/app/components/base/divider'
import Input from '@/app/components/base/input'
import Loading from '@/app/components/base/loading'
import Radio from '@/app/components/base/radio'
import { SimpleSelect } from '@/app/components/base/select'
import { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe, getTextWidthWithCanvas } from '@/utils'
import { cn } from '@/utils/classnames'
import { useDocumentContext } from '../context'
import { useMetadataMap } from '@/hooks/use-metadata'
import DocTypeSelector, { DocumentTypeDisplay } from './components/doc-type-selector'
import MetadataFieldList from './components/metadata-field-list'
import { useMetadataState } from './hooks/use-metadata-state'
import s from './style.module.css'
const map2Options = (map: { [key: string]: string }) => {
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}
export { default as FieldInfo } from './components/field-info'
type IFieldInfoProps = {
label: string
value?: string
valueIcon?: ReactNode
displayedValue?: string
defaultValue?: string
showEdit?: boolean
inputType?: inputType
selectOptions?: Array<{ value: string, name: string }>
onUpdate?: (v: any) => void
}
export const FieldInfo: FC<IFieldInfoProps> = ({
label,
value = '',
valueIcon,
displayedValue = '',
defaultValue,
showEdit = false,
inputType = 'input',
selectOptions = [],
onUpdate,
}) => {
const { t } = useTranslation()
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
const editAlignTop = showEdit && inputType === 'textarea'
const readAlignTop = !showEdit && textNeedWrap
const renderContent = () => {
if (!showEdit)
return displayedValue
if (inputType === 'select') {
return (
<SimpleSelect
onSelect={({ value }) => onUpdate?.(value as string)}
items={selectOptions}
defaultValue={value}
className={s.select}
wrapperClassName={s.selectWrapper}
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
if (inputType === 'textarea') {
return (
<AutoHeightTextarea
onChange={e => onUpdate?.(e.target.value)}
value={value}
className={s.textArea}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<Input
onChange={e => onUpdate?.(e.target.value)}
value={value}
defaultValue={defaultValue}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
<div className="flex grow items-center gap-1 text-text-secondary">
{valueIcon}
{renderContent()}
</div>
</div>
)
}
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
return (
<div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
)
}
const IconButton: FC<{
type: DocType
isChecked: boolean
}> = ({ type, isChecked = false }) => {
const metadataMap = useMetadataMap()
return (
<Tooltip
popupContent={metadataMap[type].text}
>
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
<TypeIcon
iconName={metadataMap[type].iconName || ''}
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
/>
</button>
</Tooltip>
)
}
type IMetadataProps = {
type MetadataProps = {
docDetail?: FullDocumentDetail
loading: boolean
onUpdate: () => void
}
type MetadataState = {
documentType?: DocType | ''
metadata: Record<string, string>
}
const Metadata: FC<IMetadataProps> = ({ docDetail, loading, onUpdate }) => {
const { doc_metadata = {} } = docDetail || {}
const rawDocType = docDetail?.doc_type ?? ''
const doc_type = rawDocType === 'others' ? '' : rawDocType
const Metadata: FC<MetadataProps> = ({ docDetail, loading, onUpdate }) => {
const { t } = useTranslation()
const metadataMap = useMetadataMap()
const languageMap = useLanguages()
const bookCategoryMap = useBookCategories()
const personalDocCategoryMap = usePersonalDocCategories()
const businessDocCategoryMap = useBusinessDocCategories()
const [editStatus, setEditStatus] = useState(!doc_type) // if no documentType, in editing status by default
// the initial values are according to the documentType
const [metadataParams, setMetadataParams] = useState<MetadataState>(
doc_type
? {
documentType: doc_type as DocType,
metadata: (doc_metadata || {}) as Record<string, string>,
}
: { metadata: {} },
)
const [showDocTypes, setShowDocTypes] = useState(!doc_type) // whether show doc types
const [tempDocType, setTempDocType] = useState<DocType | ''>('') // for remember icon click
const [saveLoading, setSaveLoading] = useState(false)
const { notify } = useContext(ToastContext)
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)
useEffect(() => {
if (docDetail?.doc_type) {
setEditStatus(false)
setShowDocTypes(false)
setTempDocType(doc_type as DocType | '')
setMetadataParams({
documentType: doc_type as DocType | '',
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
})
}
}, [docDetail?.doc_type, docDetail?.doc_metadata, doc_type])
// confirm doc type
const confirmDocType = () => {
if (!tempDocType)
return
setMetadataParams({
documentType: tempDocType,
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {} as Record<string, string>, // change doc type, clear metadata
})
setEditStatus(true)
setShowDocTypes(false)
}
// cancel doc type
const cancelDocType = () => {
setTempDocType(metadataParams.documentType ?? '')
setEditStatus(true)
setShowDocTypes(false)
}
// show doc type select
const renderSelectDocType = () => {
const { documentType } = metadataParams
const {
docType,
editStatus,
showDocTypes,
tempDocType,
saveLoading,
metadataParams,
setTempDocType,
setShowDocTypes,
confirmDocType,
cancelDocType,
enableEdit,
cancelEdit,
saveMetadata,
updateMetadataField,
} = useMetadataState({ docDetail, onUpdate })
if (loading) {
return (
<>
{!doc_type && !documentType && (
<>
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
</>
)}
<div className={s.operationWrapper}>
{!doc_type && !documentType && (
<>
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
</>
)}
{documentType && (
<>
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
</>
)}
<Radio.Group value={tempDocType ?? documentType ?? ''} onChange={setTempDocType} className={s.radioGroup}>
{CUSTOMIZABLE_DOC_TYPES.map((type, index) => {
const currValue = tempDocType ?? documentType
return (
<Radio key={index} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
<IconButton
type={type}
isChecked={currValue === type}
/>
</Radio>
)
})}
</Radio.Group>
{!doc_type && !documentType && (
<Button
variant="primary"
onClick={confirmDocType}
disabled={!tempDocType}
>
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
</Button>
)}
{documentType && (
<div className={s.opBtnWrapper}>
<Button onClick={confirmDocType} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">{t('operation.save', { ns: 'common' })}</Button>
<Button onClick={cancelDocType} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
</div>
)}
</div>
</>
)
}
// show metadata info and edit
const renderFieldInfos = ({ mainField = 'book', canEdit }: { mainField?: metadataType | '', canEdit?: boolean }) => {
if (!mainField)
return null
const fieldMap = metadataMap[mainField]?.subFieldsMap
const sourceData = ['originInfo', 'technicalParameters'].includes(mainField) ? docDetail : metadataParams.metadata
const getTargetMap = (field: string) => {
if (field === 'language')
return languageMap
if (field === 'category' && mainField === 'book')
return bookCategoryMap
if (field === 'document_type') {
if (mainField === 'personal_document')
return personalDocCategoryMap
if (mainField === 'business_document')
return businessDocCategoryMap
}
return {} as any
}
const getTargetValue = (field: string) => {
const val = get(sourceData, field, '')
if (!val && val !== 0)
return '-'
if (fieldMap[field]?.inputType === 'select')
return getTargetMap(field)[val]
if (fieldMap[field]?.render)
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
return val
}
return (
<div className="flex flex-col gap-1">
{Object.keys(fieldMap).map((field) => {
return (
<FieldInfo
key={fieldMap[field]?.label}
label={fieldMap[field]?.label}
displayedValue={getTargetValue(field)}
value={get(sourceData, field, '')}
inputType={fieldMap[field]?.inputType || 'input'}
showEdit={canEdit}
onUpdate={(val) => {
setMetadataParams(pre => ({ ...pre, metadata: { ...pre.metadata, [field]: val } }))
}}
selectOptions={map2Options(getTargetMap(field))}
/>
)
})}
<div className={`${s.main} bg-gray-25`}>
<Loading type="app" />
</div>
)
}
const enabledEdit = () => {
setEditStatus(true)
}
const onCancel = () => {
setMetadataParams({ documentType: doc_type || '', metadata: { ...docDetail?.doc_metadata } })
setEditStatus(!doc_type)
if (!doc_type)
setShowDocTypes(true)
}
const onSave = async () => {
setSaveLoading(true)
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
datasetId,
documentId,
body: {
doc_type: metadataParams.documentType || doc_type || '',
doc_metadata: metadataParams.metadata,
},
}) as Promise<CommonResponse>)
if (!e)
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
else
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
onUpdate?.()
setEditStatus(false)
setSaveLoading(false)
}
return (
<div className={`${s.main} ${editStatus ? 'bg-white' : 'bg-gray-25'}`}>
{loading
? (<Loading type="app" />)
: (
<>
<div className={s.titleWrapper}>
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
{!editStatus
? (
<Button onClick={enabledEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
<PencilIcon className={s.opIcon} />
{t('operation.edit', { ns: 'common' })}
</Button>
)
: showDocTypes
? null
: (
<div className={s.opBtnWrapper}>
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
<Button
onClick={onSave}
className={`${s.opBtn} ${s.opSaveBtn}`}
variant="primary"
loading={saveLoading}
>
{t('operation.save', { ns: 'common' })}
</Button>
</div>
)}
{/* Header: title + action buttons */}
<div className={s.titleWrapper}>
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
{!editStatus
? (
<Button onClick={enableEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
<PencilIcon className={s.opIcon} />
{t('operation.edit', { ns: 'common' })}
</Button>
)
: !showDocTypes && (
<div className={s.opBtnWrapper}>
<Button onClick={cancelEdit} className={`${s.opBtn} ${s.opCancelBtn}`}>
{t('operation.cancel', { ns: 'common' })}
</Button>
<Button onClick={saveMetadata} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary" loading={saveLoading}>
{t('operation.save', { ns: 'common' })}
</Button>
</div>
{/* show selected doc type and changing entry */}
{!editStatus
? (
<div className={s.documentTypeShow}>
<TypeIcon iconName={metadataMap[doc_type || 'book']?.iconName || ''} className={s.iconShow} />
{metadataMap[doc_type || 'book'].text}
</div>
)
: showDocTypes
? null
: (
<div className={s.documentTypeShow}>
{metadataParams.documentType && (
<>
<TypeIcon iconName={metadataMap[metadataParams.documentType || 'book'].iconName || ''} className={s.iconShow} />
{metadataMap[metadataParams.documentType || 'book'].text}
{editStatus && (
<div className="ml-1 inline-flex items-center gap-1">
·
<div
onClick={() => { setShowDocTypes(true) }}
className="cursor-pointer hover:text-text-accent"
>
{t('operation.change', { ns: 'common' })}
</div>
</div>
)}
</>
)}
</div>
)}
{(!doc_type && showDocTypes) ? null : <Divider />}
{showDocTypes ? renderSelectDocType() : renderFieldInfos({ mainField: metadataParams.documentType, canEdit: editStatus })}
{/* show fixed fields */}
<Divider />
{renderFieldInfos({ mainField: 'originInfo', canEdit: false })}
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
<Divider />
{renderFieldInfos({ mainField: 'technicalParameters', canEdit: false })}
</>
)}
</div>
{/* Document type display / selector */}
{!editStatus
? <DocumentTypeDisplay displayType={docType} />
: showDocTypes
? null
: (
<DocumentTypeDisplay
displayType={metadataParams.documentType || ''}
showChangeLink={editStatus}
onChangeClick={() => setShowDocTypes(true)}
/>
)}
{/* Divider between type display and fields (skip when in first-time selection) */}
{(!docType && showDocTypes) ? null : <Divider />}
{/* Doc type selector or editable metadata fields */}
{showDocTypes
? (
<DocTypeSelector
docType={docType}
documentType={metadataParams.documentType}
tempDocType={tempDocType}
onTempDocTypeChange={setTempDocType}
onConfirm={confirmDocType}
onCancel={cancelDocType}
/>
)
: (
<MetadataFieldList
mainField={metadataParams.documentType || ''}
canEdit={editStatus}
metadata={metadataParams.metadata}
docDetail={docDetail}
onFieldUpdate={updateMetadataField}
/>
)}
{/* Fixed fields: origin info */}
<Divider />
<MetadataFieldList mainField="originInfo" docDetail={docDetail} />
{/* Fixed fields: technical parameters */}
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
<Divider />
<MetadataFieldList mainField="technicalParameters" docDetail={docDetail} />
</div>
)
}

View File

@ -28,6 +28,7 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
import { useDocLink } from '@/context/i18n'
import { useModalContext } from '@/context/modal-context'
import { useProviderContext } from '@/context/provider-context'
import { env } from '@/env'
import { useLogout } from '@/service/use-common'
import { cn } from '@/utils/classnames'
import AccountAbout from '../account-about'
@ -178,7 +179,7 @@ export default function AppSelector() {
</Link>
</MenuItem>
{
document?.body?.getAttribute('data-public-site-about') !== 'hide' && (
env.NEXT_PUBLIC_SITE_ABOUT !== 'hide' && (
<MenuItem>
<div
className={cn(itemClassName, 'justify-between', 'data-[active]:bg-state-base-hover')}

View File

@ -104,7 +104,7 @@ const MembersPage = () => {
<UpgradeBtn className="mr-2" loc="member-invite" />
)}
<div className="shrink-0">
<InviteButton disabled={!isCurrentWorkspaceManager || isMemberFull} onClick={() => setInviteModalVisible(true)} />
{isCurrentWorkspaceManager && <InviteButton disabled={isMemberFull} onClick={() => setInviteModalVisible(true)} />}
</div>
</div>
<div className="overflow-visible lg:overflow-visible">

View File

@ -3,6 +3,7 @@
import { SerwistProvider } from '@serwist/turbopack/react'
import { useEffect } from 'react'
import { IS_DEV } from '@/config'
import { env } from '@/env'
import { isClient } from '@/utils/client'
export function PWAProvider({ children }: { children: React.ReactNode }) {
@ -10,7 +11,7 @@ export function PWAProvider({ children }: { children: React.ReactNode }) {
return <DisabledPWAProvider>{children}</DisabledPWAProvider>
}
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
const basePath = env.NEXT_PUBLIC_BASE_PATH
const swUrl = `${basePath}/serwist/sw.js`
return (

View File

@ -1,6 +1,6 @@
import type { TFunction } from 'i18next'
import type { SchemaOptions } from './types'
import { z } from 'zod'
import * as z from 'zod'
import { InputTypeEnum } from '@/app/components/base/form/components/field/input-type-select/types'
import { MAX_VAR_KEY_LENGTH } from '@/config'
import { PipelineInputVarType } from '@/models/pipeline'

View File

@ -1,6 +1,6 @@
import type { PropsWithChildren } from 'react'
import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportStatus } from '@/models/app'
import UpdateDSLModal from './update-dsl-modal'
@ -145,11 +145,6 @@ vi.mock('@/app/components/workflow/constants', () => ({
WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
}))
afterEach(() => {
cleanup()
vi.clearAllMocks()
})
describe('UpdateDSLModal', () => {
const mockOnCancel = vi.fn()
const mockOnBackup = vi.fn()

View File

@ -1,40 +1,17 @@
'use client'
import type { MouseEventHandler } from 'react'
import {
RiAlertFill,
RiCloseLine,
RiFileDownloadLine,
} from '@remixicon/react'
import {
memo,
useCallback,
useRef,
useState,
} from 'react'
import { memo } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import Uploader from '@/app/components/app/create-from-dsl-modal/uploader'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
import { ToastContext } from '@/app/components/base/toast'
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { useWorkflowStore } from '@/app/components/workflow/store'
import {
initialEdges,
initialNodes,
} from '@/app/components/workflow/utils'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import {
useImportPipelineDSL,
useImportPipelineDSLConfirm,
} from '@/service/use-pipeline'
import { fetchWorkflowDraft } from '@/service/workflow'
import { useUpdateDSLModal } from '../hooks/use-update-dsl-modal'
import VersionMismatchModal from './version-mismatch-modal'
type UpdateDSLModalProps = {
onCancel: () => void
@ -48,146 +25,17 @@ const UpdateDSLModal = ({
onImport,
}: UpdateDSLModalProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
const [loading, setLoading] = useState(false)
const { eventEmitter } = useEventEmitterContextContext()
const [show, setShow] = useState(true)
const [showErrorModal, setShowErrorModal] = useState(false)
const [versions, setVersions] = useState<{ importedVersion: string, systemVersion: string }>()
const [importId, setImportId] = useState<string>()
const { handleCheckPluginDependencies } = usePluginDependencies()
const { mutateAsync: importDSL } = useImportPipelineDSL()
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
const workflowStore = useWorkflowStore()
const readFile = (file: File) => {
const reader = new FileReader()
reader.onload = function (event) {
const content = event.target?.result
setFileContent(content as string)
}
reader.readAsText(file)
}
const handleFile = (file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
if (!file)
setFileContent('')
}
const handleWorkflowUpdate = useCallback(async (pipelineId: string) => {
const {
graph,
hash,
rag_pipeline_variables,
} = await fetchWorkflowDraft(`/rag/pipelines/${pipelineId}/workflows/draft`)
const { nodes, edges, viewport } = graph
eventEmitter?.emit({
type: WORKFLOW_DATA_UPDATE,
payload: {
nodes: initialNodes(nodes, edges),
edges: initialEdges(edges, nodes),
viewport,
hash,
rag_pipeline_variables: rag_pipeline_variables || [],
},
} as any)
}, [eventEmitter])
const isCreatingRef = useRef(false)
const handleImport: MouseEventHandler = useCallback(async () => {
const { pipelineId } = workflowStore.getState()
if (isCreatingRef.current)
return
isCreatingRef.current = true
if (!currentFile)
return
try {
if (pipelineId && fileContent) {
setLoading(true)
const response = await importDSL({ mode: DSLImportMode.YAML_CONTENT, yaml_content: fileContent, pipeline_id: pipelineId })
const { id, status, pipeline_id, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
if (!pipeline_id) {
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
return
}
handleWorkflowUpdate(pipeline_id)
if (onImport)
onImport()
notify({
type: status === DSLImportStatus.COMPLETED ? 'success' : 'warning',
message: t(status === DSLImportStatus.COMPLETED ? 'common.importSuccess' : 'common.importWarning', { ns: 'workflow' }),
children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('common.importWarningDetails', { ns: 'workflow' }),
})
await handleCheckPluginDependencies(pipeline_id, true)
setLoading(false)
onCancel()
}
else if (status === DSLImportStatus.PENDING) {
setShow(false)
setTimeout(() => {
setShowErrorModal(true)
}, 300)
setVersions({
importedVersion: imported_dsl_version ?? '',
systemVersion: current_dsl_version ?? '',
})
setImportId(id)
}
else {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
}
// eslint-disable-next-line unused-imports/no-unused-vars
catch (e) {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
isCreatingRef.current = false
}, [currentFile, fileContent, onCancel, notify, t, onImport, handleWorkflowUpdate, handleCheckPluginDependencies, workflowStore, importDSL])
const onUpdateDSLConfirm: MouseEventHandler = async () => {
try {
if (!importId)
return
const response = await importDSLConfirm(importId)
const { status, pipeline_id } = response
if (status === DSLImportStatus.COMPLETED) {
if (!pipeline_id) {
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
return
}
handleWorkflowUpdate(pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
if (onImport)
onImport()
notify({ type: 'success', message: t('common.importSuccess', { ns: 'workflow' }) })
setLoading(false)
onCancel()
}
else if (status === DSLImportStatus.FAILED) {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
// eslint-disable-next-line unused-imports/no-unused-vars
catch (e) {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
const {
currentFile,
handleFile,
show,
showErrorModal,
setShowErrorModal,
loading,
versions,
handleImport,
onUpdateDSLConfirm,
} = useUpdateDSLModal({ onCancel, onImport })
return (
<>
@@ -250,32 +98,12 @@ const UpdateDSLModal = ({
</Button>
</div>
</Modal>
<Modal
<VersionMismatchModal
isShow={showErrorModal}
versions={versions}
onClose={() => setShowErrorModal(false)}
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="system-md-regular flex grow flex-col text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />
<div>
{t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
<span className="system-md-medium">{versions?.importedVersion}</span>
</div>
<div>
{t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
<span className="system-md-medium">{versions?.systemVersion}</span>
</div>
</div>
</div>
<div className="flex items-start justify-end gap-2 self-stretch pt-6">
<Button variant="secondary" onClick={() => setShowErrorModal(false)}>{t('newApp.Cancel', { ns: 'app' })}</Button>
<Button variant="primary" destructive onClick={onUpdateDSLConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
</div>
</Modal>
onConfirm={onUpdateDSLConfirm}
/>
</>
)
}

View File

@@ -0,0 +1,117 @@
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import VersionMismatchModal from './version-mismatch-modal'
describe('VersionMismatchModal', () => {
const mockOnClose = vi.fn()
const mockOnConfirm = vi.fn()
const defaultVersions = {
importedVersion: '0.8.0',
systemVersion: '1.0.0',
}
const defaultProps = {
isShow: true,
versions: defaultVersions,
onClose: mockOnClose,
onConfirm: mockOnConfirm,
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render dialog when isShow is true', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByRole('dialog')).toBeInTheDocument()
})
it('should not render dialog when isShow is false', () => {
render(<VersionMismatchModal {...defaultProps} isShow={false} />)
expect(screen.queryByRole('dialog')).not.toBeInTheDocument()
})
it('should render error title', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
it('should render all error description parts', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorPart1')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart2')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart3')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart4')).toBeInTheDocument()
})
it('should display imported and system version numbers', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('0.8.0')).toBeInTheDocument()
expect(screen.getByText('1.0.0')).toBeInTheDocument()
})
it('should render cancel and confirm buttons', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByRole('button', { name: /app\.newApp\.Cancel/ })).toBeInTheDocument()
expect(screen.getByRole('button', { name: /app\.newApp\.Confirm/ })).toBeInTheDocument()
})
})
describe('user interactions', () => {
it('should call onClose when cancel button is clicked', () => {
render(<VersionMismatchModal {...defaultProps} />)
fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Cancel/ }))
expect(mockOnClose).toHaveBeenCalledTimes(1)
})
it('should call onConfirm when confirm button is clicked', () => {
render(<VersionMismatchModal {...defaultProps} />)
fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Confirm/ }))
expect(mockOnConfirm).toHaveBeenCalledTimes(1)
})
})
describe('button variants', () => {
it('should render cancel button with secondary variant', () => {
render(<VersionMismatchModal {...defaultProps} />)
const cancelBtn = screen.getByRole('button', { name: /app\.newApp\.Cancel/ })
expect(cancelBtn).toHaveClass('btn-secondary')
})
it('should render confirm button with primary destructive variant', () => {
render(<VersionMismatchModal {...defaultProps} />)
const confirmBtn = screen.getByRole('button', { name: /app\.newApp\.Confirm/ })
expect(confirmBtn).toHaveClass('btn-primary')
expect(confirmBtn).toHaveClass('btn-destructive')
})
})
describe('edge cases', () => {
it('should handle undefined versions gracefully', () => {
render(<VersionMismatchModal {...defaultProps} versions={undefined} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
it('should handle empty version strings', () => {
const emptyVersions = { importedVersion: '', systemVersion: '' }
render(<VersionMismatchModal {...defaultProps} versions={emptyVersions} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
})
})

View File

@@ -0,0 +1,54 @@
import type { MouseEventHandler } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
type VersionMismatchModalProps = {
isShow: boolean
versions?: {
importedVersion: string
systemVersion: string
}
onClose: () => void
onConfirm: MouseEventHandler
}
const VersionMismatchModal = ({
isShow,
versions,
onClose,
onConfirm,
}: VersionMismatchModalProps) => {
const { t } = useTranslation()
return (
<Modal
isShow={isShow}
onClose={onClose}
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="system-md-regular flex grow flex-col text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />
<div>
{t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
<span className="system-md-medium">{versions?.importedVersion}</span>
</div>
<div>
{t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
<span className="system-md-medium">{versions?.systemVersion}</span>
</div>
</div>
</div>
<div className="flex items-start justify-end gap-2 self-stretch pt-6">
<Button variant="secondary" onClick={onClose}>{t('newApp.Cancel', { ns: 'app' })}</Button>
<Button variant="primary" destructive onClick={onConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
</div>
</Modal>
)
}
export default VersionMismatchModal

View File

@@ -68,23 +68,20 @@ vi.mock('@/config', () => ({
API_PREFIX: '/api',
}))
// Mock postWithKeepalive from service/fetch
const mockPostWithKeepalive = vi.fn()
vi.mock('@/service/fetch', () => ({
postWithKeepalive: (...args: unknown[]) => mockPostWithKeepalive(...args),
}))
// ============================================================================
// Tests
// ============================================================================
describe('useNodesSyncDraft', () => {
const mockSendBeacon = vi.fn()
beforeEach(() => {
vi.clearAllMocks()
// Setup navigator.sendBeacon mock
Object.defineProperty(navigator, 'sendBeacon', {
value: mockSendBeacon,
writable: true,
configurable: true,
})
// Default store state
mockStoreGetState.mockReturnValue({
getNodes: mockGetNodes,
@@ -134,7 +131,7 @@ describe('useNodesSyncDraft', () => {
})
describe('syncWorkflowDraftWhenPageClose', () => {
it('should not call sendBeacon when nodes are read only', () => {
it('should not call postWithKeepalive when nodes are read only', () => {
mockGetNodesReadOnly.mockReturnValue(true)
const { result } = renderHook(() => useNodesSyncDraft())
@@ -143,10 +140,10 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockSendBeacon).not.toHaveBeenCalled()
expect(mockPostWithKeepalive).not.toHaveBeenCalled()
})
it('should call sendBeacon with correct URL and params', () => {
it('should call postWithKeepalive with correct URL and params', () => {
mockGetNodesReadOnly.mockReturnValue(false)
mockGetNodes.mockReturnValue([
{ id: 'node-1', data: { type: 'start' }, position: { x: 0, y: 0 } },
@@ -158,13 +155,16 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockSendBeacon).toHaveBeenCalledWith(
expect(mockPostWithKeepalive).toHaveBeenCalledWith(
'/api/rag/pipelines/test-pipeline-id/workflows/draft',
expect.any(String),
expect.objectContaining({
graph: expect.any(Object),
hash: 'test-hash',
}),
)
})
it('should not call sendBeacon when pipelineId is missing', () => {
it('should not call postWithKeepalive when pipelineId is missing', () => {
mockWorkflowStoreGetState.mockReturnValue({
pipelineId: undefined,
environmentVariables: [],
@@ -178,10 +178,10 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockSendBeacon).not.toHaveBeenCalled()
expect(mockPostWithKeepalive).not.toHaveBeenCalled()
})
it('should not call sendBeacon when nodes array is empty', () => {
it('should not call postWithKeepalive when nodes array is empty', () => {
mockGetNodes.mockReturnValue([])
const { result } = renderHook(() => useNodesSyncDraft())
@@ -190,7 +190,7 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockSendBeacon).not.toHaveBeenCalled()
expect(mockPostWithKeepalive).not.toHaveBeenCalled()
})
it('should filter out temp nodes', () => {
@@ -204,8 +204,8 @@
result.current.syncWorkflowDraftWhenPageClose()
})
// Should not call sendBeacon because after filtering temp nodes, array is empty
expect(mockSendBeacon).not.toHaveBeenCalled()
// Should not call postWithKeepalive because after filtering temp nodes, array is empty
expect(mockPostWithKeepalive).not.toHaveBeenCalled()
})
it('should remove underscore-prefixed data keys from nodes', () => {
@@ -219,9 +219,9 @@
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockSendBeacon).toHaveBeenCalled()
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.graph.nodes[0].data._privateData).toBeUndefined()
expect(mockPostWithKeepalive).toHaveBeenCalled()
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.graph.nodes[0].data._privateData).toBeUndefined()
})
})
@@ -395,8 +395,8 @@ describe('useNodesSyncDraft', () => {
result.current.syncWorkflowDraftWhenPageClose()
})
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.graph.viewport).toEqual({ x: 100, y: 200, zoom: 1.5 })
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.graph.viewport).toEqual({ x: 100, y: 200, zoom: 1.5 })
})
it('should include environment variables in params', () => {
@@ -418,8 +418,8 @@
result.current.syncWorkflowDraftWhenPageClose()
})
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.environment_variables).toEqual([{ key: 'API_KEY', value: 'secret' }])
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.environment_variables).toEqual([{ key: 'API_KEY', value: 'secret' }])
})
it('should include rag pipeline variables in params', () => {
@@ -441,8 +441,8 @@
result.current.syncWorkflowDraftWhenPageClose()
})
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.rag_pipeline_variables).toEqual([{ variable: 'input', type: 'text-input' }])
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.rag_pipeline_variables).toEqual([{ variable: 'input', type: 'text-input' }])
})
it('should remove underscore-prefixed keys from edges', () => {
@@ -461,9 +461,9 @@
result.current.syncWorkflowDraftWhenPageClose()
})
const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
expect(sentData.graph.edges[0].data._hidden).toBeUndefined()
expect(sentData.graph.edges[0].data.visible).toBe(false)
const sentParams = mockPostWithKeepalive.mock.calls[0][1]
expect(sentParams.graph.edges[0].data._hidden).toBeUndefined()
expect(sentParams.graph.edges[0].data.visible).toBe(false)
})
})
})

View File

@@ -9,6 +9,7 @@ import {
useWorkflowStore,
} from '@/app/components/workflow/store'
import { API_PREFIX } from '@/config'
import { postWithKeepalive } from '@/service/fetch'
import { syncWorkflowDraft } from '@/service/workflow'
import { usePipelineRefreshDraft } from '.'
@@ -76,12 +77,8 @@ export const useNodesSyncDraft = () => {
return
const postParams = getPostParams()
if (postParams) {
navigator.sendBeacon(
`${API_PREFIX}${postParams.url}`,
JSON.stringify(postParams.params),
)
}
if (postParams)
postWithKeepalive(`${API_PREFIX}${postParams.url}`, postParams.params)
}, [getPostParams, getNodesReadOnly])
const performSync = useCallback(async (

View File
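
A note on the sendBeacon-to-postWithKeepalive swap above: navigator.sendBeacon posts an opaque payload and cannot attach an Authorization header or an explicit JSON content type, which is the usual motivation for routing unload-time draft syncs through a shared fetch helper instead. The real postWithKeepalive lives in '@/service/fetch' and is not shown in this compare; a minimal sketch, assuming a fire-and-forget JSON POST, might look like this:

// Hypothetical sketch of postWithKeepalive (the actual helper in
// '@/service/fetch' is not part of this diff). `keepalive: true` lets the
// request outlive the unloading page, as sendBeacon does, while still
// permitting JSON headers; browsers cap keepalive bodies at roughly 64 KB.
export const postWithKeepalive = (url: string, params: Record<string, unknown>): void => {
  fetch(url, {
    method: 'POST',
    keepalive: true,
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(params),
  }).catch(() => {
    // Fire-and-forget: there is nowhere to surface errors during unload.
  })
}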

@@ -92,8 +92,10 @@ vi.mock('@/service/workflow', () => ({
}))
const mockInvalidAllLastRun = vi.fn()
const mockInvalidateRunHistory = vi.fn()
vi.mock('@/service/use-workflow', () => ({
useInvalidAllLastRun: () => mockInvalidAllLastRun,
useInvalidateWorkflowRunHistory: () => mockInvalidateRunHistory,
}))
// Mock FlowType
@@ -472,6 +474,7 @@ describe('usePipelineRun', () => {
})
expect(onWorkflowStarted).toHaveBeenCalledWith({ task_id: 'task-1' })
expect(mockInvalidateRunHistory).toHaveBeenCalled()
})
it('should call onWorkflowFinished callback when provided', async () => {
@@ -493,6 +496,7 @@
})
expect(onWorkflowFinished).toHaveBeenCalledWith({ status: 'succeeded' })
expect(mockInvalidateRunHistory).toHaveBeenCalled()
})
it('should call onError callback when provided', async () => {
@@ -514,6 +518,7 @@
})
expect(onError).toHaveBeenCalledWith({ message: 'error' })
expect(mockInvalidateRunHistory).toHaveBeenCalled()
})
it('should call onNodeStarted callback when provided', async () => {

View File

@@ -12,7 +12,7 @@ import { useWorkflowRunEvent } from '@/app/components/workflow/hooks/use-workflo
import { useStore, useWorkflowStore } from '@/app/components/workflow/store'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import { ssePost } from '@/service/base'
import { useInvalidAllLastRun } from '@/service/use-workflow'
import { useInvalidAllLastRun, useInvalidateWorkflowRunHistory } from '@/service/use-workflow'
import { stopWorkflowRun } from '@/service/workflow'
import { FlowType } from '@/types/common'
import { useNodesSyncDraft } from './use-nodes-sync-draft'
@@ -93,6 +93,7 @@ export const usePipelineRun = () => {
const pipelineId = useStore(s => s.pipelineId)
const invalidAllLastRun = useInvalidAllLastRun(FlowType.ragPipeline, pipelineId)
const invalidateRunHistory = useInvalidateWorkflowRunHistory()
const { fetchInspectVars } = useSetWorkflowVarsWithValue({
flowType: FlowType.ragPipeline,
flowId: pipelineId!,
@@ -132,6 +133,7 @@
...restCallback
} = callback || {}
const { pipelineId } = workflowStore.getState()
const runHistoryUrl = `/rag/pipelines/${pipelineId}/workflow-runs`
workflowStore.setState({ historyWorkflowData: undefined })
const workflowContainer = document.getElementById('workflow-container')
@@ -170,12 +172,14 @@
},
onWorkflowStarted: (params) => {
handleWorkflowStarted(params)
invalidateRunHistory(runHistoryUrl)
if (onWorkflowStarted)
onWorkflowStarted(params)
},
onWorkflowFinished: (params) => {
handleWorkflowFinished(params)
invalidateRunHistory(runHistoryUrl)
fetchInspectVars({})
invalidAllLastRun()
@@ -184,6 +188,7 @@
},
onError: (params) => {
handleWorkflowFailed()
invalidateRunHistory(runHistoryUrl)
if (onError)
onError(params)
@@ -275,7 +280,7 @@ export const usePipelineRun = () => {
...restCallback,
},
)
}, [store, doSyncWorkflowDraft, workflowStore, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, handleWorkflowFailed, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace])
}, [store, doSyncWorkflowDraft, workflowStore, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, invalidateRunHistory, handleWorkflowFailed, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace])
const handleStopRun = useCallback((taskId: string) => {
const { pipelineId } = workflowStore.getState()

View File
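
The useInvalidateWorkflowRunHistory hook wired in above is imported from '@/service/use-workflow', whose implementation is not included in this compare. Judging from its call sites, it takes the run-history endpoint URL and invalidates the matching query cache so the history panel refetches when a run starts, finishes, or fails. A rough sketch under those assumptions (the query-key shape here is a guess):

import { useQueryClient } from '@tanstack/react-query'

// Hypothetical sketch; the real hook lives in '@/service/use-workflow'.
// Assumes run-history lists are cached under a key derived from their URL.
export const useInvalidateWorkflowRunHistory = () => {
  const queryClient = useQueryClient()
  return (url: string) => {
    // Drop the cached run list for this flow so the next read refetches it.
    queryClient.invalidateQueries({ queryKey: ['workflowRunHistory', url] })
  }
}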

@@ -0,0 +1,551 @@
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportMode, DSLImportStatus } from '@/models/app'
import { useUpdateDSLModal } from './use-update-dsl-modal'
// --- FileReader stub ---
class MockFileReader {
onload: ((this: FileReader, event: ProgressEvent<FileReader>) => void) | null = null
readAsText(_file: Blob) {
const event = { target: { result: 'test content' } } as unknown as ProgressEvent<FileReader>
this.onload?.call(this as unknown as FileReader, event)
}
}
vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader)
// --- Module-level mock functions ---
const mockNotify = vi.fn()
const mockEmit = vi.fn()
const mockImportDSL = vi.fn()
const mockImportDSLConfirm = vi.fn()
const mockHandleCheckPluginDependencies = vi.fn()
// --- Mocks ---
vi.mock('react-i18next', () => ({
useTranslation: () => ({ t: (key: string) => key }),
}))
vi.mock('use-context-selector', () => ({
useContext: () => ({ notify: mockNotify }),
}))
vi.mock('@/app/components/base/toast', () => ({
ToastContext: {},
}))
vi.mock('@/context/event-emitter', () => ({
useEventEmitterContextContext: () => ({
eventEmitter: { emit: mockEmit },
}),
}))
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: () => ({ pipelineId: 'test-pipeline-id' }),
}),
}))
vi.mock('@/app/components/workflow/utils', () => ({
initialNodes: (nodes: unknown[]) => nodes,
initialEdges: (edges: unknown[]) => edges,
}))
vi.mock('@/app/components/workflow/constants', () => ({
WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
}))
vi.mock('@/app/components/workflow/plugin-dependency/hooks', () => ({
usePluginDependencies: () => ({
handleCheckPluginDependencies: mockHandleCheckPluginDependencies,
}),
}))
vi.mock('@/service/use-pipeline', () => ({
useImportPipelineDSL: () => ({ mutateAsync: mockImportDSL }),
useImportPipelineDSLConfirm: () => ({ mutateAsync: mockImportDSLConfirm }),
}))
vi.mock('@/service/workflow', () => ({
fetchWorkflowDraft: vi.fn().mockResolvedValue({
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
hash: 'test-hash',
rag_pipeline_variables: [],
}),
}))
// --- Helpers ---
const createFile = () => new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
// Cast MouseEventHandler to a plain callable for tests (event param is unused)
type AsyncFn = () => Promise<void>
describe('useUpdateDSLModal', () => {
const mockOnCancel = vi.fn()
const mockOnImport = vi.fn()
const renderUpdateDSLModal = (overrides?: { onImport?: () => void }) =>
renderHook(() =>
useUpdateDSLModal({
onCancel: mockOnCancel,
onImport: overrides?.onImport ?? mockOnImport,
}),
)
beforeEach(() => {
vi.clearAllMocks()
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
})
// Initial state values
describe('initial state', () => {
it('should return correct defaults', () => {
const { result } = renderUpdateDSLModal()
expect(result.current.currentFile).toBeUndefined()
expect(result.current.show).toBe(true)
expect(result.current.showErrorModal).toBe(false)
expect(result.current.loading).toBe(false)
expect(result.current.versions).toBeUndefined()
})
})
// File handling
describe('handleFile', () => {
it('should set currentFile when file is provided', () => {
const { result } = renderUpdateDSLModal()
const file = createFile()
act(() => {
result.current.handleFile(file)
})
expect(result.current.currentFile).toBe(file)
})
it('should clear currentFile when called with undefined', () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
act(() => {
result.current.handleFile(undefined)
})
expect(result.current.currentFile).toBeUndefined()
})
})
// Modal state management
describe('modal state', () => {
it('should allow toggling showErrorModal', () => {
const { result } = renderUpdateDSLModal()
expect(result.current.showErrorModal).toBe(false)
act(() => {
result.current.setShowErrorModal(true)
})
expect(result.current.showErrorModal).toBe(true)
act(() => {
result.current.setShowErrorModal(false)
})
expect(result.current.showErrorModal).toBe(false)
})
})
// Import flow
describe('handleImport', () => {
it('should call importDSL with correct parameters', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockImportDSL).toHaveBeenCalledWith({
mode: DSLImportMode.YAML_CONTENT,
yaml_content: 'test content',
pipeline_id: 'test-pipeline-id',
})
})
it('should not call importDSL when no file is selected', async () => {
const { result } = renderUpdateDSLModal()
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockImportDSL).not.toHaveBeenCalled()
})
// COMPLETED status
it('should notify success on COMPLETED status', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'success' }))
})
it('should call onImport on successful import', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockOnImport).toHaveBeenCalled()
})
it('should call onCancel on successful import', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockOnCancel).toHaveBeenCalled()
})
it('should emit workflow update event on success', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockEmit).toHaveBeenCalled()
})
it('should call handleCheckPluginDependencies on success', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockHandleCheckPluginDependencies).toHaveBeenCalledWith('test-pipeline-id', true)
})
// COMPLETED_WITH_WARNINGS status
it('should notify warning on COMPLETED_WITH_WARNINGS status', async () => {
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.COMPLETED_WITH_WARNINGS,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'warning' }))
})
// PENDING status (version mismatch)
it('should switch to version mismatch modal on PENDING status', async () => {
vi.useFakeTimers({ shouldAdvanceTime: true })
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.PENDING,
pipeline_id: 'test-pipeline-id',
imported_dsl_version: '0.8.0',
current_dsl_version: '1.0.0',
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
await vi.advanceTimersByTimeAsync(350)
})
expect(result.current.show).toBe(false)
expect(result.current.showErrorModal).toBe(true)
expect(result.current.versions).toEqual({
importedVersion: '0.8.0',
systemVersion: '1.0.0',
})
vi.useRealTimers()
})
it('should default version strings to empty when undefined', async () => {
vi.useFakeTimers({ shouldAdvanceTime: true })
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.PENDING,
pipeline_id: 'test-pipeline-id',
imported_dsl_version: undefined,
current_dsl_version: undefined,
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
await vi.advanceTimersByTimeAsync(350)
})
expect(result.current.versions).toEqual({
importedVersion: '',
systemVersion: '',
})
vi.useRealTimers()
})
// FAILED / unknown status
it('should notify error on FAILED status', async () => {
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.FAILED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
// Exception
it('should notify error when importDSL throws', async () => {
mockImportDSL.mockRejectedValue(new Error('Network error'))
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
// Missing pipeline_id
it('should notify error when pipeline_id is missing on success', async () => {
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.COMPLETED,
pipeline_id: undefined,
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
})
// Confirm flow (after PENDING → version mismatch)
describe('onUpdateDSLConfirm', () => {
// Helper: drive the hook into PENDING state so importId is set
const setupPendingState = async (result: { current: ReturnType<typeof useUpdateDSLModal> }) => {
vi.useFakeTimers({ shouldAdvanceTime: true })
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.PENDING,
pipeline_id: 'test-pipeline-id',
imported_dsl_version: '0.8.0',
current_dsl_version: '1.0.0',
})
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
await vi.advanceTimersByTimeAsync(350)
})
vi.useRealTimers()
vi.clearAllMocks()
mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
}
it('should call importDSLConfirm with the stored importId', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockImportDSLConfirm).toHaveBeenCalledWith('import-id')
})
it('should notify success and call onCancel after successful confirm', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'success' }))
expect(mockOnCancel).toHaveBeenCalled()
})
it('should call onImport after successful confirm', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockOnImport).toHaveBeenCalled()
})
it('should notify error on FAILED confirm status', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.FAILED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
it('should notify error when confirm throws exception', async () => {
mockImportDSLConfirm.mockRejectedValue(new Error('Confirm failed'))
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
it('should notify error when confirm succeeds but pipeline_id is missing', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
pipeline_id: undefined,
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
it('should not call importDSLConfirm when importId is not set', async () => {
const { result } = renderUpdateDSLModal()
// No pending state → importId is undefined
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockImportDSLConfirm).not.toHaveBeenCalled()
})
})
// Optional onImport callback
describe('optional onImport', () => {
it('should work without onImport callback', async () => {
const { result } = renderHook(() =>
useUpdateDSLModal({ onCancel: mockOnCancel }),
)
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
// Should succeed without throwing
expect(mockOnCancel).toHaveBeenCalled()
})
})
})

View File

@@ -0,0 +1,205 @@
import type { MouseEventHandler } from 'react'
import {
useCallback,
useRef,
useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { useWorkflowStore } from '@/app/components/workflow/store'
import {
initialEdges,
initialNodes,
} from '@/app/components/workflow/utils'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import {
useImportPipelineDSL,
useImportPipelineDSLConfirm,
} from '@/service/use-pipeline'
import { fetchWorkflowDraft } from '@/service/workflow'
type VersionInfo = {
importedVersion: string
systemVersion: string
}
type UseUpdateDSLModalParams = {
onCancel: () => void
onImport?: () => void
}
const isCompletedStatus = (status: DSLImportStatus): boolean =>
status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS
export const useUpdateDSLModal = ({ onCancel, onImport }: UseUpdateDSLModalParams) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const { eventEmitter } = useEventEmitterContextContext()
const workflowStore = useWorkflowStore()
const { handleCheckPluginDependencies } = usePluginDependencies()
const { mutateAsync: importDSL } = useImportPipelineDSL()
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
// File state
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
// Modal state
const [show, setShow] = useState(true)
const [showErrorModal, setShowErrorModal] = useState(false)
// Import state
const [loading, setLoading] = useState(false)
const [versions, setVersions] = useState<VersionInfo>()
const [importId, setImportId] = useState<string>()
const isCreatingRef = useRef(false)
const readFile = (file: File) => {
const reader = new FileReader()
reader.onload = (event) => {
setFileContent(event.target?.result as string)
}
reader.readAsText(file)
}
const handleFile = (file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
if (!file)
setFileContent('')
}
const notifyError = useCallback(() => {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}, [notify, t])
const updateWorkflow = useCallback(async (pipelineId: string) => {
const { graph, hash, rag_pipeline_variables } = await fetchWorkflowDraft(
`/rag/pipelines/${pipelineId}/workflows/draft`,
)
const { nodes, edges, viewport } = graph
eventEmitter?.emit({
type: WORKFLOW_DATA_UPDATE,
payload: {
nodes: initialNodes(nodes, edges),
edges: initialEdges(edges, nodes),
viewport,
hash,
rag_pipeline_variables: rag_pipeline_variables || [],
},
})
}, [eventEmitter])
const completeImport = useCallback(async (
pipelineId: string | undefined,
status: DSLImportStatus = DSLImportStatus.COMPLETED,
) => {
if (!pipelineId) {
notifyError()
return
}
updateWorkflow(pipelineId)
onImport?.()
const isWarning = status === DSLImportStatus.COMPLETED_WITH_WARNINGS
notify({
type: isWarning ? 'warning' : 'success',
message: t(isWarning ? 'common.importWarning' : 'common.importSuccess', { ns: 'workflow' }),
children: isWarning && t('common.importWarningDetails', { ns: 'workflow' }),
})
await handleCheckPluginDependencies(pipelineId, true)
setLoading(false)
onCancel()
}, [updateWorkflow, onImport, notify, t, handleCheckPluginDependencies, onCancel, notifyError])
const showVersionMismatch = useCallback((
id: string,
importedVersion?: string,
systemVersion?: string,
) => {
setShow(false)
setTimeout(() => setShowErrorModal(true), 300)
setVersions({
importedVersion: importedVersion ?? '',
systemVersion: systemVersion ?? '',
})
setImportId(id)
}, [])
const handleImport: MouseEventHandler = useCallback(async () => {
const { pipelineId } = workflowStore.getState()
if (isCreatingRef.current)
return
isCreatingRef.current = true
if (!currentFile)
return
try {
if (!pipelineId || !fileContent)
return
setLoading(true)
const response = await importDSL({
mode: DSLImportMode.YAML_CONTENT,
yaml_content: fileContent,
pipeline_id: pipelineId,
})
const { id, status, pipeline_id, imported_dsl_version, current_dsl_version } = response
if (isCompletedStatus(status))
await completeImport(pipeline_id, status)
else if (status === DSLImportStatus.PENDING)
showVersionMismatch(id, imported_dsl_version, current_dsl_version)
else
notifyError()
}
catch {
notifyError()
}
isCreatingRef.current = false
}, [currentFile, fileContent, workflowStore, importDSL, completeImport, showVersionMismatch, notifyError])
const onUpdateDSLConfirm: MouseEventHandler = useCallback(async () => {
if (!importId)
return
try {
const { status, pipeline_id } = await importDSLConfirm(importId)
if (status === DSLImportStatus.COMPLETED) {
await completeImport(pipeline_id)
return
}
if (status === DSLImportStatus.FAILED)
notifyError()
}
catch {
notifyError()
}
}, [importId, importDSLConfirm, completeImport, notifyError])
return {
currentFile,
handleFile,
show,
showErrorModal,
setShowErrorModal,
loading,
versions,
handleImport,
onUpdateDSLConfirm,
}
}

View File

@@ -4,12 +4,13 @@ import * as Sentry from '@sentry/react'
import { useEffect } from 'react'
import { IS_DEV } from '@/config'
import { env } from '@/env'
const SentryInitializer = ({
children,
}: { children: React.ReactElement }) => {
useEffect(() => {
const SENTRY_DSN = document?.body?.getAttribute('data-public-sentry-dsn')
const SENTRY_DSN = env.NEXT_PUBLIC_SENTRY_DSN
if (!IS_DEV && SENTRY_DSN) {
Sentry.init({
dsn: SENTRY_DSN,

View File
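
The Sentry change above replaces a DOM data-attribute lookup with the typed env module introduced by the zod v4 refactor (f355c8d595). The actual schema in '@/env' is not shown in this compare; a minimal sketch of such a module, using only the variable this diff relies on, could be:

import { z } from 'zod'

// Hypothetical sketch of '@/env' (the real schema is not in this diff).
// Validating once at module load yields typed access everywhere else.
const clientEnvSchema = z.object({
  NEXT_PUBLIC_SENTRY_DSN: z.string().optional(),
})

// NEXT_PUBLIC_* vars must be referenced statically for Next.js to inline them.
export const env = clientEnvSchema.parse({
  NEXT_PUBLIC_SENTRY_DSN: process.env.NEXT_PUBLIC_SENTRY_DSN,
})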

@@ -13,6 +13,54 @@ describe('buildWorkflowOutputParameters', () => {
expect(result).toBe(params)
})
it('fills missing output description and type from schema when array input exists', () => {
const params: WorkflowToolProviderOutputParameter[] = [
{ name: 'answer', description: '', type: undefined },
{ name: 'files', description: 'keep this description', type: VarType.arrayFile },
]
const schema: WorkflowToolProviderOutputSchema = {
type: 'object',
properties: {
answer: {
type: VarType.string,
description: 'Generated answer',
},
files: {
type: VarType.arrayFile,
description: 'Schema files description',
},
},
}
const result = buildWorkflowOutputParameters(params, schema)
expect(result).toEqual([
{ name: 'answer', description: 'Generated answer', type: VarType.string },
{ name: 'files', description: 'keep this description', type: VarType.arrayFile },
])
})
it('falls back to empty description when both payload and schema descriptions are missing', () => {
const params: WorkflowToolProviderOutputParameter[] = [
{ name: 'missing_desc', description: '', type: undefined },
]
const schema: WorkflowToolProviderOutputSchema = {
type: 'object',
properties: {
other_field: {
type: VarType.string,
description: 'Other',
},
},
}
const result = buildWorkflowOutputParameters(params, schema)
expect(result).toEqual([
{ name: 'missing_desc', description: '', type: undefined },
])
})
it('derives parameters from schema when explicit array missing', () => {
const schema: WorkflowToolProviderOutputSchema = {
type: 'object',
@@ -44,4 +92,56 @@
it('returns empty array when no source information is provided', () => {
expect(buildWorkflowOutputParameters(null, null)).toEqual([])
})
it('derives parameters from schema when explicit array is empty', () => {
const schema: WorkflowToolProviderOutputSchema = {
type: 'object',
properties: {
output_text: {
type: VarType.string,
description: 'Output text',
},
},
}
const result = buildWorkflowOutputParameters([], schema)
expect(result).toEqual([
{ name: 'output_text', description: 'Output text', type: VarType.string },
])
})
it('returns undefined type when schema output type is missing', () => {
const schema = {
type: 'object',
properties: {
answer: {
description: 'Answer without type',
},
},
} as unknown as WorkflowToolProviderOutputSchema
const result = buildWorkflowOutputParameters(undefined, schema)
expect(result).toEqual([
{ name: 'answer', description: 'Answer without type', type: undefined },
])
})
it('falls back to empty description when schema-derived description is missing', () => {
const schema = {
type: 'object',
properties: {
answer: {
type: VarType.string,
},
},
} as unknown as WorkflowToolProviderOutputSchema
const result = buildWorkflowOutputParameters(undefined, schema)
expect(result).toEqual([
{ name: 'answer', description: '', type: VarType.string },
])
})
})

View File

@@ -14,15 +14,28 @@ export const buildWorkflowOutputParameters = (
outputParameters: WorkflowToolProviderOutputParameter[] | null | undefined,
outputSchema?: WorkflowToolProviderOutputSchema | null,
): WorkflowToolProviderOutputParameter[] => {
if (Array.isArray(outputParameters))
return outputParameters
const schemaProperties = outputSchema?.properties
if (!outputSchema?.properties)
if (Array.isArray(outputParameters) && outputParameters.length > 0) {
if (!schemaProperties)
return outputParameters
return outputParameters.map((item) => {
const schema = schemaProperties[item.name]
return {
...item,
description: item.description || schema?.description || '',
type: normalizeVarType(item.type || schema?.type),
}
})
}
if (!schemaProperties)
return []
return Object.entries(outputSchema.properties).map(([name, schema]) => ({
return Object.entries(schemaProperties).map(([name, schema]) => ({
name,
description: schema.description,
description: schema.description || '',
type: normalizeVarType(schema.type),
}))
}

View File
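
Taken together with the tests above, the new behavior of buildWorkflowOutputParameters comes down to: explicit parameters keep their own values and are backfilled from the schema entry of the same name, while an empty or missing array falls back to deriving everything from the schema. For example, mirroring the first test case:

// The empty description and missing type on 'answer' are filled in
// from the schema property with the same name.
const merged = buildWorkflowOutputParameters(
  [{ name: 'answer', description: '', type: undefined }],
  {
    type: 'object',
    properties: {
      answer: { type: VarType.string, description: 'Generated answer' },
    },
  },
)
// → [{ name: 'answer', description: 'Generated answer', type: VarType.string }]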

@@ -6,6 +6,7 @@ import { useSerialAsyncCallback } from '@/app/components/workflow/hooks/use-seri
import { useNodesReadOnly } from '@/app/components/workflow/hooks/use-workflow'
import { useWorkflowStore } from '@/app/components/workflow/store'
import { API_PREFIX } from '@/config'
import { postWithKeepalive } from '@/service/fetch'
import { syncWorkflowDraft } from '@/service/workflow'
import { useWorkflowRefreshDraft } from '.'
@@ -85,7 +86,7 @@ export const useNodesSyncDraft = () => {
const postParams = getPostParams()
if (postParams)
navigator.sendBeacon(`${API_PREFIX}${postParams.url}`, JSON.stringify(postParams.params))
postWithKeepalive(`${API_PREFIX}${postParams.url}`, postParams.params)
}, [getPostParams, getNodesReadOnly])
const performSync = useCallback(async (

View File

@@ -23,7 +23,7 @@ import { useWorkflowStore } from '@/app/components/workflow/store'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import { handleStream, post, sseGet, ssePost } from '@/service/base'
import { ContentType } from '@/service/fetch'
import { useInvalidAllLastRun } from '@/service/use-workflow'
import { useInvalidAllLastRun, useInvalidateWorkflowRunHistory } from '@/service/use-workflow'
import { stopWorkflowRun } from '@/service/workflow'
import { AppModeEnum } from '@/types/app'
import { useSetWorkflowVarsWithValue } from '../../workflow/hooks/use-fetch-workflow-inspect-vars'
@@ -66,6 +66,7 @@ export const useWorkflowRun = () => {
const configsMap = useConfigsMap()
const { flowId, flowType } = configsMap
const invalidAllLastRun = useInvalidAllLastRun(flowType, flowId)
const invalidateRunHistory = useInvalidateWorkflowRunHistory()
const { fetchInspectVars } = useSetWorkflowVarsWithValue({
...configsMap,
@@ -189,6 +190,9 @@
} = callback || {}
workflowStore.setState({ historyWorkflowData: undefined })
const appDetail = useAppStore.getState().appDetail
const runHistoryUrl = appDetail?.mode === AppModeEnum.ADVANCED_CHAT
? `/apps/${appDetail.id}/advanced-chat/workflow-runs`
: `/apps/${appDetail?.id}/workflow-runs`
const workflowContainer = document.getElementById('workflow-container')
const {
@ -363,6 +367,7 @@ export const useWorkflowRun = () => {
const wrappedOnError = (params: any) => {
clearAbortController()
handleWorkflowFailed()
invalidateRunHistory(runHistoryUrl)
clearListeningState()
if (onError)
@@ -381,6 +386,7 @@
...restCallback,
onWorkflowStarted: (params) => {
handleWorkflowStarted(params)
invalidateRunHistory(runHistoryUrl)
if (onWorkflowStarted)
onWorkflowStarted(params)
@@ -388,6 +394,7 @@
onWorkflowFinished: (params) => {
clearListeningState()
handleWorkflowFinished(params)
invalidateRunHistory(runHistoryUrl)
if (onWorkflowFinished)
onWorkflowFinished(params)
@@ -496,6 +503,7 @@
},
onWorkflowPaused: (params) => {
handleWorkflowPaused()
invalidateRunHistory(runHistoryUrl)
if (onWorkflowPaused)
onWorkflowPaused(params)
const url = `/workflow/${params.workflow_run_id}/events`
@@ -694,6 +702,7 @@
},
onWorkflowFinished: (params) => {
handleWorkflowFinished(params)
invalidateRunHistory(runHistoryUrl)
if (onWorkflowFinished)
onWorkflowFinished(params)
@@ -704,6 +713,7 @@
},
onError: (params) => {
handleWorkflowFailed()
invalidateRunHistory(runHistoryUrl)
if (onError)
onError(params)
@@ -803,6 +813,7 @@
},
onWorkflowPaused: (params) => {
handleWorkflowPaused()
invalidateRunHistory(runHistoryUrl)
if (onWorkflowPaused)
onWorkflowPaused(params)
const url = `/workflow/${params.workflow_run_id}/events`
@@ -837,7 +848,7 @@
},
finalCallbacks,
)
}, [store, doSyncWorkflowDraft, workflowStore, pathname, handleWorkflowFailed, flowId, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace, handleWorkflowPaused, handleWorkflowNodeHumanInputRequired, handleWorkflowNodeHumanInputFormFilled, handleWorkflowNodeHumanInputFormTimeout])
}, [store, doSyncWorkflowDraft, workflowStore, pathname, handleWorkflowFailed, flowId, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, invalidateRunHistory, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace, handleWorkflowPaused, handleWorkflowNodeHumanInputRequired, handleWorkflowNodeHumanInputFormFilled, handleWorkflowNodeHumanInputFormTimeout])
const handleStopRun = useCallback((taskId: string) => {
const setStoppedState = () => {

View File

@@ -1,18 +1,8 @@
import {
RiCheckboxCircleLine,
RiCloseLine,
RiErrorWarningLine,
} from '@remixicon/react'
import {
memo,
useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import { AlertTriangle } from '@/app/components/base/icons/src/vender/line/alertsAndFeedback'
import {
ClockPlay,
ClockPlaySlim,
} from '@/app/components/base/icons/src/vender/line/time'
import Loading from '@/app/components/base/loading'
import {
PortalToFollowElem,
@@ -89,9 +79,7 @@
open && 'bg-components-button-secondary-bg-hover',
)}
>
<ClockPlay
className="mr-1 h-4 w-4"
/>
<span className="i-custom-vender-line-time-clock-play mr-1 h-4 w-4" />
{t('common.showRunHistory', { ns: 'workflow' })}
</div>
)
@@ -107,7 +95,7 @@
onClearLogAndMessageModal?.()
}}
>
<ClockPlay className={cn('h-4 w-4 group-hover:text-components-button-secondary-accent-text', open ? 'text-components-button-secondary-accent-text' : 'text-components-button-ghost-text')} />
<span className={cn('i-custom-vender-line-time-clock-play', 'h-4 w-4 group-hover:text-components-button-secondary-accent-text', open ? 'text-components-button-secondary-accent-text' : 'text-components-button-ghost-text')} />
</div>
</Tooltip>
)
@@ -129,7 +117,7 @@
setOpen(false)
}}
>
<RiCloseLine className="h-4 w-4 text-text-tertiary" />
<span className="i-ri-close-line h-4 w-4 text-text-tertiary" />
</div>
</div>
{
@@ -145,7 +133,7 @@
{
!data?.data.length && (
<div className="py-12">
<ClockPlaySlim className="mx-auto mb-2 h-8 w-8 text-text-quaternary" />
<span className="i-custom-vender-line-time-clock-play-slim mx-auto mb-2 h-8 w-8 text-text-quaternary" />
<div className="text-center text-[13px] text-text-quaternary">
{t('common.notRunning', { ns: 'workflow' })}
</div>
@@ -175,18 +163,18 @@
}}
>
{
!isChatMode && item.status === WorkflowRunningStatus.Stopped && (
<AlertTriangle className="mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#F79009]" />
!isChatMode && [WorkflowRunningStatus.Stopped, WorkflowRunningStatus.Paused].includes(item.status) && (
<span className="i-custom-vender-line-alertsAndFeedback-alert-triangle mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#F79009]" />
)
}
{
!isChatMode && item.status === WorkflowRunningStatus.Failed && (
<RiErrorWarningLine className="mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#F04438]" />
<span className="i-ri-error-warning-line mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#F04438]" />
)
}
{
!isChatMode && item.status === WorkflowRunningStatus.Succeeded && (
<RiCheckboxCircleLine className="mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#12B76A]" />
<span className="i-ri-checkbox-circle-line mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#12B76A]" />
)
}
<div>
@@ -196,7 +184,7 @@
item.id === historyWorkflowData?.id && 'text-text-accent',
)}
>
{`Test ${isChatMode ? 'Chat' : 'Run'}${formatWorkflowRunIdentifier(item.finished_at)}`}
{`Test ${isChatMode ? 'Chat' : 'Run'}${formatWorkflowRunIdentifier(item.finished_at, item.status)}`}
</div>
<div className="flex items-center text-xs leading-[18px] text-text-tertiary">
{item.created_by_account?.name}

View File

@@ -159,6 +159,9 @@ const useLastRun = <T>({
if (!warningForNode)
return false
if (warningForNode.unConnected && !warningForNode.errorMessage)
return false
const message = warningForNode.errorMessage || 'This node has unresolved checklist issues'
Toast.notify({ type: 'error', message })
return true

View File

@@ -4,13 +4,6 @@ import type {
} from 'react'
import type { IterationNodeType } from '@/app/components/workflow/nodes/iteration/types'
import type { NodeProps } from '@/app/components/workflow/types'
import {
RiAlertFill,
RiCheckboxCircleFill,
RiErrorWarningFill,
RiLoader2Line,
RiPauseCircleFill,
} from '@remixicon/react'
import {
cloneElement,
memo,
@@ -109,7 +102,7 @@ const BaseNode: FC<BaseNodeProps> = ({
} = useMemo(() => {
return {
showRunningBorder: (data._runningStatus === NodeRunningStatus.Running || data._runningStatus === NodeRunningStatus.Paused) && !showSelectedBorder,
showSuccessBorder: (data._runningStatus === NodeRunningStatus.Succeeded || hasVarValue) && !showSelectedBorder,
showSuccessBorder: (data._runningStatus === NodeRunningStatus.Succeeded || (hasVarValue && !data._runningStatus)) && !showSelectedBorder,
showFailedBorder: data._runningStatus === NodeRunningStatus.Failed && !showSelectedBorder,
showExceptionBorder: data._runningStatus === NodeRunningStatus.Exception && !showSelectedBorder,
}
@@ -127,7 +120,7 @@
return (
<div
className={cn(
'system-xs-medium mr-2 text-text-tertiary',
'mr-2 text-text-tertiary system-xs-medium',
data._runningStatus === NodeRunningStatus.Running && 'text-text-accent',
)}
>
@@ -167,7 +160,7 @@
{
data.type === BlockEnum.DataSource && (
<div className="absolute inset-[-2px] top-[-22px] z-[-1] rounded-[18px] bg-node-data-source-bg p-0.5 backdrop-blur-[6px]">
<div className="system-2xs-semibold-uppercase flex h-5 items-center px-2.5 text-text-tertiary">
<div className="flex h-5 items-center px-2.5 text-text-tertiary system-2xs-semibold-uppercase">
{t('blocks.datasource', { ns: 'workflow' })}
</div>
</div>
@@ -252,7 +245,7 @@
/>
<div
title={data.title}
className="system-sm-semibold-uppercase mr-1 flex grow items-center truncate text-text-primary"
className="mr-1 flex grow items-center truncate text-text-primary system-sm-semibold-uppercase"
>
<div>
{data.title}
@@ -268,7 +261,7 @@
</div>
)}
>
<div className="system-2xs-medium-uppercase ml-1 flex items-center justify-center rounded-[5px] border-[1px] border-text-warning px-[5px] py-[3px] text-text-warning ">
<div className="ml-1 flex items-center justify-center rounded-[5px] border-[1px] border-text-warning px-[5px] py-[3px] text-text-warning system-2xs-medium-uppercase">
{t('nodes.iteration.parallelModeUpper', { ns: 'workflow' })}
</div>
</Tooltip>
@@ -288,26 +281,26 @@
!!(data.type === BlockEnum.Loop && data._loopIndex) && LoopIndex
}
{
isLoading && <RiLoader2Line className="h-3.5 w-3.5 animate-spin text-text-accent" />
isLoading && <span className="i-ri-loader-2-line h-3.5 w-3.5 animate-spin text-text-accent" />
}
{
!isLoading && data._runningStatus === NodeRunningStatus.Failed && (
<RiErrorWarningFill className="h-3.5 w-3.5 text-text-destructive" />
<span className="i-ri-error-warning-fill h-3.5 w-3.5 text-text-destructive" />
)
}
{
!isLoading && data._runningStatus === NodeRunningStatus.Exception && (
<RiAlertFill className="h-3.5 w-3.5 text-text-warning-secondary" />
<span className="i-ri-alert-fill h-3.5 w-3.5 text-text-warning-secondary" />
)
}
{
!isLoading && (data._runningStatus === NodeRunningStatus.Succeeded || hasVarValue) && (
<RiCheckboxCircleFill className="h-3.5 w-3.5 text-text-success" />
!isLoading && (data._runningStatus === NodeRunningStatus.Succeeded || (hasVarValue && !data._runningStatus)) && (
<span className="i-ri-checkbox-circle-fill h-3.5 w-3.5 text-text-success" />
)
}
{
!isLoading && data._runningStatus === NodeRunningStatus.Paused && (
<RiPauseCircleFill className="h-3.5 w-3.5 text-text-warning-secondary" />
<span className="i-ri-pause-circle-fill h-3.5 w-3.5 text-text-warning-secondary" />
)
}
</div>
@@ -341,7 +334,7 @@
}
{
!!(data.desc && data.type !== BlockEnum.Iteration && data.type !== BlockEnum.Loop) && (
<div className="system-xs-regular whitespace-pre-line break-words px-3 pb-2 pt-1 text-text-tertiary">
<div className="whitespace-pre-line break-words px-3 pb-2 pt-1 text-text-tertiary system-xs-regular">
{data.desc}
</div>
)

View File

@@ -0,0 +1,567 @@
import type { ReactNode } from 'react'
import type { HumanInputNodeType } from '@/app/components/workflow/nodes/human-input/types'
import type {
Edge,
Node,
} from '@/app/components/workflow/types'
import { render, screen } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import { WORKFLOW_COMMON_NODES } from '@/app/components/workflow/constants/node'
import humanInputDefault from '@/app/components/workflow/nodes/human-input/default'
import HumanInputNode from '@/app/components/workflow/nodes/human-input/node'
import {
DeliveryMethodType,
UserActionButtonType,
} from '@/app/components/workflow/nodes/human-input/types'
import { BlockEnum } from '@/app/components/workflow/types'
import { initialNodes, preprocessNodesAndEdges } from '@/app/components/workflow/utils/workflow-init'
// Mock reactflow which is needed by initialNodes and NodeSourceHandle
vi.mock('reactflow', async () => {
const reactflow = await vi.importActual('reactflow')
return {
...reactflow,
Handle: ({ children }: { children?: ReactNode }) => <div data-testid="handle">{children}</div>,
}
})
// Minimal store state mirroring the fields that NodeSourceHandle selects
const mockStoreState = {
shouldAutoOpenStartNodeSelector: false,
setShouldAutoOpenStartNodeSelector: vi.fn(),
setHasSelectedStartNode: vi.fn(),
}
// Mock workflow store used by NodeSourceHandle
// useStore accepts a selector and applies it to the state, so tests break
// if the component starts selecting fields that aren't provided here.
vi.mock('@/app/components/workflow/store', () => ({
useStore: vi.fn((selector?: (s: typeof mockStoreState) => unknown) =>
selector ? selector(mockStoreState) : mockStoreState,
),
useWorkflowStore: vi.fn(() => ({
getState: () => ({
getNodes: () => [],
}),
})),
}))
// Mock workflow hooks barrel (used by NodeSourceHandle via ../../../hooks)
vi.mock('@/app/components/workflow/hooks', () => ({
useNodesInteractions: () => ({
handleNodeAdd: vi.fn(),
}),
useNodesReadOnly: () => ({
getNodesReadOnly: () => false,
nodesReadOnly: false,
}),
useAvailableBlocks: () => ({
availableNextBlocks: [],
availablePrevBlocks: [],
}),
useIsChatMode: () => false,
}))
// ── Factory: Build a realistic human-input node as it would appear after DSL import ──
const createHumanInputNode = (overrides?: Partial<HumanInputNodeType>): Node => ({
id: 'human-input-1',
type: 'custom',
position: { x: 400, y: 200 },
data: {
type: BlockEnum.HumanInput,
title: 'Human Input',
desc: 'Wait for human input',
delivery_methods: [
{
id: 'dm-1',
type: DeliveryMethodType.WebApp,
enabled: true,
},
{
id: 'dm-2',
type: DeliveryMethodType.Email,
enabled: true,
config: {
recipients: { whole_workspace: false, items: [] },
subject: 'Please review',
body: 'Please review the form',
debug_mode: false,
},
},
],
form_content: '# Review Form\nPlease fill in the details below.',
inputs: [
{
type: 'text-input',
output_variable_name: 'review_result',
default: { selector: [], type: 'constant' as const, value: '' },
},
],
user_actions: [
{
id: 'approve',
title: 'Approve',
button_style: UserActionButtonType.Primary,
},
{
id: 'reject',
title: 'Reject',
button_style: UserActionButtonType.Default,
},
],
timeout: 3,
timeout_unit: 'day' as const,
...overrides,
} as HumanInputNodeType,
})
const createStartNode = (): Node => ({
id: 'start-1',
type: 'custom',
position: { x: 100, y: 200 },
data: {
type: BlockEnum.Start,
title: 'Start',
desc: '',
} as Node['data'],
})
const createEdge = (source: string, target: string, sourceHandle = 'source', targetHandle = 'target'): Edge => ({
id: `${source}-${sourceHandle}-${target}-${targetHandle}`,
type: 'custom',
source,
sourceHandle,
target,
targetHandle,
data: {},
} as Edge)
describe('DSL Import with Human Input Node', () => {
// ── preprocessNodesAndEdges: human-input nodes pass through without error ──
describe('preprocessNodesAndEdges', () => {
it('should pass through a workflow containing a human-input node unchanged', () => {
const humanInputNode = createHumanInputNode()
const startNode = createStartNode()
const nodes = [startNode, humanInputNode]
const edges = [createEdge('start-1', 'human-input-1')]
const result = preprocessNodesAndEdges(nodes as Node[], edges as Edge[])
expect(result.nodes).toHaveLength(2)
expect(result.edges).toHaveLength(1)
expect(result.nodes).toEqual(nodes)
expect(result.edges).toEqual(edges)
})
it('should not treat human-input node as an iteration or loop node', () => {
const humanInputNode = createHumanInputNode()
const nodes = [humanInputNode]
const result = preprocessNodesAndEdges(nodes as Node[], [])
// No extra iteration/loop start nodes should be injected
expect(result.nodes).toHaveLength(1)
expect(result.nodes[0].data.type).toBe(BlockEnum.HumanInput)
})
})
// ── initialNodes: human-input nodes are properly initialized ──
describe('initialNodes', () => {
it('should initialize a human-input node with connected handle IDs', () => {
const humanInputNode = createHumanInputNode()
const startNode = createStartNode()
const nodes = [startNode, humanInputNode]
const edges = [createEdge('start-1', 'human-input-1')]
const result = initialNodes(nodes as Node[], edges as Edge[])
const processedHumanInput = result.find(n => n.id === 'human-input-1')
expect(processedHumanInput).toBeDefined()
expect(processedHumanInput!.data.type).toBe(BlockEnum.HumanInput)
// initialNodes sets _connectedSourceHandleIds and _connectedTargetHandleIds
expect(processedHumanInput!.data._connectedSourceHandleIds).toBeDefined()
expect(processedHumanInput!.data._connectedTargetHandleIds).toBeDefined()
})
it('should preserve human-input node data after initialization', () => {
const humanInputNode = createHumanInputNode()
const nodes = [humanInputNode]
const result = initialNodes(nodes as Node[], [])
const processed = result[0]
const nodeData = processed.data as HumanInputNodeType
expect(nodeData.delivery_methods).toHaveLength(2)
expect(nodeData.user_actions).toHaveLength(2)
expect(nodeData.form_content).toBe('# Review Form\nPlease fill in the details below.')
expect(nodeData.timeout).toBe(3)
expect(nodeData.timeout_unit).toBe('day')
})
it('should set node type to custom if not set', () => {
const humanInputNode = createHumanInputNode()
delete (humanInputNode as Record<string, unknown>).type
const result = initialNodes([humanInputNode] as Node[], [])
expect(result[0].type).toBe('custom')
})
})
// ── Node component: renders without crashing for all data variations ──
describe('HumanInputNode Component', () => {
it('should render without crashing with full DSL data', () => {
const node = createHumanInputNode()
expect(() => {
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
}).not.toThrow()
})
it('should display delivery method labels when methods are present', () => {
const node = createHumanInputNode()
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
// Delivery method type labels are rendered in lowercase
expect(screen.getByText('webapp')).toBeInTheDocument()
expect(screen.getByText('email')).toBeInTheDocument()
})
it('should display user action IDs', () => {
const node = createHumanInputNode()
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
expect(screen.getByText('approve')).toBeInTheDocument()
expect(screen.getByText('reject')).toBeInTheDocument()
})
it('should always display Timeout handle', () => {
const node = createHumanInputNode()
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
expect(screen.getByText('Timeout')).toBeInTheDocument()
})
it('should render without crashing when delivery_methods is empty', () => {
const node = createHumanInputNode({ delivery_methods: [] })
expect(() => {
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
}).not.toThrow()
// Delivery method section should not be rendered
expect(screen.queryByText('webapp')).not.toBeInTheDocument()
expect(screen.queryByText('email')).not.toBeInTheDocument()
})
it('should render without crashing when user_actions is empty', () => {
const node = createHumanInputNode({ user_actions: [] })
expect(() => {
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
}).not.toThrow()
// Timeout handle should still exist
expect(screen.getByText('Timeout')).toBeInTheDocument()
})
it('should render without crashing when both delivery_methods and user_actions are empty', () => {
const node = createHumanInputNode({
delivery_methods: [],
user_actions: [],
form_content: '',
inputs: [],
})
expect(() => {
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
}).not.toThrow()
})
it('should render with only webapp delivery method', () => {
const node = createHumanInputNode({
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: true },
],
})
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
expect(screen.getByText('webapp')).toBeInTheDocument()
expect(screen.queryByText('email')).not.toBeInTheDocument()
})
it('should render with multiple user actions', () => {
const node = createHumanInputNode({
user_actions: [
{ id: 'action_1', title: 'Approve', button_style: UserActionButtonType.Primary },
{ id: 'action_2', title: 'Reject', button_style: UserActionButtonType.Default },
{ id: 'action_3', title: 'Escalate', button_style: UserActionButtonType.Accent },
],
})
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
expect(screen.getByText('action_1')).toBeInTheDocument()
expect(screen.getByText('action_2')).toBeInTheDocument()
expect(screen.getByText('action_3')).toBeInTheDocument()
})
})
// ── Node registration: human-input is included in the workflow node registry ──
// Verify via WORKFLOW_COMMON_NODES (lightweight metadata-only imports) instead
// of NodeComponentMap/PanelComponentMap which pull in every node's heavy UI deps.
describe('Node Registration', () => {
it('should have HumanInput included in WORKFLOW_COMMON_NODES', () => {
const entry = WORKFLOW_COMMON_NODES.find(
n => n.metaData.type === BlockEnum.HumanInput,
)
expect(entry).toBeDefined()
})
})
// ── Default config & validation ──
describe('HumanInput Default Configuration', () => {
it('should provide default values for a new human-input node', () => {
const defaultValue = humanInputDefault.defaultValue
expect(defaultValue.delivery_methods).toEqual([])
expect(defaultValue.user_actions).toEqual([])
expect(defaultValue.form_content).toBe('')
expect(defaultValue.inputs).toEqual([])
expect(defaultValue.timeout).toBe(3)
expect(defaultValue.timeout_unit).toBe('day')
})
it('should validate that delivery methods are required', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [],
} as HumanInputNodeType
const result = humanInputDefault.checkValid(payload, t)
expect(result.isValid).toBe(false)
expect(result.errorMessage).toBeTruthy()
})
it('should validate that at least one delivery method is enabled', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: false },
],
user_actions: [
{ id: 'approve', title: 'Approve', button_style: UserActionButtonType.Primary },
],
} as HumanInputNodeType
const result = humanInputDefault.checkValid(payload, t)
expect(result.isValid).toBe(false)
})
it('should validate that user actions are required', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: true },
],
user_actions: [],
} as HumanInputNodeType
const result = humanInputDefault.checkValid(payload, t)
expect(result.isValid).toBe(false)
})
it('should validate that user action IDs are not duplicated', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: true },
],
user_actions: [
{ id: 'approve', title: 'Approve', button_style: UserActionButtonType.Primary },
{ id: 'approve', title: 'Also Approve', button_style: UserActionButtonType.Default },
],
} as HumanInputNodeType
const result = humanInputDefault.checkValid(payload, t)
expect(result.isValid).toBe(false)
})
it('should pass validation with correct configuration', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: true },
],
user_actions: [
{ id: 'approve', title: 'Approve', button_style: UserActionButtonType.Primary },
{ id: 'reject', title: 'Reject', button_style: UserActionButtonType.Default },
],
} as HumanInputNodeType
const result = humanInputDefault.checkValid(payload, t)
expect(result.isValid).toBe(true)
expect(result.errorMessage).toBe('')
})
})
// ── Output variables generation ──
describe('HumanInput Output Variables', () => {
it('should generate output variables from form inputs', () => {
const payload = {
...humanInputDefault.defaultValue,
inputs: [
{ type: 'text-input', output_variable_name: 'review_result', default: { selector: [], type: 'constant' as const, value: '' } },
{ type: 'text-input', output_variable_name: 'comment', default: { selector: [], type: 'constant' as const, value: '' } },
],
} as HumanInputNodeType
const outputVars = humanInputDefault.getOutputVars!(payload, {}, [])
expect(outputVars).toEqual([
{ variable: 'review_result', type: 'string' },
{ variable: 'comment', type: 'string' },
])
})
it('should return empty output variables when no form inputs exist', () => {
const payload = {
...humanInputDefault.defaultValue,
inputs: [],
} as HumanInputNodeType
const outputVars = humanInputDefault.getOutputVars!(payload, {}, [])
expect(outputVars).toEqual([])
})
})
// ── Full DSL import simulation: start → human-input → end ──
describe('Full Workflow with Human Input Node', () => {
it('should process a start → human-input → end workflow without errors', () => {
const startNode = createStartNode()
const humanInputNode = createHumanInputNode()
const endNode: Node = {
id: 'end-1',
type: 'custom',
position: { x: 700, y: 200 },
data: {
type: BlockEnum.End,
title: 'End',
desc: '',
outputs: [],
} as Node['data'],
}
const nodes = [startNode, humanInputNode, endNode]
const edges = [
createEdge('start-1', 'human-input-1'),
createEdge('human-input-1', 'end-1', 'approve', 'target'),
]
const processed = preprocessNodesAndEdges(nodes as Node[], edges as Edge[])
expect(processed.nodes).toHaveLength(3)
expect(processed.edges).toHaveLength(2)
const initialized = initialNodes(nodes as Node[], edges as Edge[])
expect(initialized).toHaveLength(3)
// All node types should be preserved
const types = initialized.map(n => n.data.type)
expect(types).toContain(BlockEnum.Start)
expect(types).toContain(BlockEnum.HumanInput)
expect(types).toContain(BlockEnum.End)
})
it('should handle multiple branches from human-input user actions', () => {
const startNode = createStartNode()
const humanInputNode = createHumanInputNode()
const approveEndNode: Node = {
id: 'approve-end',
type: 'custom',
position: { x: 700, y: 100 },
data: { type: BlockEnum.End, title: 'Approve End', desc: '', outputs: [] } as Node['data'],
}
const rejectEndNode: Node = {
id: 'reject-end',
type: 'custom',
position: { x: 700, y: 300 },
data: { type: BlockEnum.End, title: 'Reject End', desc: '', outputs: [] } as Node['data'],
}
const nodes = [startNode, humanInputNode, approveEndNode, rejectEndNode]
const edges = [
createEdge('start-1', 'human-input-1'),
createEdge('human-input-1', 'approve-end', 'approve', 'target'),
createEdge('human-input-1', 'reject-end', 'reject', 'target'),
]
const initialized = initialNodes(nodes as Node[], edges as Edge[])
expect(initialized).toHaveLength(4)
// Human input node should still have correct data
const hiNode = initialized.find(n => n.id === 'human-input-1')!
expect((hiNode.data as HumanInputNodeType).user_actions).toHaveLength(2)
expect((hiNode.data as HumanInputNodeType).delivery_methods).toHaveLength(2)
})
})
})
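
One detail of this test file worth calling out: the store mock applies the selector just like the real hook, so a component that starts selecting an unmocked field fails visibly instead of silently receiving the whole state object. The pattern in isolation (the `count`/`increment` fields here are hypothetical, not from the test):

```ts
import { vi } from 'vitest'

// Shape of the state the component under test may select from.
const mockState = { count: 1, increment: vi.fn() }

// Apply the selector exactly like a zustand-style hook would; tests then
// break loudly if the component selects a field not provided here.
export const useStore = vi.fn(
  (selector?: (s: typeof mockState) => unknown) =>
    selector ? selector(mockState) : mockState,
)
```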


@@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next'
import { InputNumber } from '@/app/components/base/input-number'
import Switch from '@/app/components/base/switch'
import Tooltip from '@/app/components/base/tooltip'
import { env } from '@/env'
export type TopKAndScoreThresholdProps = {
topK: number
@@ -15,12 +16,7 @@ export type TopKAndScoreThresholdProps = {
hiddenScoreThreshold?: boolean
}
const maxTopK = (() => {
const configValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10)
if (configValue && !isNaN(configValue))
return configValue
return 10
})()
const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE
const TOP_K_VALUE_LIMIT = {
amount: 1,
min: 1,
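
The hand-rolled DOM-attribute read with its parse-and-fallback is replaced by a typed `env` module. The diff does not show `@/env` itself; the following is a hypothetical sketch of such a module, assuming it validates `process.env` with zod and keeps the previous default of 10:

```ts
import * as z from 'zod'

// NEXT_PUBLIC_* values arrive as strings (or are absent), so coerce and default.
const clientSchema = z.object({
  NEXT_PUBLIC_TOP_K_MAX_VALUE: z.coerce.number().int().min(1).default(10),
})

export const env = clientSchema.parse({
  NEXT_PUBLIC_TOP_K_MAX_VALUE: process.env.NEXT_PUBLIC_TOP_K_MAX_VALUE,
})
```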


@@ -1,6 +1,6 @@
import type { ValidationError } from 'jsonschema'
import type { ArrayItems, Field, LLMNodeType } from './types'
import { z } from 'zod'
import * as z from 'zod'
import { draft07Validator, forbidBooleanProperties } from '@/utils/validators'
import { ArrayType, Type } from './types'
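
This one-line import change recurs in the zod-related files below. Both forms expose the same API; the namespace form is the style the zod v4 documentation uses. Trivially:

```ts
import * as z from 'zod'

const schema = z.object({ name: z.string() })
schema.parse({ name: 'dify' }) // => { name: 'dify' }
```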


@@ -1,7 +1,3 @@
import {
RiClipboardLine,
RiCloseLine,
} from '@remixicon/react'
import copy from 'copy-to-clipboard'
import {
memo,
@@ -115,9 +111,9 @@ const WorkflowPreview = () => {
onMouseDown={startResizing}
/>
<div className="flex items-center justify-between p-4 pb-1 text-base font-semibold text-text-primary">
{`Test Run${formatWorkflowRunIdentifier(workflowRunningData?.result.finished_at)}`}
{`Test Run${formatWorkflowRunIdentifier(workflowRunningData?.result.finished_at, workflowRunningData?.result.status)}`}
<div className="cursor-pointer p-1" onClick={() => handleCancelDebugAndPreviewPanel()}>
<RiCloseLine className="h-4 w-4 text-text-tertiary" />
<span className="i-ri-close-line h-4 w-4 text-text-tertiary" />
</div>
</div>
<div className="relative flex grow flex-col">
@@ -217,7 +213,7 @@ const WorkflowPreview = () => {
Toast.notify({ type: 'success', message: t('actionMsg.copySuccessfully', { ns: 'common' }) })
}}
>
<RiClipboardLine className="h-3.5 w-3.5" />
<span className="i-ri-clipboard-line h-3.5 w-3.5" />
<div>{t('operation.copy', { ns: 'common' })}</div>
</Button>
)}


@@ -18,25 +18,25 @@ const StatusContainer: FC<Props> = ({
return (
<div
className={cn(
'system-xs-regular relative break-all rounded-lg border px-3 py-2.5',
'relative break-all rounded-lg border px-3 py-2.5 system-xs-regular',
status === 'succeeded' && 'border-[rgba(23,178,106,0.8)] bg-workflow-display-success-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-success.svg)] text-text-success',
status === 'succeeded' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(23,178,106,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'partial-succeeded' && 'border-[rgba(23,178,106,0.8)] bg-workflow-display-success-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-success.svg)] text-text-success',
status === 'partial-succeeded' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(23,178,106,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'partial-succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'partial-succeeded' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(23,178,106,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'failed' && 'border-[rgba(240,68,56,0.8)] bg-workflow-display-error-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-error.svg)] text-text-warning',
status === 'failed' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(240,68,56,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'failed' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(240,68,56,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'failed' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(240,68,56,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
(status === 'stopped' || status === 'paused') && 'border-[rgba(247,144,9,0.8)] bg-workflow-display-warning-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-warning.svg)] text-text-destructive',
(status === 'stopped' || status === 'paused') && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(247,144,9,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
(status === 'stopped' || status === 'paused') && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
(status === 'stopped' || status === 'paused') && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'exception' && 'border-[rgba(247,144,9,0.8)] bg-workflow-display-warning-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-warning.svg)] text-text-destructive',
status === 'exception' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(247,144,9,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'exception' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'exception' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(247,144,9,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
status === 'running' && 'border-[rgba(11,165,236,0.8)] bg-workflow-display-normal-bg bg-[url(~@/app/components/workflow/run/assets/bg-line-running.svg)] text-util-colors-blue-light-blue-light-600',
status === 'running' && theme === Theme.light && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.5),inset_0_1px_3px_0_rgba(0,0,0,0.12),inset_0_2px_24px_0_rgba(11,165,236,0.2),0_1px_2px_0_rgba(9,9,11,0.05),0_0_0_1px_rgba(0,0,0,0.05)]',
status === 'running' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(11,165,236,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24, 24, 27, 0.95)]',
status === 'running' && theme === Theme.dark && 'shadow-[inset_2px_2px_0_0_rgba(255,255,255,0.12),inset_0_1px_3px_0_rgba(0,0,0,0.4),inset_0_2px_24px_0_rgba(11,165,236,0.25),0_1px_2px_0_rgba(0,0,0,0.1),0_0_0_1px_rgba(24,24,27,0.95)]',
)}
>
<div className={cn(
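
Besides moving `system-xs-regular` to the end of the class list, the substantive fix in this hunk is whitespace: a literal space inside a Tailwind arbitrary value such as `rgba(24, 24, 27, 0.95)` ends the class token, so the intended shadow was never generated. A minimal illustration (component names are hypothetical):

```tsx
export const Broken = () => (
  // The spaces after each comma split this into several tokens; none is a valid class.
  <div className="shadow-[0_0_0_1px_rgba(24, 24, 27, 0.95)]" />
)

export const Fixed = () => (
  // No spaces inside the brackets; underscores stand in for visual spacing.
  <div className="shadow-[0_0_0_1px_rgba(24,24,27,0.95)]" />
)
```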


@@ -41,8 +41,10 @@ export const isEventTargetInputArea = (target: HTMLElement) => {
* @returns Formatted string like " (14:30:25)" or " (Running)"
*/
export const formatWorkflowRunIdentifier = (finishedAt?: number, fallbackText = 'Running'): string => {
if (!finishedAt)
return ` (${fallbackText})`
if (!finishedAt) {
const capitalized = fallbackText.charAt(0).toUpperCase() + fallbackText.slice(1)
return ` (${capitalized})`
}
const date = new Date(finishedAt * 1000)
const timeStr = date.toLocaleTimeString([], {
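
With the caller shown earlier passing `workflowRunningData?.result.status` as the second argument, the fallback may now arrive as a lowercase status string, hence the added capitalization step. A simplified sketch of the observable behavior (time output is locale-dependent; the real helper's formatting options are omitted):

```ts
const formatWorkflowRunIdentifier = (finishedAt?: number, fallbackText = 'Running'): string => {
  if (!finishedAt) {
    // Capitalize so raw statuses like 'running' render as 'Running'.
    const capitalized = fallbackText.charAt(0).toUpperCase() + fallbackText.slice(1)
    return ` (${capitalized})`
  }
  return ` (${new Date(finishedAt * 1000).toLocaleTimeString()})`
}

formatWorkflowRunIdentifier(undefined, 'running') // ' (Running)'
formatWorkflowRunIdentifier(undefined)            // ' (Running)'
formatWorkflowRunIdentifier(1_739_260_225)        // e.g. ' (2:30:25 PM)', locale-dependent
```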


@@ -1,4 +1,4 @@
import { z } from 'zod'
import * as z from 'zod'
const arrayStringSchemaParttern = z.array(z.string())
const arrayNumberSchemaParttern = z.array(z.number())
@@ -7,7 +7,7 @@ const arrayNumberSchemaParttern = z.array(z.number())
const literalSchema = z.union([z.string(), z.number(), z.boolean(), z.null()])
type Literal = z.infer<typeof literalSchema>
type Json = Literal | { [key: string]: Json } | Json[]
const jsonSchema: z.ZodType<Json> = z.lazy(() => z.union([literalSchema, z.array(jsonSchema), z.record(jsonSchema)]))
const jsonSchema: z.ZodType<Json> = z.lazy(() => z.union([literalSchema, z.array(jsonSchema), z.record(z.string(), jsonSchema)]))
const arrayJsonSchema: z.ZodType<Json[]> = z.lazy(() => z.array(jsonSchema))
export const validateJSONSchema = (schema: any, type: string) => {
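
This matches zod v4, where `z.record` no longer accepts a single argument and the key schema must be given explicitly. A small sketch:

```ts
import * as z from 'zod'

// zod v3 allowed z.record(z.number()) with an implicit string key.
// zod v4 requires the key schema explicitly:
const scores = z.record(z.string(), z.number())

scores.parse({ alice: 1, bob: 2 }) // ok
// scores.parse({ alice: 'x' })    // throws: value is not a number
```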


@@ -7,7 +7,7 @@ import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { z } from 'zod'
import * as z from 'zod'
import Button from '@/app/components/base/button'
import { formContext, useAppForm } from '@/app/components/base/form'
import { zodSubmitValidator } from '@/app/components/base/form/utils/zod-submit-validator'
@@ -22,10 +22,10 @@ import Input from '../components/base/input'
import Loading from '../components/base/loading'
const accountFormSchema = z.object({
email: z
.string()
.min(1, { message: 'error.emailInValid' })
.email('error.emailInValid'),
email: z.email('error.emailInValid')
.min(1, {
error: 'error.emailInValid',
}),
})
const ForgotPasswordForm = () => {
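
Likewise, zod v4 promotes string formats to top-level factories (`z.string().email(msg)` becomes `z.email(msg)`), and per-check messages use the `error` key, as the hunk above shows. A sketch of the resulting behavior:

```ts
import * as z from 'zod'

const emailSchema = z.email('error.emailInValid')

emailSchema.safeParse('user@example.com').success // true
emailSchema.safeParse('not-an-email').success     // false
```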

Some files were not shown because too many files have changed in this diff.