Mirror of https://github.com/langgenius/dify.git (synced 2026-02-12 06:15:46 +08:00)

Compare commits: test/tool-… → 1.13.0

27 Commits:
c730fec1e4
b4fec9b7aa
7e0bccbbf0
2f87ecc0ce
5b4c7b2a40
378a1d7d08
ce0192620d
e9feeedc01
e32490f54e
e9db50f781
0310f631ee
abc5a61e98
5f1698add6
36e50f277f
704ee40caa
3119c99979
16b8733886
83f64104fd
5077879886
697b57631a
6015f23e79
f355c8d595
0142001fc2
4058e9ae23
95310561ec
de33561a52
6d9665578b
@@ -715,6 +715,7 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5

# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
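Note: a minimal sketch (not part of the diff) of how the SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL knob is consumed by the cleanup services changed later in this compare — a jittered pause between deletion batches; the env lookup and the 200 ms default mirror those hunks:

    import os
    import random
    import time

    # Milliseconds; same default the cleanup services below use.
    max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))

    def sleep_between_batches() -> None:
        # Sleep a random duration in [0, max_batch_interval_ms) so batched
        # deletes do not hit the database in lockstep.
        sleep_ms = random.uniform(0, max_batch_interval_ms)
        time.sleep(sleep_ms / 1000)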
api/.vscode/launch.json.example (vendored, 2 lines changed)
@@ -54,7 +54,7 @@
            "--loglevel",
            "DEBUG",
            "-Q",
-            "dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
+            "dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,workflow_based_app_execution,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
        ]
    }
]
@@ -1344,6 +1344,10 @@ class SandboxExpiredRecordsCleanConfig(BaseSettings):
        description="Maximum number of records to process in each batch",
        default=1000,
    )
+    SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
+        description="Maximum interval in milliseconds between batches",
+        default=200,
+    )
    SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field(
        description="Retention days for sandbox expired workflow_run records and message records",
        default=30,
@@ -259,11 +259,20 @@ class CeleryConfig(DatabaseConfig):
        description="Password of the Redis Sentinel master.",
        default=None,
    )

    CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
        description="Timeout for Redis Sentinel socket operations in seconds.",
        default=0.1,
    )

+    CELERY_TASK_ANNOTATIONS: dict[str, Any] | None = Field(
+        description=(
+            "Annotations for Celery tasks as a JSON mapping of task name -> options "
+            "(for example, rate limits or other task-specific settings)."
+        ),
+        default=None,
+    )
+
    @computed_field
    def CELERY_RESULT_BACKEND(self) -> str | None:
        if self.CELERY_BACKEND in ("database", "rabbitmq"):
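Note: a minimal sketch of how a value for CELERY_TASK_ANNOTATIONS is typically applied; `task_annotations` is standard Celery configuration, while the task name "tasks.add" and the rate limit are hypothetical:

    from celery import Celery

    app = Celery("example")
    # Equivalent to CELERY_TASK_ANNOTATIONS='{"tasks.add": {"rate_limit": "10/s"}}'
    # in the environment: Celery applies the mapped options to the matching task.
    app.conf.task_annotations = {"tasks.add": {"rate_limit": "10/s"}}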
@@ -21,6 +21,7 @@ language_timezone_mapping = {
    "th-TH": "Asia/Bangkok",
    "id-ID": "Asia/Jakarta",
    "ar-TN": "Africa/Tunis",
+    "nl-NL": "Europe/Amsterdam",
}

languages = list(language_timezone_mapping.keys())
@@ -599,7 +599,12 @@ def _get_conversation(app_model, conversation_id):
    db.session.execute(
        sa.update(Conversation)
        .where(Conversation.id == conversation_id, Conversation.read_at.is_(None))
-        .values(read_at=naive_utc_now(), read_account_id=current_user.id)
+        # Keep updated_at unchanged when only marking a conversation as read.
+        .values(
+            read_at=naive_utc_now(),
+            read_account_id=current_user.id,
+            updated_at=Conversation.updated_at,
+        )
    )
    db.session.commit()
    db.session.refresh(conversation)
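Note: a self-contained sketch of the pattern in the hunk above — assigning a column to itself in values() compiles to a no-op SET updated_at = <table>.updated_at, which also overrides any onupdate default on the column. The Note model here is hypothetical:

    import datetime

    import sqlalchemy as sa
    from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


    class Base(DeclarativeBase):
        pass


    class Note(Base):
        __tablename__ = "notes"
        id: Mapped[int] = mapped_column(primary_key=True)
        read_at: Mapped[datetime.datetime | None] = mapped_column(default=None)
        # onupdate would normally bump this column on every UPDATE.
        updated_at: Mapped[datetime.datetime] = mapped_column(
            default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow
        )


    stmt = sa.update(Note).where(Note.id == 1).values(
        read_at=datetime.datetime.utcnow(),
        updated_at=Note.updated_at,  # explicit no-op assignment suppresses onupdate
    )
    print(stmt)  # ... SET read_at=:read_at, updated_at=notes.updated_at WHERE ...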
@@ -1,6 +1,7 @@
import urllib.parse

import httpx
+from flask_restx import Resource
from pydantic import BaseModel, Field

import services
@@ -10,12 +11,12 @@ from controllers.common.errors import (
    RemoteFileUploadError,
    UnsupportedFileTypeError,
)
-from controllers.fastopenapi import console_router
+from controllers.console import console_ns
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from extensions.ext_database import db
from fields.file_fields import FileWithSignedUrl, RemoteFileInfo
-from libs.login import current_account_with_tenant
+from libs.login import current_account_with_tenant, login_required
from services.file_service import FileService
@@ -23,69 +24,73 @@ class RemoteFileUploadPayload(BaseModel):
    url: str = Field(..., description="URL to fetch")


-@console_router.get(
-    "/remote-files/<path:url>",
-    response_model=RemoteFileInfo,
-    tags=["console"],
-)
-def get_remote_file_info(url: str) -> RemoteFileInfo:
-    decoded_url = urllib.parse.unquote(url)
-    resp = ssrf_proxy.head(decoded_url)
-    if resp.status_code != httpx.codes.OK:
-        resp = ssrf_proxy.get(decoded_url, timeout=3)
-        resp.raise_for_status()
-    return RemoteFileInfo(
-        file_type=resp.headers.get("Content-Type", "application/octet-stream"),
-        file_length=int(resp.headers.get("Content-Length", 0)),
-    )
+@console_ns.route("/remote-files/<path:url>")
+class GetRemoteFileInfo(Resource):
+    @login_required
+    def get(self, url: str):
+        decoded_url = urllib.parse.unquote(url)
+        resp = ssrf_proxy.head(decoded_url)
+        if resp.status_code != httpx.codes.OK:
+            resp = ssrf_proxy.get(decoded_url, timeout=3)
+            resp.raise_for_status()
+        return RemoteFileInfo(
+            file_type=resp.headers.get("Content-Type", "application/octet-stream"),
+            file_length=int(resp.headers.get("Content-Length", 0)),
+        ).model_dump(mode="json")


-@console_router.post(
-    "/remote-files/upload",
-    response_model=FileWithSignedUrl,
-    tags=["console"],
-    status_code=201,
-)
-def upload_remote_file(payload: RemoteFileUploadPayload) -> FileWithSignedUrl:
-    url = payload.url
-
-    try:
-        resp = ssrf_proxy.head(url=url)
-        if resp.status_code != httpx.codes.OK:
-            resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
-        if resp.status_code != httpx.codes.OK:
-            raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
-    except httpx.RequestError as e:
-        raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
-
-    file_info = helpers.guess_file_info_from_response(resp)
-
-    if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
-        raise FileTooLargeError
-
-    content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
-
-    try:
-        user, _ = current_account_with_tenant()
-        upload_file = FileService(db.engine).upload_file(
-            filename=file_info.filename,
-            content=content,
-            mimetype=file_info.mimetype,
-            user=user,
-            source_url=url,
-        )
-    except services.errors.file.FileTooLargeError as file_too_large_error:
-        raise FileTooLargeError(file_too_large_error.description)
-    except services.errors.file.UnsupportedFileTypeError:
-        raise UnsupportedFileTypeError()
-
-    return FileWithSignedUrl(
-        id=upload_file.id,
-        name=upload_file.name,
-        size=upload_file.size,
-        extension=upload_file.extension,
-        url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
-        mime_type=upload_file.mime_type,
-        created_by=upload_file.created_by,
-        created_at=int(upload_file.created_at.timestamp()),
-    )
+@console_ns.route("/remote-files/upload")
+class RemoteFileUpload(Resource):
+    @login_required
+    def post(self):
+        payload = RemoteFileUploadPayload.model_validate(console_ns.payload)
+        url = payload.url
+
+        # Try to fetch remote file metadata/content first
+        try:
+            resp = ssrf_proxy.head(url=url)
+            if resp.status_code != httpx.codes.OK:
+                resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
+            if resp.status_code != httpx.codes.OK:
+                # Normalize into a user-friendly error message expected by tests
+                raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
+        except httpx.RequestError as e:
+            raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
+
+        file_info = helpers.guess_file_info_from_response(resp)
+
+        # Enforce file size limit with 400 (Bad Request) per tests' expectation
+        if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
+            raise FileTooLargeError()
+
+        # Load content if needed
+        content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
+
+        try:
+            user, _ = current_account_with_tenant()
+            upload_file = FileService(db.engine).upload_file(
+                filename=file_info.filename,
+                content=content,
+                mimetype=file_info.mimetype,
+                user=user,
+                source_url=url,
+            )
+        except services.errors.file.FileTooLargeError as file_too_large_error:
+            raise FileTooLargeError(file_too_large_error.description)
+        except services.errors.file.UnsupportedFileTypeError:
+            raise UnsupportedFileTypeError()
+
+        # Success: return created resource with 201 status
+        return (
+            FileWithSignedUrl(
+                id=upload_file.id,
+                name=upload_file.name,
+                size=upload_file.size,
+                extension=upload_file.extension,
+                url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
+                mime_type=upload_file.mime_type,
+                created_by=upload_file.created_by,
+                created_at=int(upload_file.created_at.timestamp()),
+            ).model_dump(mode="json"),
+            201,
+        )
@@ -42,7 +42,15 @@ class SetupResponse(BaseModel):
    tags=["console"],
)
def get_setup_status_api() -> SetupStatusResponse:
-    """Get system setup status."""
+    """Get system setup status.
+
+    NOTE: This endpoint is unauthenticated by design.
+
+    During first-time bootstrap there is no admin account yet, so frontend initialization must be
+    able to query setup progress before any login flow exists.
+
+    Only bootstrap-safe status information should be returned by this endpoint.
+    """
    if dify_config.EDITION == "SELF_HOSTED":
        setup_status = get_setup_status()
        if setup_status and not isinstance(setup_status, bool):
@@ -61,7 +69,12 @@ def get_setup_status_api() -> SetupStatusResponse:
)
@only_edition_self_hosted
def setup_system(payload: SetupRequestPayload) -> SetupResponse:
-    """Initialize system setup with admin account."""
+    """Initialize system setup with admin account.
+
+    NOTE: This endpoint is unauthenticated by design for first-time bootstrap.
+    Access is restricted by deployment mode (`SELF_HOSTED`), one-time setup guards,
+    and init-password validation rather than user session authentication.
+    """
    if get_setup_status():
        raise AlreadySetupError()
@@ -34,7 +34,7 @@ def stream_topic_events(
    on_subscribe()
    while True:
        try:
-            msg = sub.receive(timeout=0.1)
+            msg = sub.receive(timeout=1)
        except SubscriptionClosedError:
            return
        if msg is None:
@@ -45,6 +45,8 @@ from core.app.entities.task_entities import (
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
+from core.file import helpers as file_helpers
+from core.file.enums import FileTransferMethod
from core.model_manager import ModelInstance
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
@@ -56,10 +58,11 @@ from core.ops.entities.trace_entity import TraceTaskName
from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
from core.prompt.utils.prompt_message_util import PromptMessageUtil
from core.prompt.utils.prompt_template_parser import PromptTemplateParser
+from core.tools.signature import sign_tool_file
from events.message_event import message_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
-from models.model import AppMode, Conversation, Message, MessageAgentThought
+from models.model import AppMode, Conversation, Message, MessageAgentThought, MessageFile, UploadFile

logger = logging.getLogger(__name__)
@@ -463,6 +466,85 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
            metadata=metadata_dict,
        )

+    def _record_files(self):
+        with Session(db.engine, expire_on_commit=False) as session:
+            message_files = session.scalars(select(MessageFile).where(MessageFile.message_id == self._message_id)).all()
+            if not message_files:
+                return None
+
+            files_list = []
+            upload_file_ids = [
+                mf.upload_file_id
+                for mf in message_files
+                if mf.transfer_method == FileTransferMethod.LOCAL_FILE and mf.upload_file_id
+            ]
+            upload_files_map = {}
+            if upload_file_ids:
+                upload_files = session.scalars(select(UploadFile).where(UploadFile.id.in_(upload_file_ids))).all()
+                upload_files_map = {uf.id: uf for uf in upload_files}
+
+            for message_file in message_files:
+                upload_file = None
+                if message_file.transfer_method == FileTransferMethod.LOCAL_FILE and message_file.upload_file_id:
+                    upload_file = upload_files_map.get(message_file.upload_file_id)
+
+                url = None
+                filename = "file"
+                mime_type = "application/octet-stream"
+                size = 0
+                extension = ""
+
+                if message_file.transfer_method == FileTransferMethod.REMOTE_URL:
+                    url = message_file.url
+                    if message_file.url:
+                        filename = message_file.url.split("/")[-1].split("?")[0]  # Remove query params
+                elif message_file.transfer_method == FileTransferMethod.LOCAL_FILE:
+                    if upload_file:
+                        url = file_helpers.get_signed_file_url(upload_file_id=str(upload_file.id))
+                        filename = upload_file.name
+                        mime_type = upload_file.mime_type or "application/octet-stream"
+                        size = upload_file.size or 0
+                        extension = f".{upload_file.extension}" if upload_file.extension else ""
+                    elif message_file.upload_file_id:
+                        # Fallback: generate URL even if upload_file not found
+                        url = file_helpers.get_signed_file_url(upload_file_id=str(message_file.upload_file_id))
+                elif message_file.transfer_method == FileTransferMethod.TOOL_FILE and message_file.url:
+                    # For tool files, use URL directly if it's HTTP, otherwise sign it
+                    if message_file.url.startswith("http"):
+                        url = message_file.url
+                        filename = message_file.url.split("/")[-1].split("?")[0]
+                    else:
+                        # Extract tool file id and extension from URL
+                        url_parts = message_file.url.split("/")
+                        if url_parts:
+                            file_part = url_parts[-1].split("?")[0]  # Remove query params first
+                            # Use rsplit to correctly handle filenames with multiple dots
+                            if "." in file_part:
+                                tool_file_id, ext = file_part.rsplit(".", 1)
+                                extension = f".{ext}"
+                            else:
+                                tool_file_id = file_part
+                                extension = ".bin"
+                            url = sign_tool_file(tool_file_id=tool_file_id, extension=extension)
+                            filename = file_part
+
+                transfer_method_value = message_file.transfer_method
+                remote_url = message_file.url if message_file.transfer_method == FileTransferMethod.REMOTE_URL else ""
+                file_dict = {
+                    "related_id": message_file.id,
+                    "extension": extension,
+                    "filename": filename,
+                    "size": size,
+                    "mime_type": mime_type,
+                    "transfer_method": transfer_method_value,
+                    "type": message_file.type,
+                    "url": url or "",
+                    "upload_file_id": message_file.upload_file_id or message_file.id,
+                    "remote_url": remote_url,
+                }
+                files_list.append(file_dict)
+        return files_list or None

    def _agent_message_to_stream_response(self, answer: str, message_id: str) -> AgentMessageStreamResponse:
        """
        Agent message to stream response.
@@ -64,7 +64,13 @@ class MessageCycleManager:

        # Use SQLAlchemy 2.x style session.scalar(select(...))
        with session_factory.create_session() as session:
-            message_file = session.scalar(select(MessageFile).where(MessageFile.message_id == message_id))
+            message_file = session.scalar(
+                select(MessageFile)
+                .where(
+                    MessageFile.message_id == message_id,
+                )
+                .where(MessageFile.belongs_to == "assistant")
+            )

        if message_file:
            self._message_has_file.add(message_id)
@@ -80,8 +80,14 @@ def init_app(app: DifyApp) -> Celery:
        worker_hijack_root_logger=False,
        timezone=pytz.timezone(dify_config.LOG_TZ or "UTC"),
        task_ignore_result=True,
+        task_annotations=dify_config.CELERY_TASK_ANNOTATIONS,
    )

+    if dify_config.CELERY_BACKEND == "redis":
+        celery_app.conf.update(
+            result_backend_transport_options=broker_transport_options,
+        )
+
    # Apply SSL configuration if enabled
    ssl_options = _get_celery_ssl_options()
    if ssl_options:
@@ -119,7 +119,7 @@ class RedisClientWrapper:


redis_client: RedisClientWrapper = RedisClientWrapper()
-pubsub_redis_client: RedisClientWrapper = RedisClientWrapper()
+_pubsub_redis_client: redis.Redis | RedisCluster | None = None


def _get_ssl_configuration() -> tuple[type[Union[Connection, SSLConnection]], dict[str, Any]]:
@@ -232,7 +232,7 @@ def _create_standalone_client(redis_params: dict[str, Any]) -> Union[redis.Redis, RedisCluster]:
    return client


-def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> Union[redis.Redis, RedisCluster]:
+def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> redis.Redis | RedisCluster:
    if use_clusters:
        return RedisCluster.from_url(pubsub_url)
    return redis.Redis.from_url(pubsub_url)
@@ -256,23 +256,19 @@ def init_app(app: DifyApp):
    redis_client.initialize(client)
    app.extensions["redis"] = redis_client

-    pubsub_client = client
+    global _pubsub_redis_client
+    _pubsub_redis_client = client
    if dify_config.normalized_pubsub_redis_url:
-        pubsub_client = _create_pubsub_client(
+        _pubsub_redis_client = _create_pubsub_client(
            dify_config.normalized_pubsub_redis_url, dify_config.PUBSUB_REDIS_USE_CLUSTERS
        )
-    pubsub_redis_client.initialize(pubsub_client)
-
-
-def get_pubsub_redis_client() -> RedisClientWrapper:
-    return pubsub_redis_client


def get_pubsub_broadcast_channel() -> BroadcastChannelProtocol:
-    redis_conn = get_pubsub_redis_client()
+    assert _pubsub_redis_client is not None, "PubSub redis Client should be initialized here."
    if dify_config.PUBSUB_REDIS_CHANNEL_TYPE == "sharded":
-        return ShardedRedisBroadcastChannel(redis_conn)  # pyright: ignore[reportArgumentType]
-    return RedisBroadcastChannel(redis_conn)  # pyright: ignore[reportArgumentType]
+        return ShardedRedisBroadcastChannel(_pubsub_redis_client)
+    return RedisBroadcastChannel(_pubsub_redis_client)


P = ParamSpec("P")
@@ -152,7 +152,7 @@ class RedisSubscriptionBase(Subscription):
        """Iterator for consuming messages from the subscription."""
        while not self._closed.is_set():
            try:
-                item = self._queue.get(timeout=0.1)
+                item = self._queue.get(timeout=1)
            except queue.Empty:
                continue
@@ -1,7 +1,7 @@
from __future__ import annotations

from libs.broadcast_channel.channel import Producer, Subscriber, Subscription
-from redis import Redis
+from redis import Redis, RedisCluster

from ._subscription import RedisSubscriptionBase

@@ -18,7 +18,7 @@ class BroadcastChannel:

    def __init__(
        self,
-        redis_client: Redis,
+        redis_client: Redis | RedisCluster,
    ):
        self._client = redis_client

@@ -27,7 +27,7 @@ class BroadcastChannel:


class Topic:
-    def __init__(self, redis_client: Redis, topic: str):
+    def __init__(self, redis_client: Redis | RedisCluster, topic: str):
        self._client = redis_client
        self._topic = topic
@@ -70,8 +70,9 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
        # Since we have already filtered at the caller's site, we can safely set
        # `ignore_subscribe_messages=False`.
        if isinstance(self._client, RedisCluster):
-            # NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message`
-            # would use busy-looping to wait for incoming message, consuming excessive CPU quota.
+            # NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message` without
+            # specifying the `target_node` argument would use busy-looping to wait
+            # for incoming message, consuming excessive CPU quota.
            #
            # Here we specify the `target_node` to mitigate this problem.
            node = self._client.get_node_from_key(self._topic)
@@ -80,8 +81,10 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
                timeout=1,
                target_node=node,
            )
-        else:
+        elif isinstance(self._client, Redis):
            return self._pubsub.get_sharded_message(ignore_subscribe_messages=False, timeout=1)  # type: ignore[attr-defined]
+        else:
+            raise AssertionError("client should be either Redis or RedisCluster.")

    def _get_message_type(self) -> str:
        return "smessage"
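Note: a standalone sketch of the target_node mitigation shown above, using redis-py's sharded pub/sub API; the cluster address and topic name are placeholders:

    from redis.cluster import RedisCluster

    client = RedisCluster.from_url("redis://localhost:7000/0")  # placeholder address
    pubsub = client.pubsub()
    pubsub.ssubscribe("my-topic")  # sharded subscribe

    # Poll the node that owns the topic's hash slot so get_sharded_message
    # blocks there instead of busy-looping across the cluster.
    node = client.get_node_from_key("my-topic")
    msg = pubsub.get_sharded_message(ignore_subscribe_messages=False, timeout=1, target_node=node)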
@@ -0,0 +1,59 @@
"""add unique constraint to tenant_default_models

Revision ID: fix_tenant_default_model_unique
Revises: 9d77545f524e
Create Date: 2026-01-19 15:07:00.000000

"""
from alembic import op
import sqlalchemy as sa


def _is_pg(conn):
    return conn.dialect.name == "postgresql"


# revision identifiers, used by Alembic.
revision = 'f55813ffe2c8'
down_revision = 'c3df22613c99'
branch_labels = None
depends_on = None


def upgrade():
    # First, remove duplicate records keeping only the most recent one per (tenant_id, model_type)
    # This is necessary before adding the unique constraint
    conn = op.get_bind()

    # Delete duplicates: keep the record with the latest updated_at for each (tenant_id, model_type)
    # If updated_at is the same, keep the one with the largest id as tiebreaker
    if _is_pg(conn):
        # PostgreSQL: Use DISTINCT ON for efficient deduplication
        conn.execute(sa.text("""
            DELETE FROM tenant_default_models
            WHERE id NOT IN (
                SELECT DISTINCT ON (tenant_id, model_type) id
                FROM tenant_default_models
                ORDER BY tenant_id, model_type, updated_at DESC, id DESC
            )
        """))
    else:
        # MySQL: Use self-join to find and delete duplicates
        # Keep the record with latest updated_at (or largest id if updated_at is equal)
        conn.execute(sa.text("""
            DELETE t1 FROM tenant_default_models t1
            INNER JOIN tenant_default_models t2
                ON t1.tenant_id = t2.tenant_id
                AND t1.model_type = t2.model_type
                AND (t1.updated_at < t2.updated_at
                    OR (t1.updated_at = t2.updated_at AND t1.id < t2.id))
        """))

    # Now add the unique constraint
    with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
        batch_op.create_unique_constraint('unique_tenant_default_model_type', ['tenant_id', 'model_type'])


def downgrade():
    with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
        batch_op.drop_constraint('unique_tenant_default_model_type', type_='unique')
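Note: a tiny pure-Python illustration (invented sample rows) of the dedup rule this migration applies before adding the constraint — per (tenant_id, model_type), only the row with the greatest (updated_at, id) survives:

    rows = [
        {"id": 1, "tenant_id": "t1", "model_type": "llm", "updated_at": 10},
        {"id": 2, "tenant_id": "t1", "model_type": "llm", "updated_at": 20},
        {"id": 3, "tenant_id": "t1", "model_type": "rerank", "updated_at": 5},
    ]

    keep: dict[tuple[str, str], dict] = {}
    for row in rows:
        key = (row["tenant_id"], row["model_type"])
        best = keep.get(key)
        # Latest updated_at wins; the larger id breaks ties.
        if best is None or (row["updated_at"], row["id"]) > (best["updated_at"], best["id"]):
            keep[key] = row

    print(sorted(r["id"] for r in keep.values()))  # [2, 3]; row 1 is the duplicate deleted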
@@ -0,0 +1,39 @@
"""fix index to optimize message clean job performance

Revision ID: fce013ca180e
Revises: f55813ffe2c8
Create Date: 2026-02-11 15:49:17.603638

"""
from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'fce013ca180e'
down_revision = 'f55813ffe2c8'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('message_created_at_idx'))

    with op.batch_alter_table('saved_messages', schema=None) as batch_op:
        batch_op.create_index('saved_message_message_id_idx', ['message_id'], unique=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('saved_messages', schema=None) as batch_op:
        batch_op.drop_index('saved_message_message_id_idx')

    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('message_created_at_idx'), ['created_at'], unique=False)

    # ### end Alembic commands ###
@@ -227,7 +227,7 @@ class App(Base):
        with Session(db.engine) as session:
            if api_provider_ids:
                existing_api_providers = [
-                    api_provider.id
+                    str(api_provider.id)
                    for api_provider in session.execute(
                        text("SELECT id FROM tool_api_providers WHERE id IN :provider_ids"),
                        {"provider_ids": tuple(api_provider_ids)},
@@ -1040,7 +1040,6 @@ class Message(Base):
        Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"),
        Index("message_account_idx", "app_id", "from_source", "from_account_id"),
        Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
-        Index("message_created_at_idx", "created_at"),
        Index("message_app_mode_idx", "app_mode"),
        Index("message_created_at_id_idx", "created_at", "id"),
    )
@@ -181,6 +181,7 @@ class TenantDefaultModel(TypeBase):
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="tenant_default_model_pkey"),
        sa.Index("tenant_default_model_tenant_id_provider_type_idx", "tenant_id", "provider_name", "model_type"),
+        sa.UniqueConstraint("tenant_id", "model_type", name="unique_tenant_default_model_type"),
    )

    id: Mapped[str] = mapped_column(
@@ -16,6 +16,7 @@ class SavedMessage(TypeBase):
    __table_args__ = (
        sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
        sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
+        sa.Index("saved_message_message_id_idx", "message_id"),
    )

    id: Mapped[str] = mapped_column(
@@ -1,6 +1,6 @@
[project]
name = "dify-api"
-version = "1.12.1"
+version = "1.13.0"
requires-python = ">=3.11,<3.13"

dependencies = [
@@ -23,7 +23,7 @@ dependencies = [
    "gevent~=25.9.1",
    "gmpy2~=2.2.1",
    "google-api-core==2.18.0",
-    "google-api-python-client==2.90.0",
+    "google-api-python-client==2.189.0",
    "google-auth==2.29.0",
    "google-auth-httplib2==0.2.0",
    "google-cloud-aiplatform==1.49.0",
@@ -22,7 +22,7 @@ from libs.exception import BaseHTTPException
from models.human_input import RecipientType
from models.model import App, AppMode
from repositories.factory import DifyAPIRepositoryFactory
-from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE, resume_app_execution
+from tasks.app_generate.workflow_execute_task import resume_app_execution


class Form:
@@ -230,7 +230,6 @@ class HumanInputService:
        try:
            resume_app_execution.apply_async(
                kwargs={"payload": payload},
-                queue=WORKFLOW_BASED_APP_EXECUTION_QUEUE,
            )
        except Exception:  # pragma: no cover
            logger.exception("Failed to enqueue resume task for workflow run %s", workflow_run_id)
@@ -1,10 +1,13 @@
import datetime
import logging
+import os
+import random
import time
from collections.abc import Sequence
from typing import cast

-from sqlalchemy import delete, select
+import sqlalchemy as sa
+from sqlalchemy import delete, select, tuple_
from sqlalchemy.engine import CursorResult
from sqlalchemy.orm import Session
@@ -193,11 +196,15 @@ class MessagesCleanService:
            self._end_before,
        )

+        max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
+
        while True:
            stats["batches"] += 1
+            batch_start = time.monotonic()

            # Step 1: Fetch a batch of messages using cursor
            with Session(db.engine, expire_on_commit=False) as session:
+                fetch_messages_start = time.monotonic()
                msg_stmt = (
                    select(Message.id, Message.app_id, Message.created_at)
                    .where(Message.created_at < self._end_before)
@@ -209,13 +216,13 @@ class MessagesCleanService:
                    msg_stmt = msg_stmt.where(Message.created_at >= self._start_from)

                # Apply cursor condition: (created_at, id) > (last_created_at, last_message_id)
-                # This translates to:
-                # created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id)
                if _cursor:
                    # Continuing from previous batch
                    msg_stmt = msg_stmt.where(
-                        (Message.created_at > _cursor[0])
-                        | ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1]))
+                        tuple_(Message.created_at, Message.id)
+                        > tuple_(
+                            sa.literal(_cursor[0], type_=sa.DateTime()),
+                            sa.literal(_cursor[1], type_=Message.id.type),
+                        )
                    )

                raw_messages = list(session.execute(msg_stmt).all())
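Note: a minimal sketch of the keyset-pagination predicate the hunk above adopts; `items` is a hypothetical table standing in for Message, and the cursor values are examples. `tuple_` compiles to a SQL row-value comparison, (created_at, id) > (:param_1, :param_2), which a composite (created_at, id) index can serve directly (supported on PostgreSQL and MySQL):

    import datetime

    import sqlalchemy as sa
    from sqlalchemy import tuple_

    items = sa.table(
        "items",
        sa.column("id", sa.String()),
        sa.column("created_at", sa.DateTime()),
    )

    # Cursor taken from the last row of the previous batch.
    last_created_at = datetime.datetime(2026, 1, 1)
    last_id = "0000-aaaa"

    stmt = (
        sa.select(items.c.id, items.c.created_at)
        # Equivalent to: created_at > :c OR (created_at = :c AND id > :i)
        .where(tuple_(items.c.created_at, items.c.id) > tuple_(sa.literal(last_created_at), sa.literal(last_id)))
        .order_by(items.c.created_at, items.c.id)
        .limit(1000)
    )
    print(stmt)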
@@ -223,6 +230,12 @@ class MessagesCleanService:
                    SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
                    for msg_id, app_id, msg_created_at in raw_messages
                ]
+            logger.info(
+                "clean_messages (batch %s): fetched %s messages in %sms",
+                stats["batches"],
+                len(messages),
+                int((time.monotonic() - fetch_messages_start) * 1000),
+            )

            # Track total messages fetched across all batches
            stats["total_messages"] += len(messages)
@@ -241,8 +254,16 @@ class MessagesCleanService:
                logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
                continue

+            fetch_apps_start = time.monotonic()
            app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
            apps = list(session.execute(app_stmt).all())
+            logger.info(
+                "clean_messages (batch %s): fetched %s apps for %s app_ids in %sms",
+                stats["batches"],
+                len(apps),
+                len(app_ids),
+                int((time.monotonic() - fetch_apps_start) * 1000),
+            )

            if not apps:
                logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
@@ -252,7 +273,15 @@ class MessagesCleanService:
            app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}

            # Step 3: Delegate to policy to determine which messages to delete
+            policy_start = time.monotonic()
            message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
+            logger.info(
+                "clean_messages (batch %s): policy selected %s/%s messages in %sms",
+                stats["batches"],
+                len(message_ids_to_delete),
+                len(messages),
+                int((time.monotonic() - policy_start) * 1000),
+            )

            if not message_ids_to_delete:
                logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
@@ -263,14 +292,20 @@ class MessagesCleanService:
            # Step 4: Batch delete messages and their relations
            if not self._dry_run:
                with Session(db.engine, expire_on_commit=False) as session:
+                    delete_relations_start = time.monotonic()
                    # Delete related records first
                    self._batch_delete_message_relations(session, message_ids_to_delete)
+                    delete_relations_ms = int((time.monotonic() - delete_relations_start) * 1000)

                    # Delete messages
+                    delete_messages_start = time.monotonic()
                    delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
                    delete_result = cast(CursorResult, session.execute(delete_stmt))
                    messages_deleted = delete_result.rowcount
+                    delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000)
+                    commit_start = time.monotonic()
                    session.commit()
+                    commit_ms = int((time.monotonic() - commit_start) * 1000)

                stats["total_deleted"] += messages_deleted
@@ -280,6 +315,19 @@ class MessagesCleanService:
                    len(messages),
                    messages_deleted,
                )
+                logger.info(
+                    "clean_messages (batch %s): relations %sms, messages %sms, commit %sms, batch total %sms",
+                    stats["batches"],
+                    delete_relations_ms,
+                    delete_messages_ms,
+                    commit_ms,
+                    int((time.monotonic() - batch_start) * 1000),
+                )
+
+                # Random sleep between batches to avoid overwhelming the database
+                sleep_ms = random.uniform(0, max_batch_interval_ms)  # noqa: S311
+                logger.info("clean_messages (batch %s): sleeping for %.2fms", stats["batches"], sleep_ms)
+                time.sleep(sleep_ms / 1000)
            else:
                # Log random sample of message IDs that would be deleted (up to 10)
                sample_size = min(10, len(message_ids_to_delete))
@@ -1,5 +1,8 @@
import datetime
import logging
+import os
+import random
+import time
from collections.abc import Iterable, Sequence

import click
@@ -72,7 +75,12 @@ class WorkflowRunCleanup:
        batch_index = 0
        last_seen: tuple[datetime.datetime, str] | None = None

+        max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
+
        while True:
+            batch_start = time.monotonic()
+
+            fetch_start = time.monotonic()
            run_rows = self.workflow_run_repo.get_runs_batch_by_time_range(
                start_from=self.window_start,
                end_before=self.window_end,
@@ -80,12 +88,30 @@ class WorkflowRunCleanup:
                batch_size=self.batch_size,
            )
            if not run_rows:
+                logger.info("workflow_run_cleanup (batch #%s): no more rows to process", batch_index + 1)
                break

            batch_index += 1
            last_seen = (run_rows[-1].created_at, run_rows[-1].id)
+            logger.info(
+                "workflow_run_cleanup (batch #%s): fetched %s rows in %sms",
+                batch_index,
+                len(run_rows),
+                int((time.monotonic() - fetch_start) * 1000),
+            )

            tenant_ids = {row.tenant_id for row in run_rows}

+            filter_start = time.monotonic()
            free_tenants = self._filter_free_tenants(tenant_ids)
+            logger.info(
+                "workflow_run_cleanup (batch #%s): filtered %s free tenants from %s tenants in %sms",
+                batch_index,
+                len(free_tenants),
+                len(tenant_ids),
+                int((time.monotonic() - filter_start) * 1000),
+            )

            free_runs = [row for row in run_rows if row.tenant_id in free_tenants]
            paid_or_skipped = len(run_rows) - len(free_runs)
@@ -104,11 +130,17 @@ class WorkflowRunCleanup:
            total_runs_targeted += len(free_runs)

            if self.dry_run:
+                count_start = time.monotonic()
                batch_counts = self.workflow_run_repo.count_runs_with_related(
                    free_runs,
                    count_node_executions=self._count_node_executions,
                    count_trigger_logs=self._count_trigger_logs,
                )
+                logger.info(
+                    "workflow_run_cleanup (batch #%s, dry_run): counted related records in %sms",
+                    batch_index,
+                    int((time.monotonic() - count_start) * 1000),
+                )
                if related_totals is not None:
                    for key in related_totals:
                        related_totals[key] += batch_counts.get(key, 0)
@@ -120,14 +152,21 @@ class WorkflowRunCleanup:
                        fg="yellow",
                    )
                )
+                logger.info(
+                    "workflow_run_cleanup (batch #%s, dry_run): batch total %sms",
+                    batch_index,
+                    int((time.monotonic() - batch_start) * 1000),
+                )
                continue

            try:
+                delete_start = time.monotonic()
                counts = self.workflow_run_repo.delete_runs_with_related(
                    free_runs,
                    delete_node_executions=self._delete_node_executions,
                    delete_trigger_logs=self._delete_trigger_logs,
                )
+                delete_ms = int((time.monotonic() - delete_start) * 1000)
            except Exception:
                logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0])
                raise
@@ -143,6 +182,17 @@ class WorkflowRunCleanup:
                        fg="green",
                    )
                )
+                logger.info(
+                    "workflow_run_cleanup (batch #%s): delete %sms, batch total %sms",
+                    batch_index,
+                    delete_ms,
+                    int((time.monotonic() - batch_start) * 1000),
+                )
+
+            # Random sleep between batches to avoid overwhelming the database
+            sleep_ms = random.uniform(0, max_batch_interval_ms)  # noqa: S311
+            logger.info("workflow_run_cleanup (batch #%s): sleeping for %.2fms", batch_index, sleep_ms)
+            time.sleep(sleep_ms / 1000)

        if self.dry_run:
            if self.window_start:
@@ -129,15 +129,15 @@ def build_workflow_event_stream(
            return

        try:
-            event = buffer_state.queue.get(timeout=0.1)
+            event = buffer_state.queue.get(timeout=1)
        except queue.Empty:
            current_time = time.time()
            if current_time - last_msg_time > idle_timeout:
                logger.debug(
-                    "No workflow events received for %s seconds, keeping stream open",
+                    "Idle timeout of %s seconds reached, closing workflow event stream.",
                    idle_timeout,
                )
-                last_msg_time = current_time
+                return
            if current_time - last_ping_time >= ping_interval:
                yield StreamEvent.PING.value
                last_ping_time = current_time
@@ -405,7 +405,7 @@ def _start_buffering(subscription) -> BufferState:
    dropped_count = 0
    try:
        while not buffer_state.stop_event.is_set():
-            msg = subscription.receive(timeout=0.1)
+            msg = subscription.receive(timeout=1)
            if msg is None:
                continue
            event = _parse_event_message(msg)
@@ -51,7 +51,7 @@ def _patch_redis_clients_on_loaded_modules():
            continue
        if hasattr(module, "redis_client"):
            module.redis_client = redis_mock
-        if hasattr(module, "pubsub_redis_client"):
+        if hasattr(module, "_pubsub_redis_client"):
            module.pubsub_redis_client = redis_mock
@@ -72,7 +72,7 @@ def _patch_redis_clients():

    with (
        patch.object(ext_redis, "redis_client", redis_mock),
-        patch.object(ext_redis, "pubsub_redis_client", redis_mock),
+        patch.object(ext_redis, "_pubsub_redis_client", redis_mock),
    ):
        _patch_redis_clients_on_loaded_modules()
        yield
@@ -0,0 +1,34 @@
from datetime import datetime
from types import SimpleNamespace
from unittest.mock import MagicMock, patch

from controllers.console.app.conversation import _get_conversation


def test_get_conversation_mark_read_keeps_updated_at_unchanged():
    app_model = SimpleNamespace(id="app-id")
    account = SimpleNamespace(id="account-id")
    conversation = MagicMock()
    conversation.id = "conversation-id"

    with (
        patch("controllers.console.app.conversation.current_account_with_tenant", return_value=(account, None)),
        patch("controllers.console.app.conversation.naive_utc_now", return_value=datetime(2026, 2, 9, 0, 0, 0)),
        patch("controllers.console.app.conversation.db.session") as mock_session,
    ):
        mock_session.query.return_value.where.return_value.first.return_value = conversation

        _get_conversation(app_model, "conversation-id")

        statement = mock_session.execute.call_args[0][0]
        compiled = statement.compile()
        sql_text = str(compiled).lower()
        compact_sql_text = sql_text.replace(" ", "")
        params = compiled.params

        assert "updated_at=current_timestamp" not in compact_sql_text
        assert "updated_at=conversations.updated_at" in compact_sql_text
        assert "read_at=:read_at" in compact_sql_text
        assert "read_account_id=:read_account_id" in compact_sql_text
        assert params["read_at"] == datetime(2026, 2, 9, 0, 0, 0)
        assert params["read_account_id"] == "account-id"
@@ -1,92 +1,286 @@
-import builtins
+"""Tests for remote file upload API endpoints using Flask-RESTX."""
+
+import contextlib
from datetime import datetime
from types import SimpleNamespace
-from unittest.mock import patch
+from unittest.mock import Mock, patch

import httpx
import pytest
-from flask import Flask
-from flask.views import MethodView
-
-from extensions import ext_fastopenapi
-
-if not hasattr(builtins, "MethodView"):
-    builtins.MethodView = MethodView  # type: ignore[attr-defined]
+from flask import Flask, g


@pytest.fixture
def app() -> Flask:
+    """Create Flask app for testing."""
    app = Flask(__name__)
    app.config["TESTING"] = True
+    app.config["SECRET_KEY"] = "test-secret-key"
    return app


-def test_console_remote_files_fastopenapi_get_info(app: Flask):
-    ext_fastopenapi.init_app(app)
-
-    response = httpx.Response(
-        200,
-        request=httpx.Request("HEAD", "http://example.com/file.txt"),
-        headers={"Content-Type": "text/plain", "Content-Length": "10"},
-    )
-
-    with patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response):
-        client = app.test_client()
-        encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
-        resp = client.get(f"/console/api/remote-files/{encoded_url}")
-
-    assert resp.status_code == 200
-    assert resp.get_json() == {"file_type": "text/plain", "file_length": 10}
+@pytest.fixture
+def client(app):
+    """Create test client with console blueprint registered."""
+    from controllers.console import bp
+
+    app.register_blueprint(bp)
+    return app.test_client()


-def test_console_remote_files_fastopenapi_upload(app: Flask):
-    ext_fastopenapi.init_app(app)
-
-    head_response = httpx.Response(
-        200,
-        request=httpx.Request("GET", "http://example.com/file.txt"),
-        content=b"hello",
-    )
-    file_info = SimpleNamespace(
-        extension="txt",
-        size=5,
-        filename="file.txt",
-        mimetype="text/plain",
-    )
-    uploaded = SimpleNamespace(
-        id="file-id",
-        name="file.txt",
-        size=5,
-        extension="txt",
-        mime_type="text/plain",
-        created_by="user-id",
-        created_at=datetime(2024, 1, 1),
-    )
-
-    with (
-        patch("controllers.console.remote_files.db", new=SimpleNamespace(engine=object())),
-        patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
-        patch("controllers.console.remote_files.helpers.guess_file_info_from_response", return_value=file_info),
-        patch("controllers.console.remote_files.FileService.is_file_size_within_limit", return_value=True),
-        patch("controllers.console.remote_files.FileService.__init__", return_value=None),
-        patch("controllers.console.remote_files.current_account_with_tenant", return_value=(object(), "tenant-id")),
-        patch("controllers.console.remote_files.FileService.upload_file", return_value=uploaded),
-        patch("controllers.console.remote_files.file_helpers.get_signed_file_url", return_value="signed-url"),
-    ):
-        client = app.test_client()
-        resp = client.post(
-            "/console/api/remote-files/upload",
-            json={"url": "http://example.com/file.txt"},
-        )
-
-    assert resp.status_code == 201
-    assert resp.get_json() == {
-        "id": "file-id",
-        "name": "file.txt",
-        "size": 5,
-        "extension": "txt",
-        "url": "signed-url",
-        "mime_type": "text/plain",
-        "created_by": "user-id",
-        "created_at": int(uploaded.created_at.timestamp()),
-    }
+@pytest.fixture
+def mock_account():
+    """Create a mock account for testing."""
+    from models import Account
+
+    account = Mock(spec=Account)
+    account.id = "test-account-id"
+    account.current_tenant_id = "test-tenant-id"
+    return account
+
+
+@pytest.fixture
+def auth_ctx(app, mock_account):
+    """Context manager to set auth/tenant context in flask.g for a request."""
+
+    @contextlib.contextmanager
+    def _ctx():
+        with app.test_request_context():
+            g._login_user = mock_account
+            g._current_tenant = mock_account.current_tenant_id
+            yield
+
+    return _ctx
+
+
+class TestGetRemoteFileInfo:
+    """Test GET /console/api/remote-files/<path:url> endpoint."""
+
+    def test_get_remote_file_info_success(self, app, client, mock_account):
+        """Test successful retrieval of remote file info."""
+        response = httpx.Response(
+            200,
+            request=httpx.Request("HEAD", "http://example.com/file.txt"),
+            headers={"Content-Type": "text/plain", "Content-Length": "1024"},
+        )
+
+        with (
+            patch(
+                "controllers.console.remote_files.current_account_with_tenant",
+                return_value=(mock_account, "test-tenant-id"),
+            ),
+            patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response),
+            patch("libs.login.check_csrf_token", return_value=None),
+        ):
+            with app.test_request_context():
+                g._login_user = mock_account
+                g._current_tenant = mock_account.current_tenant_id
+                encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
+                resp = client.get(f"/console/api/remote-files/{encoded_url}")
+
+        assert resp.status_code == 200
+        data = resp.get_json()
+        assert data["file_type"] == "text/plain"
+        assert data["file_length"] == 1024
+
+    def test_get_remote_file_info_fallback_to_get_on_head_failure(self, app, client, mock_account):
+        """Test fallback to GET when HEAD returns non-200 status."""
+        head_response = httpx.Response(
+            404,
+            request=httpx.Request("HEAD", "http://example.com/file.pdf"),
+        )
+        get_response = httpx.Response(
+            200,
+            request=httpx.Request("GET", "http://example.com/file.pdf"),
+            headers={"Content-Type": "application/pdf", "Content-Length": "2048"},
+        )
+
+        with (
+            patch(
+                "controllers.console.remote_files.current_account_with_tenant",
+                return_value=(mock_account, "test-tenant-id"),
+            ),
+            patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
+            patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_response),
+            patch("libs.login.check_csrf_token", return_value=None),
+        ):
+            with app.test_request_context():
+                g._login_user = mock_account
+                g._current_tenant = mock_account.current_tenant_id
+                encoded_url = "http%3A%2F%2Fexample.com%2Ffile.pdf"
+                resp = client.get(f"/console/api/remote-files/{encoded_url}")
+
+        assert resp.status_code == 200
+        data = resp.get_json()
+        assert data["file_type"] == "application/pdf"
+        assert data["file_length"] == 2048
+
+
+class TestRemoteFileUpload:
+    """Test POST /console/api/remote-files/upload endpoint."""
+
+    @pytest.mark.parametrize(
+        ("head_status", "use_get"),
+        [
+            (200, False),  # HEAD succeeds
+            (405, True),  # HEAD fails -> fallback GET
+        ],
+    )
+    def test_upload_remote_file_success_paths(self, client, mock_account, auth_ctx, head_status, use_get):
+        url = "http://example.com/file.pdf"
+        head_resp = httpx.Response(
+            head_status,
+            request=httpx.Request("HEAD", url),
+            headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
+        )
+        get_resp = httpx.Response(
+            200,
+            request=httpx.Request("GET", url),
+            headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
+            content=b"file content",
+        )
+
+        file_info = SimpleNamespace(
+            extension="pdf",
+            size=1024,
+            filename="file.pdf",
+            mimetype="application/pdf",
+        )
+        uploaded_file = SimpleNamespace(
+            id="uploaded-file-id",
+            name="file.pdf",
+            size=1024,
+            extension="pdf",
+            mime_type="application/pdf",
+            created_by="test-account-id",
+            created_at=datetime(2024, 1, 1, 12, 0, 0),
+        )
+
+        with (
+            patch(
+                "controllers.console.remote_files.current_account_with_tenant",
+                return_value=(mock_account, "test-tenant-id"),
+            ),
+            patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp) as p_head,
+            patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_resp) as p_get,
+            patch(
+                "controllers.console.remote_files.helpers.guess_file_info_from_response",
+                return_value=file_info,
+            ),
+            patch(
+                "controllers.console.remote_files.FileService.is_file_size_within_limit",
+                return_value=True,
+            ),
+            patch("controllers.console.remote_files.db", spec=["engine"]),
+            patch("controllers.console.remote_files.FileService") as mock_file_service,
+            patch(
+                "controllers.console.remote_files.file_helpers.get_signed_file_url",
+                return_value="http://example.com/signed-url",
+            ),
+            patch("libs.login.check_csrf_token", return_value=None),
+        ):
+            mock_file_service.return_value.upload_file.return_value = uploaded_file
+
+            with auth_ctx():
+                resp = client.post(
+                    "/console/api/remote-files/upload",
+                    json={"url": url},
+                )
+
+        assert resp.status_code == 201
+        p_head.assert_called_once()
+        # GET is used either for fallback (HEAD fails) or to fetch content after HEAD succeeds
+        p_get.assert_called_once()
+        mock_file_service.return_value.upload_file.assert_called_once()
+
+        data = resp.get_json()
+        assert data["id"] == "uploaded-file-id"
+        assert data["name"] == "file.pdf"
+        assert data["size"] == 1024
+        assert data["extension"] == "pdf"
+        assert data["url"] == "http://example.com/signed-url"
+        assert data["mime_type"] == "application/pdf"
+        assert data["created_by"] == "test-account-id"
+
+    @pytest.mark.parametrize(
+        ("size_ok", "raises", "expected_status", "expected_msg"),
+        [
+            # When size check fails in controller, API returns 413 with message "File size exceeded..."
+            (False, None, 413, "file size exceeded"),
+            # When service raises unsupported type, controller maps to 415 with message "File type not allowed."
+            (True, "unsupported", 415, "file type not allowed"),
+        ],
+    )
+    def test_upload_remote_file_errors(
+        self, client, mock_account, auth_ctx, size_ok, raises, expected_status, expected_msg
+    ):
+        url = "http://example.com/x.pdf"
+        head_resp = httpx.Response(
+            200,
+            request=httpx.Request("HEAD", url),
+            headers={"Content-Type": "application/pdf", "Content-Length": "9"},
+        )
+        file_info = SimpleNamespace(extension="pdf", size=9, filename="x.pdf", mimetype="application/pdf")
+
+        with (
+            patch(
+                "controllers.console.remote_files.current_account_with_tenant",
+                return_value=(mock_account, "test-tenant-id"),
+            ),
+            patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp),
+            patch(
+                "controllers.console.remote_files.helpers.guess_file_info_from_response",
+                return_value=file_info,
+            ),
+            patch(
+                "controllers.console.remote_files.FileService.is_file_size_within_limit",
+                return_value=size_ok,
+            ),
+            patch("controllers.console.remote_files.db", spec=["engine"]),
+            patch("libs.login.check_csrf_token", return_value=None),
+        ):
+            if raises == "unsupported":
+                from services.errors.file import UnsupportedFileTypeError
+
+                with patch("controllers.console.remote_files.FileService") as mock_file_service:
+                    mock_file_service.return_value.upload_file.side_effect = UnsupportedFileTypeError("bad")
+                    with auth_ctx():
+                        resp = client.post(
+                            "/console/api/remote-files/upload",
+                            json={"url": url},
+                        )
+            else:
+                with auth_ctx():
+                    resp = client.post(
+                        "/console/api/remote-files/upload",
+                        json={"url": url},
+                    )
+
+        assert resp.status_code == expected_status
+        data = resp.get_json()
+        msg = (data.get("error") or {}).get("message") or data.get("message", "")
+        assert expected_msg in msg.lower()
+
+    def test_upload_remote_file_fetch_failure(self, client, mock_account, auth_ctx):
+        """Test upload when fetching of remote file fails."""
+        with (
+            patch(
+                "controllers.console.remote_files.current_account_with_tenant",
+                return_value=(mock_account, "test-tenant-id"),
+            ),
+            patch(
+                "controllers.console.remote_files.ssrf_proxy.head",
+                side_effect=httpx.RequestError("Connection failed"),
+            ),
+            patch("libs.login.check_csrf_token", return_value=None),
+        ):
+            with auth_ctx():
+                resp = client.post(
+                    "/console/api/remote-files/upload",
+                    json={"url": "http://unreachable.com/file.pdf"},
+                )
+
+        assert resp.status_code == 400
+        data = resp.get_json()
+        msg = (data.get("error") or {}).get("message") or data.get("message", "")
+        assert "failed to fetch" in msg.lower()
@@ -25,15 +25,19 @@ class TestMessageCycleManagerOptimization:
        task_state = Mock()
        return MessageCycleManager(application_generate_entity=mock_application_generate_entity, task_state=task_state)

-    def test_get_message_event_type_with_message_file(self, message_cycle_manager):
-        """Test get_message_event_type returns MESSAGE_FILE when message has files."""
+    def test_get_message_event_type_with_assistant_file(self, message_cycle_manager):
+        """Test get_message_event_type returns MESSAGE_FILE when message has assistant-generated files.
+
+        This ensures that AI-generated images (belongs_to='assistant') trigger the MESSAGE_FILE event,
+        allowing the frontend to properly display generated image files with url field.
+        """
        with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
            # Setup mock session and message file
            mock_session = Mock()
            mock_session_factory.create_session.return_value.__enter__.return_value = mock_session

            mock_message_file = Mock()
-            # Current implementation uses session.scalar(select(...))
+            mock_message_file.belongs_to = "assistant"
            mock_session.scalar.return_value = mock_message_file

            # Execute
@ -44,6 +48,31 @@ class TestMessageCycleManagerOptimization:
|
||||
assert result == StreamEvent.MESSAGE_FILE
|
||||
mock_session.scalar.assert_called_once()
|
||||
|
||||
def test_get_message_event_type_with_user_file(self, message_cycle_manager):
|
||||
"""Test get_message_event_type returns MESSAGE when message only has user-uploaded files.
|
||||
|
||||
This is a regression test for the issue where user-uploaded images (belongs_to='user')
|
||||
caused the LLM text response to be incorrectly tagged with MESSAGE_FILE event,
|
||||
resulting in broken images in the chat UI. The query filters for belongs_to='assistant',
|
||||
so when only user files exist, the database query returns None, resulting in MESSAGE event type.
|
||||
"""
|
||||
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
|
||||
# Setup mock session and message file
|
||||
mock_session = Mock()
|
||||
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
|
||||
|
||||
# When querying for assistant files with only user files present, return None
|
||||
# (simulates database query with belongs_to='assistant' filter returning no results)
|
||||
mock_session.scalar.return_value = None
|
||||
|
||||
# Execute
|
||||
with current_app.app_context():
|
||||
result = message_cycle_manager.get_message_event_type("test-message-id")
|
||||
|
||||
# Assert
|
||||
assert result == StreamEvent.MESSAGE
|
||||
mock_session.scalar.assert_called_once()
|
||||
|
||||
def test_get_message_event_type_without_message_file(self, message_cycle_manager):
|
||||
"""Test get_message_event_type returns MESSAGE when message has no files."""
|
||||
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
|
||||
@ -69,7 +98,7 @@ class TestMessageCycleManagerOptimization:
|
||||
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
|
||||
|
||||
mock_message_file = Mock()
|
||||
# Current implementation uses session.scalar(select(...))
|
||||
mock_message_file.belongs_to = "assistant"
|
||||
mock_session.scalar.return_value = mock_message_file
|
||||
|
||||
# Execute: compute event type once, then pass to message_to_stream_response
|
||||
|
||||
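For orientation, the behavior these tests pin down can be sketched in a few lines. This is a hedged illustration, not the actual MessageCycleManager code: the real implementation queries message-file rows via session.scalar(select(...)) with a belongs_to == "assistant" filter, which is abstracted here into a caller-supplied callback.

    from enum import StrEnum


    class StreamEvent(StrEnum):
        MESSAGE = "message"
        MESSAGE_FILE = "message_file"


    def get_message_event_type(first_assistant_file, message_id: str) -> StreamEvent:
        """first_assistant_file(message_id) stands in for the belongs_to='assistant' query."""
        # Only assistant-generated files count; user uploads must not retag
        # the LLM text response as MESSAGE_FILE.
        row = first_assistant_file(message_id)
        return StreamEvent.MESSAGE_FILE if row is not None else StreamEvent.MESSAGE


    assert get_message_event_type(lambda _mid: None, "m1") is StreamEvent.MESSAGE
    assert get_message_event_type(lambda _mid: object(), "m1") is StreamEvent.MESSAGE_FILE
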
@@ -496,6 +496,9 @@ class TestSchemaResolverClass:
        avg_time_no_cache = sum(results1) / len(results1)

        # Second run (with cache) - run multiple times
+        # Warm up cache first
+        resolve_dify_schema_refs(schema)
+
        results2 = []
        for _ in range(3):
            start = time.perf_counter()

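The hunk above warms the cache before timing the second batch of runs, so the first (cache-miss) resolution does not skew the cached average. A minimal sketch of that warm-up-then-measure pattern, with the resolver passed in as a plain callable:

    import time


    def average_resolution_time(resolve, schema, runs: int = 3) -> float:
        resolve(schema)  # warm-up call populates any internal ref cache
        samples = []
        for _ in range(runs):
            start = time.perf_counter()
            resolve(schema)
            samples.append(time.perf_counter() - start)
        return sum(samples) / len(samples)
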
@@ -198,6 +198,15 @@ class SubscriptionTestCase:
    description: str = ""


+class FakeRedisClient:
+    """Minimal fake Redis client for unit tests."""
+
+    def __init__(self) -> None:
+        self.publish = MagicMock()
+        self.spublish = MagicMock()
+        self.pubsub = MagicMock(return_value=MagicMock())
+
+
class TestRedisSubscription:
    """Test cases for the _RedisSubscription class."""

@@ -619,10 +628,13 @@ class TestRedisSubscription:
class TestRedisShardedSubscription:
    """Test cases for the _RedisShardedSubscription class."""

+    @pytest.fixture(autouse=True)
+    def patch_sharded_redis_type(self, monkeypatch):
+        monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)
+
    @pytest.fixture
-    def mock_redis_client(self) -> MagicMock:
-        client = MagicMock()
-        return client
+    def mock_redis_client(self) -> FakeRedisClient:
+        return FakeRedisClient()

    @pytest.fixture
    def mock_pubsub(self) -> MagicMock:
@@ -636,7 +648,7 @@ class TestRedisShardedSubscription:

    @pytest.fixture
    def sharded_subscription(
-        self, mock_pubsub: MagicMock, mock_redis_client: MagicMock
+        self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
    ) -> Generator[_RedisShardedSubscription, None, None]:
        """Create a _RedisShardedSubscription instance for testing."""
        subscription = _RedisShardedSubscription(
@@ -657,7 +669,7 @@ class TestRedisShardedSubscription:

    # ==================== Lifecycle Tests ====================

-    def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
+    def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
        """Test that sharded subscription is properly initialized."""
        subscription = _RedisShardedSubscription(
            client=mock_redis_client,
@@ -970,7 +982,7 @@ class TestRedisShardedSubscription:
        ],
    )
    def test_sharded_subscription_scenarios(
-        self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: MagicMock
+        self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
    ):
        """Test various sharded subscription scenarios using table-driven approach."""
        subscription = _RedisShardedSubscription(
@@ -1058,7 +1070,7 @@ class TestRedisShardedSubscription:
        # Close should still work
        sharded_subscription.close()  # Should not raise

-    def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
+    def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
        """Test various sharded channel name formats."""
        channel_names = [
            "simple",
@@ -1120,10 +1132,13 @@ class TestRedisSubscriptionCommon:
        """Parameterized fixture providing subscription type and class."""
        return request.param

+    @pytest.fixture(autouse=True)
+    def patch_sharded_redis_type(self, monkeypatch):
+        monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)
+
    @pytest.fixture
-    def mock_redis_client(self) -> MagicMock:
-        client = MagicMock()
-        return client
+    def mock_redis_client(self) -> FakeRedisClient:
+        return FakeRedisClient()

    @pytest.fixture
    def mock_pubsub(self) -> MagicMock:
@@ -1140,7 +1155,7 @@ class TestRedisSubscriptionCommon:
        return pubsub

    @pytest.fixture
-    def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
+    def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
        """Create a subscription instance based on parameterized type."""
        subscription_type, subscription_class = subscription_params
        topic_name = f"test-{subscription_type}-topic"

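The FakeRedisClient introduced above replaces a bare MagicMock so the autouse fixture can patch the Redis symbol in sharded_channel with a real class (usable in isinstance checks and as a type annotation) while keeping every method a recording mock. A standalone sketch of how such a fake is exercised; the channel name and payload are illustrative:

    from unittest.mock import MagicMock


    class FakeRedisClient:
        """Real type for type checks; mocked methods instead of network calls."""

        def __init__(self) -> None:
            self.publish = MagicMock()
            self.spublish = MagicMock()
            self.pubsub = MagicMock(return_value=MagicMock())


    client = FakeRedisClient()
    client.spublish("shard-topic", b"payload")  # recorded, no Redis server needed
    client.spublish.assert_called_once_with("shard-topic", b"payload")
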
@@ -17,7 +17,6 @@ from core.workflow.nodes.human_input.entities import (
from core.workflow.nodes.human_input.enums import FormInputType, HumanInputFormKind, HumanInputFormStatus
from models.human_input import RecipientType
from services.human_input_service import Form, FormExpiredError, HumanInputService, InvalidFormDataError
-from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE


@pytest.fixture
@@ -88,7 +87,6 @@ def test_enqueue_resume_dispatches_task_for_workflow(mocker, mock_session_factor

    resume_task.apply_async.assert_called_once()
    call_kwargs = resume_task.apply_async.call_args.kwargs
-    assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
    assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"


@@ -130,7 +128,6 @@ def test_enqueue_resume_dispatches_task_for_advanced_chat(mocker, mock_session_f

    resume_task.apply_async.assert_called_once()
    call_kwargs = resume_task.apply_async.call_args.kwargs
-    assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
    assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"

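These hunks drop the queue assertions (and the corresponding import) from the tests, while the workflow_based_app_execution queue itself remains in the worker defaults below. Dispatching onto a dedicated queue at call time is plain Celery; a hedged sketch with an illustrative task name and broker URL, not Dify's actual task module:

    from celery import Celery

    app = Celery("worker", broker="redis://localhost:6379/0")  # broker URL is illustrative


    @app.task(name="example.resume_workflow")  # hypothetical task
    def resume_workflow(payload: dict) -> None:
        ...


    # Route one invocation onto the dedicated queue, as the removed assertions expected:
    resume_workflow.apply_async(
        kwargs={"payload": {"workflow_run_id": "workflow-run-id"}},
        queue="workflow_based_app_execution",
    )
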
84 api/uv.lock generated

@@ -1237,49 +1237,47 @@ wheels = [

[[package]]
name = "cryptography"
-version = "46.0.3"
+version = "46.0.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
-    { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
-    { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
-    { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
-    { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
-    { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
-    { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
-    { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
-    { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
-    { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
-    { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
-    { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
-    { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
-    { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
-    { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
-    { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
-    { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
-    { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
-    { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
-    { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
-    { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
-    { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
-    { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
-    { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
-    { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
-    { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
-    { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
-    { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
-    { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
-    { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" },
-    { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" },
-    { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" },
-    { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" },
-    { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" },
-    { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" },
+    { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
+    { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
+    { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
+    { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
+    { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
+    { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
+    { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
+    { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
+    { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
+    { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
+    { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
+    { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
+    { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
+    { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
+    { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
+    { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
+    { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
+    { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
+    { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
+    { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
+    { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" },
+    { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" },
+    { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" },
+    { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" },
]

[[package]]
@@ -1368,7 +1366,7 @@ wheels = [

[[package]]
name = "dify-api"
-version = "1.12.1"
+version = "1.13.0"
source = { virtual = "." }
dependencies = [
    { name = "aliyun-log-python-sdk" },
@@ -1594,7 +1592,7 @@ requires-dist = [
    { name = "gevent", specifier = "~=25.9.1" },
    { name = "gmpy2", specifier = "~=2.2.1" },
    { name = "google-api-core", specifier = "==2.18.0" },
-    { name = "google-api-python-client", specifier = "==2.90.0" },
+    { name = "google-api-python-client", specifier = "==2.189.0" },
    { name = "google-auth", specifier = "==2.29.0" },
    { name = "google-auth-httplib2", specifier = "==0.2.0" },
    { name = "google-cloud-aiplatform", specifier = "==1.49.0" },
@@ -2306,7 +2304,7 @@ grpc = [

[[package]]
name = "google-api-python-client"
-version = "2.90.0"
+version = "2.189.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "google-api-core" },
@@ -2315,9 +2313,9 @@ dependencies = [
    { name = "httplib2" },
    { name = "uritemplate" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/6f/f8/0783aeca3410ee053d4dd1fccafd85197847b8f84dd038e036634605d083/google_api_python_client-2.189.0.tar.gz", hash = "sha256:45f2d8559b5c895dde6ad3fb33de025f5cb2c197fa5862f18df7f5295a172741", size = 13979470, upload-time = "2026-02-03T19:24:55.432Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" },
+    { url = "https://files.pythonhosted.org/packages/04/44/3677ff27998214f2fa7957359da48da378a0ffff1bd0bdaba42e752bc13e/google_api_python_client-2.189.0-py3-none-any.whl", hash = "sha256:a258c09660a49c6159173f8bbece171278e917e104a11f0640b34751b79c8a1a", size = 14547633, upload-time = "2026-02-03T19:24:52.845Z" },
]

[[package]]

@@ -106,10 +106,10 @@ if [[ -z "${QUEUES}" ]]; then
  # Configure queues based on edition
  if [[ "${EDITION}" == "CLOUD" ]]; then
    # Cloud edition: separate queues for dataset and trigger tasks
-    QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
+    QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
  else
    # Community edition (SELF_HOSTED): dataset and workflow have separate queues
-    QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
+    QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
  fi

  echo "No queues specified, using edition-based defaults: ${QUEUES}"

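Workers started by this script now consume workflow_based_app_execution in both editions. The producer-side complement to the call-time queue= argument shown earlier is static routing; a hedged sketch (the glob pattern is an assumption for illustration, not Dify's actual routing table):

    from celery import Celery

    app = Celery("worker")
    # Send every task under a matching name onto the dedicated queue, so only
    # workers started with -Q ...,workflow_based_app_execution will pick them up.
    app.conf.task_routes = {
        "tasks.app_generate.*": {"queue": "workflow_based_app_execution"},
    }
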
@@ -62,6 +62,9 @@ LANG=C.UTF-8
LC_ALL=C.UTF-8
PYTHONIOENCODING=utf-8

+# Set UV cache directory to avoid permission issues with non-existent home directory
+UV_CACHE_DIR=/tmp/.uv-cache
+
# ------------------------------
# Server Configuration
# ------------------------------
@@ -384,6 +387,8 @@ CELERY_USE_SENTINEL=false
CELERY_SENTINEL_MASTER_NAME=
CELERY_SENTINEL_PASSWORD=
CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
+# e.g. {"tasks.add": {"rate_limit": "10/s"}}
+CELERY_TASK_ANNOTATIONS=null

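CELERY_TASK_ANNOTATIONS carries a JSON mapping of task name to options, with null meaning no annotations. A minimal sketch of parsing such an env value into Celery's task_annotations setting, assuming a plain Celery app rather than Dify's actual bootstrap:

    import json
    import os

    from celery import Celery

    app = Celery("example")
    annotations = json.loads(os.environ.get("CELERY_TASK_ANNOTATIONS", "null"))
    if annotations:  # e.g. {"tasks.add": {"rate_limit": "10/s"}}
        app.conf.task_annotations = annotations  # per-task options such as rate limits
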
# ------------------------------
# CORS Configuration
@@ -1518,6 +1523,7 @@ AMPLITUDE_API_KEY=
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
+SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30

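SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL caps, in milliseconds, how long the cleaner pauses between batches of SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE rows. A hedged sketch of such a loop; delete_batch is a hypothetical callback returning the number of rows it removed:

    import os
    import random
    import time

    BATCH_SIZE = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE", "1000"))
    MAX_INTERVAL_MS = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", "200"))


    def clean_expired_records(delete_batch) -> None:
        while True:
            deleted = delete_batch(BATCH_SIZE)
            if deleted < BATCH_SIZE:
                break  # nothing, or only a partial batch, left to clean
            # Pause up to the configured cap so the database is not hit continuously.
            time.sleep(random.uniform(0, MAX_INTERVAL_MS) / 1000.0)
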
@@ -21,7 +21,7 @@ services:

  # API service
  api:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.13.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -63,7 +63,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.13.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -102,7 +102,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.13.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -132,7 +132,7 @@ services:

  # Frontend web application.
  web:
-    image: langgenius/dify-web:1.12.1
+    image: langgenius/dify-web:1.13.0
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@@ -16,6 +16,7 @@ x-shared-env: &shared-api-worker-env
  LANG: ${LANG:-C.UTF-8}
  LC_ALL: ${LC_ALL:-C.UTF-8}
  PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8}
+  UV_CACHE_DIR: ${UV_CACHE_DIR:-/tmp/.uv-cache}
  LOG_LEVEL: ${LOG_LEVEL:-INFO}
  LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
  LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
@@ -105,6 +106,7 @@ x-shared-env: &shared-api-worker-env
  CELERY_SENTINEL_MASTER_NAME: ${CELERY_SENTINEL_MASTER_NAME:-}
  CELERY_SENTINEL_PASSWORD: ${CELERY_SENTINEL_PASSWORD:-}
  CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1}
+  CELERY_TASK_ANNOTATIONS: ${CELERY_TASK_ANNOTATIONS:-null}
  WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*}
  CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
  COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
@@ -682,6 +684,7 @@ x-shared-env: &shared-api-worker-env
  AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
  SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
  SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
+  SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
  SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
  PUBSUB_REDIS_URL: ${PUBSUB_REDIS_URL:-}
  PUBSUB_REDIS_CHANNEL_TYPE: ${PUBSUB_REDIS_CHANNEL_TYPE:-pubsub}
@@ -712,7 +715,7 @@ services:

  # API service
  api:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.13.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -754,7 +757,7 @@ services:
  # worker service
  # The Celery worker for processing all queues (dataset, workflow, mail, etc.)
  worker:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.13.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -793,7 +796,7 @@ services:
  # worker_beat service
  # Celery beat for scheduling periodic tasks.
  worker_beat:
-    image: langgenius/dify-api:1.12.1
+    image: langgenius/dify-api:1.13.0
    restart: always
    environment:
      # Use the shared environment variables.
@@ -823,7 +826,7 @@ services:

  # Frontend web application.
  web:
-    image: langgenius/dify-web:1.12.1
+    image: langgenius/dify-web:1.13.0
    restart: always
    environment:
      CONSOLE_API_URL: ${CONSOLE_API_URL:-}

8 sdks/nodejs-client/pnpm-lock.yaml generated

@@ -10,7 +10,7 @@ importers:
  dependencies:
    axios:
      specifier: ^1.13.2
-      version: 1.13.2
+      version: 1.13.5
  devDependencies:
    '@eslint/js':
      specifier: ^9.39.2
@@ -544,8 +544,8 @@ packages:
  asynckit@0.4.0:
    resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}

-  axios@1.13.2:
-    resolution: {integrity: sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==}
+  axios@1.13.5:
+    resolution: {integrity: sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==}

  balanced-match@1.0.2:
    resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
@@ -1677,7 +1677,7 @@ snapshots:

  asynckit@0.4.0: {}

-  axios@1.13.2:
+  axios@1.13.5:
    dependencies:
      follow-redirects: 1.15.11
      form-data: 4.0.5

@@ -1,261 +0,0 @@
/**
 * MAX_PARALLEL_LIMIT Configuration Bug Test
 *
 * This test reproduces and verifies the fix for issue #23083:
 * MAX_PARALLEL_LIMIT environment variable does not take effect in iteration panel
 */

import { render, screen } from '@testing-library/react'
import * as React from 'react'

// Mock environment variables before importing constants
const originalEnv = process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT

// Test with different environment values
function setupEnvironment(value?: string) {
  if (value)
    process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = value
  else
    delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT

  // Clear module cache to force re-evaluation
  vi.resetModules()
}

function restoreEnvironment() {
  if (originalEnv)
    process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = originalEnv
  else
    delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT

  vi.resetModules()
}

// Mock i18next with proper implementation
vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => {
      if (key.includes('MaxParallelismTitle'))
        return 'Max Parallelism'
      if (key.includes('MaxParallelismDesc'))
        return 'Maximum number of parallel executions'
      if (key.includes('parallelMode'))
        return 'Parallel Mode'
      if (key.includes('parallelPanelDesc'))
        return 'Enable parallel execution'
      if (key.includes('errorResponseMethod'))
        return 'Error Response Method'
      return key
    },
  }),
  initReactI18next: {
    type: '3rdParty',
    init: vi.fn(),
  },
}))

// Mock i18next module completely to prevent initialization issues
vi.mock('i18next', () => ({
  use: vi.fn().mockReturnThis(),
  init: vi.fn().mockReturnThis(),
  t: vi.fn(key => key),
  isInitialized: true,
}))

// Mock the useConfig hook
vi.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
  default: () => ({
    inputs: {
      is_parallel: true,
      parallel_nums: 5,
      error_handle_mode: 'terminated',
    },
    changeParallel: vi.fn(),
    changeParallelNums: vi.fn(),
    changeErrorHandleMode: vi.fn(),
  }),
}))

// Mock other components
vi.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => ({
  default: function MockVarReferencePicker() {
    return <div data-testid="var-reference-picker">VarReferencePicker</div>
  },
}))

vi.mock('@/app/components/workflow/nodes/_base/components/split', () => ({
  default: function MockSplit() {
    return <div data-testid="split">Split</div>
  },
}))

vi.mock('@/app/components/workflow/nodes/_base/components/field', () => ({
  default: function MockField({ title, children }: { title: string, children: React.ReactNode }) {
    return (
      <div data-testid="field">
        <label>{title}</label>
        {children}
      </div>
    )
  },
}))

const getParallelControls = () => ({
  numberInput: screen.getByRole('spinbutton'),
  slider: screen.getByRole('slider'),
})

describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
  const mockNodeData = {
    id: 'test-iteration-node',
    type: 'iteration' as const,
    data: {
      title: 'Test Iteration',
      desc: 'Test iteration node',
      iterator_selector: ['test'],
      output_selector: ['output'],
      is_parallel: true,
      parallel_nums: 5,
      error_handle_mode: 'terminated' as const,
    },
  }

  beforeEach(() => {
    vi.clearAllMocks()
  })

  afterEach(() => {
    restoreEnvironment()
  })

  afterAll(() => {
    restoreEnvironment()
  })

  describe('Environment Variable Parsing', () => {
    it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', async () => {
      setupEnvironment('25')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(25)
    })

    it('should fallback to default when environment variable is not set', async () => {
      setupEnvironment() // No environment variable
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    it('should handle invalid environment variable values', async () => {
      setupEnvironment('invalid')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      // Should fall back to default when parsing fails
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    it('should handle empty environment variable', async () => {
      setupEnvironment('')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      // Should fall back to default when empty
      expect(MAX_PARALLEL_LIMIT).toBe(10)
    })

    // Edge cases for boundary values
    it('should clamp MAX_PARALLEL_LIMIT to MIN when env is 0 or negative', async () => {
      setupEnvironment('0')
      let { MAX_PARALLEL_LIMIT } = await import('@/config')
      expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default

      setupEnvironment('-5')
      ;({ MAX_PARALLEL_LIMIT } = await import('@/config'))
      expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
    })

    it('should handle float numbers by parseInt behavior', async () => {
      setupEnvironment('12.7')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      // parseInt truncates to integer
      expect(MAX_PARALLEL_LIMIT).toBe(12)
    })
  })

  describe('UI Component Integration (Main Fix Verification)', () => {
    it('should render iteration panel with environment-configured max value', async () => {
      // Set environment variable to a different value
      setupEnvironment('30')

      // Import Panel after setting environment
      const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      render(
        <Panel
          id="test-node"
          // @ts-expect-error key type mismatch
          data={mockNodeData.data}
        />,
      )

      // Behavior-focused assertion: UI max should equal MAX_PARALLEL_LIMIT
      const { numberInput, slider } = getParallelControls()
      expect(numberInput).toHaveAttribute('max', String(MAX_PARALLEL_LIMIT))
      expect(slider).toHaveAttribute('aria-valuemax', String(MAX_PARALLEL_LIMIT))

      // Verify the actual values
      expect(MAX_PARALLEL_LIMIT).toBe(30)
      expect(numberInput.getAttribute('max')).toBe('30')
      expect(slider.getAttribute('aria-valuemax')).toBe('30')
    })

    it('should maintain UI consistency with different environment values', async () => {
      setupEnvironment('15')
      const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
      const { MAX_PARALLEL_LIMIT } = await import('@/config')

      render(
        <Panel
          id="test-node"
          // @ts-expect-error key type mismatch
          data={mockNodeData.data}
        />,
      )

      // Both input and slider should use the same max value from MAX_PARALLEL_LIMIT
      const { numberInput, slider } = getParallelControls()

      expect(numberInput.getAttribute('max')).toBe(slider.getAttribute('aria-valuemax'))
      expect(numberInput.getAttribute('max')).toBe(String(MAX_PARALLEL_LIMIT))
    })
  })

  describe('Legacy Constant Verification (For Transition Period)', () => {
    // Marked as transition/deprecation tests
    it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', async () => {
      const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
      expect(typeof MAX_ITERATION_PARALLEL_NUM).toBe('number')
      expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) // Hardcoded legacy value
    })

    it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', async () => {
      setupEnvironment('50')
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')

      // MAX_PARALLEL_LIMIT is configurable, MAX_ITERATION_PARALLEL_NUM is not
      expect(MAX_PARALLEL_LIMIT).toBe(50)
      expect(MAX_ITERATION_PARALLEL_NUM).toBe(10)
      expect(MAX_PARALLEL_LIMIT).not.toBe(MAX_ITERATION_PARALLEL_NUM)
    })
  })

  describe('Constants Validation', () => {
    it('should validate that required constants exist and have correct types', async () => {
      const { MAX_PARALLEL_LIMIT } = await import('@/config')
      const { MIN_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
      expect(typeof MAX_PARALLEL_LIMIT).toBe('number')
      expect(typeof MIN_ITERATION_PARALLEL_NUM).toBe('number')
      expect(MAX_PARALLEL_LIMIT).toBeGreaterThanOrEqual(MIN_ITERATION_PARALLEL_NUM)
    })
  })
})

@@ -8,6 +8,7 @@ import { UserActionButtonType } from '@/app/components/workflow/nodes/human-inpu
import 'dayjs/locale/en'
import 'dayjs/locale/zh-cn'
import 'dayjs/locale/ja'
+import 'dayjs/locale/nl'

dayjs.extend(utc)
dayjs.extend(relativeTime)
@@ -45,6 +46,7 @@ const localeMap: Record<string, string> = {
  'en-US': 'en',
  'zh-Hans': 'zh-cn',
  'ja-JP': 'ja',
+  'nl-NL': 'nl',
}

export const getRelativeTime = (

@@ -98,7 +98,9 @@ const VoiceParamConfig = ({
          className="h-full w-full cursor-pointer rounded-lg border-0 bg-components-input-bg-normal py-1.5 pl-3 pr-10 focus-visible:bg-state-base-hover focus-visible:outline-none group-hover:bg-state-base-hover sm:text-sm sm:leading-6"
        >
          <span className={cn('block truncate text-left text-text-secondary', !languageItem?.name && 'text-text-tertiary')}>
-            {languageItem?.name ? t(`voice.language.${replace(languageItem?.value, '-', '')}`, { ns: 'common' }) : localLanguagePlaceholder}
+            {languageItem?.name
+              ? t(`voice.language.${replace(languageItem?.value ?? '', '-', '')}`, languageItem?.name, { ns: 'common' as const })
+              : localLanguagePlaceholder}
          </span>
          <span className="pointer-events-none absolute inset-y-0 right-0 flex items-center pr-2">
            <ChevronDownIcon
@@ -129,7 +131,7 @@ const VoiceParamConfig = ({
            <span
              className={cn('block', selected && 'font-normal')}
            >
-              {t(`voice.language.${replace((item.value), '-', '')}`, { ns: 'common' })}
+              {t(`voice.language.${replace((item.value), '-', '')}`, item.name, { ns: 'common' as const })}
            </span>
            {(selected || item.value === text2speech?.language) && (
              <span

@@ -1,5 +1,5 @@
import type { RemixiconComponentType } from '@remixicon/react'
-import { z } from 'zod'
+import * as z from 'zod'

export const InputTypeEnum = z.enum([
  'text-input',

@@ -1,6 +1,6 @@
import type { ZodNumber, ZodSchema, ZodString } from 'zod'
import type { BaseConfiguration } from './types'
-import { z } from 'zod'
+import * as z from 'zod'
import { BaseFieldType } from './types'

export const generateZodSchema = (fields: BaseConfiguration[]) => {

@@ -1,4 +1,4 @@
-import { z } from 'zod'
+import * as z from 'zod'

const ContactMethod = z.union([
  z.literal('email'),
@@ -22,10 +22,10 @@ export const UserSchema = z.object({
    .min(3, 'Surname must be at least 3 characters long')
    .regex(/^[A-Z]/, 'Surname must start with a capital letter'),
  isAcceptingTerms: z.boolean().refine(val => val, {
-    message: 'You must accept the terms and conditions',
+    error: 'You must accept the terms and conditions',
  }),
  contact: z.object({
-    email: z.string().email('Invalid email address'),
+    email: z.email('Invalid email address'),
    phone: z.string().optional(),
    preferredContactMethod: ContactMethod,
  }),

@@ -1,6 +1,6 @@
import type { ZodSchema, ZodString } from 'zod'
import type { InputFieldConfiguration } from './types'
-import { z } from 'zod'
+import * as z from 'zod'
import { SupportedFileTypes, TransferMethod } from '@/app/components/rag-pipeline/components/panel/input-field/editor/form/schema'
import { InputFieldType } from './types'

@@ -204,23 +204,10 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
          }
        }
        catch {
-          try {
-            // eslint-disable-next-line no-new-func
-            const result = new Function(`return ${trimmedContent}`)()
-            if (typeof result === 'object' && result !== null) {
-              setFinalChartOption(result)
-              setChartState('success')
-              processedRef.current = true
-              return
-            }
-          }
-          catch {
-            // If we have a complete JSON structure but it doesn't parse,
-            // it's likely an error rather than incomplete data
-            setChartState('error')
-            processedRef.current = true
-            return
-          }
+          // Avoid executing arbitrary code; require valid JSON for chart options.
+          setChartState('error')
+          processedRef.current = true
+          return
        }
      }

@@ -249,19 +236,9 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
        }
      }
      catch {
-        try {
-          // eslint-disable-next-line no-new-func
-          const result = new Function(`return ${trimmedContent}`)()
-          if (typeof result === 'object' && result !== null) {
-            setFinalChartOption(result)
-            isValidOption = true
-          }
-        }
-        catch {
-          // Both parsing methods failed, but content looks complete
-          setChartState('error')
-          processedRef.current = true
-        }
+        // Only accept JSON to avoid executing arbitrary code from the message.
+        setChartState('error')
+        processedRef.current = true
      }

      if (isValidOption) {

@@ -2,6 +2,7 @@
import type { FC } from 'react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
+import { env } from '@/env'
import ParamItem from '.'

type Props = {
@@ -11,12 +12,7 @@ type Props = {
  enable: boolean
}

-const maxTopK = (() => {
-  const configValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10)
-  if (configValue && !isNaN(configValue))
-    return configValue
-  return 10
-})()
+const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE
const VALUE_LIMIT = {
  default: 2,
  step: 1,

@ -1,6 +1,6 @@
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import { noop } from 'es-toolkit/function'
|
||||
import { z } from 'zod'
|
||||
import * as z from 'zod'
|
||||
import withValidation from '.'
|
||||
|
||||
describe('withValidation HOC', () => {
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
|
||||
import { z } from 'zod'
|
||||
import * as z from 'zod'
|
||||
import withValidation from '.'
|
||||
|
||||
// Sample components to wrap with validation
|
||||
@ -65,7 +65,7 @@ const ProductCard = ({ name, price, category, inStock }: ProductCardProps) => {
|
||||
// Create validated versions
|
||||
const userSchema = z.object({
|
||||
name: z.string().min(1, 'Name is required'),
|
||||
email: z.string().email('Invalid email'),
|
||||
email: z.email('Invalid email'),
|
||||
age: z.number().min(0).max(150),
|
||||
})
|
||||
|
||||
@ -371,7 +371,7 @@ export const ConfigurationValidation: Story = {
|
||||
)
|
||||
|
||||
const configSchema = z.object({
|
||||
apiUrl: z.string().url('Must be valid URL'),
|
||||
apiUrl: z.url('Must be valid URL'),
|
||||
timeout: z.number().min(0).max(30000),
|
||||
retries: z.number().min(0).max(5),
|
||||
debug: z.boolean(),
|
||||
@ -430,7 +430,7 @@ export const UsageDocumentation: Story = {
|
||||
<div>
|
||||
<h4 className="mb-2 text-sm font-semibold text-gray-900">Usage Example</h4>
|
||||
<pre className="overflow-x-auto rounded-lg bg-gray-900 p-4 text-xs text-gray-100">
|
||||
{`import { z } from 'zod'
|
||||
{`import * as z from 'zod'
|
||||
import withValidation from './withValidation'
|
||||
|
||||
// Define your component
|
||||
|
||||
@ -5,6 +5,7 @@ import { useTranslation } from 'react-i18next'
|
||||
import Input from '@/app/components/base/input'
|
||||
import { InputNumber } from '@/app/components/base/input-number'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { env } from '@/env'
|
||||
|
||||
const TextLabel: FC<PropsWithChildren> = (props) => {
|
||||
return <label className="text-xs font-semibold leading-none text-text-secondary">{props.children}</label>
|
||||
@ -46,7 +47,7 @@ export const DelimiterInput: FC<InputProps & { tooltip?: string }> = (props) =>
|
||||
}
|
||||
|
||||
export const MaxLengthInput: FC<InputNumberProps> = (props) => {
|
||||
const maxValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000', 10)
|
||||
const maxValue = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
|
||||
|
||||
const { t } = useTranslation()
|
||||
return (
|
||||
|
||||
@ -1,5 +1,6 @@
|
||||
import type { ParentMode, PreProcessingRule, ProcessRule, Rules, SummaryIndexSetting as SummaryIndexSettingType } from '@/models/datasets'
|
||||
import { useCallback, useRef, useState } from 'react'
|
||||
import { env } from '@/env'
|
||||
import { ChunkingMode, ProcessMode } from '@/models/datasets'
|
||||
import escape from './escape'
|
||||
import unescape from './unescape'
|
||||
@ -8,10 +9,7 @@ import unescape from './unescape'
|
||||
export const DEFAULT_SEGMENT_IDENTIFIER = '\\n\\n'
|
||||
export const DEFAULT_MAXIMUM_CHUNK_LENGTH = 1024
|
||||
export const DEFAULT_OVERLAP = 50
|
||||
export const MAXIMUM_CHUNK_TOKEN_LENGTH = Number.parseInt(
|
||||
globalThis.document?.body?.getAttribute('data-public-indexing-max-segmentation-tokens-length') || '4000',
|
||||
10,
|
||||
)
|
||||
export const MAXIMUM_CHUNK_TOKEN_LENGTH = env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH
|
||||
|
||||
export type ParentChildConfig = {
|
||||
chunkForContext: ParentMode
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import type { BaseConfiguration } from '@/app/components/base/form/form-scenarios/base/types'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
import { z } from 'zod'
|
||||
import * as z from 'zod'
|
||||
import { BaseFieldType } from '@/app/components/base/form/form-scenarios/base/types'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import Actions from './actions'
|
||||
@ -53,7 +53,7 @@ const createFailingSchema = () => {
|
||||
issues: [{ path: ['field1'], message: 'is required' }],
|
||||
},
|
||||
}),
|
||||
} as unknown as z.ZodSchema
|
||||
} as unknown as z.ZodType
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
|
||||
@ -0,0 +1,129 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import type { DocType } from '@/models/datasets'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Radio from '@/app/components/base/radio'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { useMetadataMap } from '@/hooks/use-metadata'
|
||||
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import s from '../style.module.css'
|
||||
|
||||
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
|
||||
return <div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
|
||||
}
|
||||
|
||||
const IconButton: FC<{ type: DocType, isChecked: boolean }> = ({ type, isChecked = false }) => {
|
||||
const metadataMap = useMetadataMap()
|
||||
return (
|
||||
<Tooltip popupContent={metadataMap[type].text}>
|
||||
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
|
||||
<TypeIcon
|
||||
iconName={metadataMap[type].iconName || ''}
|
||||
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
|
||||
/>
|
||||
</button>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
type DocTypeSelectorProps = {
|
||||
docType: DocType | ''
|
||||
documentType?: DocType | ''
|
||||
tempDocType: DocType | ''
|
||||
onTempDocTypeChange: (type: DocType | '') => void
|
||||
onConfirm: () => void
|
||||
onCancel: () => void
|
||||
}
|
||||
|
||||
const DocTypeSelector: FC<DocTypeSelectorProps> = ({
|
||||
docType,
|
||||
documentType,
|
||||
tempDocType,
|
||||
onTempDocTypeChange,
|
||||
onConfirm,
|
||||
onCancel,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const isFirstTime = !docType && !documentType
|
||||
const currValue = tempDocType ?? documentType
|
||||
|
||||
return (
|
||||
<>
|
||||
{isFirstTime && (
|
||||
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
|
||||
)}
|
||||
<div className={s.operationWrapper}>
|
||||
{isFirstTime && (
|
||||
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
|
||||
)}
|
||||
{documentType && (
|
||||
<>
|
||||
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
|
||||
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
|
||||
</>
|
||||
)}
|
||||
<Radio.Group value={currValue ?? ''} onChange={onTempDocTypeChange} className={s.radioGroup}>
|
||||
{CUSTOMIZABLE_DOC_TYPES.map(type => (
|
||||
<Radio key={type} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
|
||||
<IconButton type={type} isChecked={currValue === type} />
|
||||
</Radio>
|
||||
))}
|
||||
</Radio.Group>
|
||||
{isFirstTime && (
|
||||
<Button variant="primary" onClick={onConfirm} disabled={!tempDocType}>
|
||||
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
|
||||
</Button>
|
||||
)}
|
||||
{documentType && (
|
||||
<div className={s.opBtnWrapper}>
|
||||
<Button onClick={onConfirm} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">
|
||||
{t('operation.save', { ns: 'common' })}
|
||||
</Button>
|
||||
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>
|
||||
{t('operation.cancel', { ns: 'common' })}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
type DocumentTypeDisplayProps = {
|
||||
displayType: DocType | ''
|
||||
showChangeLink?: boolean
|
||||
onChangeClick?: () => void
|
||||
}
|
||||
|
||||
export const DocumentTypeDisplay: FC<DocumentTypeDisplayProps> = ({
|
||||
displayType,
|
||||
showChangeLink = false,
|
||||
onChangeClick,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const metadataMap = useMetadataMap()
|
||||
const effectiveType = displayType || 'book'
|
||||
|
||||
return (
|
||||
<div className={s.documentTypeShow}>
|
||||
{(displayType || !showChangeLink) && (
|
||||
<>
|
||||
<TypeIcon iconName={metadataMap[effectiveType]?.iconName || ''} className={s.iconShow} />
|
||||
{metadataMap[effectiveType].text}
|
||||
{showChangeLink && (
|
||||
<div className="ml-1 inline-flex items-center gap-1">
|
||||
·
|
||||
<div onClick={onChangeClick} className="cursor-pointer hover:text-text-accent">
|
||||
{t('operation.change', { ns: 'common' })}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default DocTypeSelector
|
||||
@ -0,0 +1,89 @@
|
||||
'use client'
|
||||
import type { FC, ReactNode } from 'react'
|
||||
import type { inputType } from '@/hooks/use-metadata'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
|
||||
import Input from '@/app/components/base/input'
|
||||
import { SimpleSelect } from '@/app/components/base/select'
|
||||
import { getTextWidthWithCanvas } from '@/utils'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import s from '../style.module.css'
|
||||
|
||||
type FieldInfoProps = {
|
||||
label: string
|
||||
value?: string
|
||||
valueIcon?: ReactNode
|
||||
displayedValue?: string
|
||||
defaultValue?: string
|
||||
showEdit?: boolean
|
||||
inputType?: inputType
|
||||
selectOptions?: Array<{ value: string, name: string }>
|
||||
onUpdate?: (v: string) => void
|
||||
}
|
||||
|
||||
const FieldInfo: FC<FieldInfoProps> = ({
|
||||
label,
|
||||
value = '',
|
||||
valueIcon,
|
||||
displayedValue = '',
|
||||
defaultValue,
|
||||
showEdit = false,
|
||||
inputType = 'input',
|
||||
selectOptions = [],
|
||||
onUpdate,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
|
||||
const editAlignTop = showEdit && inputType === 'textarea'
|
||||
const readAlignTop = !showEdit && textNeedWrap
|
||||
|
||||
const renderContent = () => {
|
||||
if (!showEdit)
|
||||
return displayedValue
|
||||
|
||||
if (inputType === 'select') {
|
||||
return (
|
||||
<SimpleSelect
|
||||
onSelect={({ value }) => onUpdate?.(value as string)}
|
||||
items={selectOptions}
|
||||
defaultValue={value}
|
||||
className={s.select}
|
||||
wrapperClassName={s.selectWrapper}
|
||||
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
if (inputType === 'textarea') {
|
||||
return (
|
||||
<AutoHeightTextarea
|
||||
onChange={e => onUpdate?.(e.target.value)}
|
||||
value={value}
|
||||
className={s.textArea}
|
||||
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<Input
|
||||
onChange={e => onUpdate?.(e.target.value)}
|
||||
value={value}
|
||||
defaultValue={defaultValue}
|
||||
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
|
||||
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
|
||||
<div className="flex grow items-center gap-1 text-text-secondary">
|
||||
{valueIcon}
|
||||
{renderContent()}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default FieldInfo
|
||||
@ -0,0 +1,88 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import type { metadataType } from '@/hooks/use-metadata'
|
||||
import type { FullDocumentDetail } from '@/models/datasets'
|
||||
import { get } from 'es-toolkit/compat'
|
||||
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
|
||||
import FieldInfo from './field-info'
|
||||
|
||||
const map2Options = (map: Record<string, string>) => {
|
||||
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
|
||||
}
|
||||
|
||||
function useCategoryMapResolver(mainField: metadataType | '') {
|
||||
const languageMap = useLanguages()
|
||||
const bookCategoryMap = useBookCategories()
|
||||
const personalDocCategoryMap = usePersonalDocCategories()
|
||||
const businessDocCategoryMap = useBusinessDocCategories()
|
||||
|
||||
return (field: string): Record<string, string> => {
|
||||
if (field === 'language')
|
||||
return languageMap
|
||||
if (field === 'category' && mainField === 'book')
|
||||
return bookCategoryMap
|
||||
if (field === 'document_type') {
|
||||
if (mainField === 'personal_document')
|
||||
return personalDocCategoryMap
|
||||
if (mainField === 'business_document')
|
||||
return businessDocCategoryMap
|
||||
}
|
||||
return {}
|
||||
}
|
||||
}
|
||||
|
||||
type MetadataFieldListProps = {
|
||||
mainField: metadataType | ''
|
||||
canEdit?: boolean
|
||||
metadata?: Record<string, string>
|
||||
docDetail?: FullDocumentDetail
|
||||
onFieldUpdate?: (field: string, value: string) => void
|
||||
}
|
||||
|
||||
const MetadataFieldList: FC<MetadataFieldListProps> = ({
|
||||
mainField,
|
||||
canEdit = false,
|
||||
metadata,
|
||||
docDetail,
|
||||
onFieldUpdate,
|
||||
}) => {
|
||||
const metadataMap = useMetadataMap()
|
||||
const getCategoryMap = useCategoryMapResolver(mainField)
|
||||
|
||||
if (!mainField)
|
||||
return null
|
||||
|
||||
const fieldMap = metadataMap[mainField]?.subFieldsMap
|
||||
const isFixedField = ['originInfo', 'technicalParameters'].includes(mainField)
|
||||
const sourceData = isFixedField ? docDetail : metadata
|
||||
|
||||
const getDisplayValue = (field: string) => {
|
||||
const val = get(sourceData, field, '')
|
||||
if (!val && val !== 0)
|
||||
return '-'
|
||||
if (fieldMap[field]?.inputType === 'select')
|
||||
return getCategoryMap(field)[val]
|
||||
if (fieldMap[field]?.render)
|
||||
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
|
||||
return val
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-1">
|
||||
{Object.keys(fieldMap).map(field => (
|
||||
<FieldInfo
|
||||
key={fieldMap[field]?.label}
|
||||
label={fieldMap[field]?.label}
|
||||
displayedValue={getDisplayValue(field)}
|
||||
value={get(sourceData, field, '')}
|
||||
inputType={fieldMap[field]?.inputType || 'input'}
|
||||
showEdit={canEdit}
|
||||
onUpdate={val => onFieldUpdate?.(field, val)}
|
||||
selectOptions={map2Options(getCategoryMap(field))}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default MetadataFieldList
|
||||
@ -0,0 +1,137 @@
|
||||
'use client'
|
||||
import type { CommonResponse } from '@/models/common'
|
||||
import type { DocType, FullDocumentDetail } from '@/models/datasets'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import { modifyDocMetadata } from '@/service/datasets'
|
||||
import { asyncRunSafe } from '@/utils'
|
||||
import { useDocumentContext } from '../../context'
|
||||
|
||||
type MetadataState = {
|
||||
documentType?: DocType | ''
|
||||
metadata: Record<string, string>
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize raw doc_type: treat 'others' as empty string.
|
||||
*/
|
||||
const normalizeDocType = (rawDocType: string): DocType | '' => {
|
||||
return rawDocType === 'others' ? '' : rawDocType as DocType | ''
|
||||
}
|
||||
|
||||
type UseMetadataStateOptions = {
|
||||
docDetail?: FullDocumentDetail
|
||||
onUpdate?: () => void
|
||||
}
|
||||
|
||||
export function useMetadataState({ docDetail, onUpdate }: UseMetadataStateOptions) {
|
||||
const { doc_metadata = {} } = docDetail || {}
|
||||
const rawDocType = docDetail?.doc_type ?? ''
|
||||
const docType = normalizeDocType(rawDocType)
|
||||
|
||||
const { t } = useTranslation()
|
||||
const { notify } = useContext(ToastContext)
|
||||
const datasetId = useDocumentContext(s => s.datasetId)
|
||||
const documentId = useDocumentContext(s => s.documentId)
|
||||
|
||||
// If no documentType yet, start in editing + showDocTypes mode
|
||||
const [editStatus, setEditStatus] = useState(!docType)
|
||||
const [metadataParams, setMetadataParams] = useState<MetadataState>(
|
||||
docType
|
||||
? { documentType: docType, metadata: (doc_metadata || {}) as Record<string, string> }
|
||||
: { metadata: {} },
|
||||
)
|
||||
const [showDocTypes, setShowDocTypes] = useState(!docType)
|
||||
const [tempDocType, setTempDocType] = useState<DocType | ''>('')
|
||||
const [saveLoading, setSaveLoading] = useState(false)
|
||||
|
||||
// Sync local state when the upstream docDetail changes (e.g. after save or navigation).
|
||||
// These setters are intentionally called together to batch-reset multiple pieces
|
||||
// of derived editing state that cannot be expressed as pure derived values.
|
||||
useEffect(() => {
|
||||
if (docDetail?.doc_type) {
|
||||
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
|
||||
setEditStatus(false)
|
||||
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
|
||||
setShowDocTypes(false)
|
||||
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
|
||||
setTempDocType(docType)
|
||||
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
|
||||
setMetadataParams({
|
||||
documentType: docType,
|
||||
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
|
||||
})
|
||||
}
|
||||
}, [docDetail?.doc_type, docDetail?.doc_metadata, docType])
|
||||
|
||||
const confirmDocType = () => {
|
||||
if (!tempDocType)
|
||||
return
|
||||
setMetadataParams({
|
||||
documentType: tempDocType,
|
||||
// Clear metadata when switching to a different doc type
|
||||
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {},
|
||||
})
|
||||
setEditStatus(true)
|
||||
setShowDocTypes(false)
|
||||
}
|
||||
|
||||
const cancelDocType = () => {
|
||||
setTempDocType(metadataParams.documentType ?? '')
|
||||
setEditStatus(true)
|
||||
setShowDocTypes(false)
|
||||
}
|
||||
|
||||
const enableEdit = () => {
|
||||
setEditStatus(true)
|
||||
}
|
||||
|
||||
const cancelEdit = () => {
|
||||
setMetadataParams({ documentType: docType || '', metadata: { ...(docDetail?.doc_metadata || {}) } })
|
||||
setEditStatus(!docType)
|
||||
if (!docType)
|
||||
setShowDocTypes(true)
|
||||
}
|
||||
|
||||
const saveMetadata = async () => {
|
||||
setSaveLoading(true)
|
||||
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
|
||||
datasetId,
|
||||
documentId,
|
||||
body: {
|
||||
doc_type: metadataParams.documentType || docType || '',
|
||||
doc_metadata: metadataParams.metadata,
|
||||
},
|
||||
}) as Promise<CommonResponse>)
|
||||
if (!e)
|
||||
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
|
||||
else
|
||||
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
|
||||
onUpdate?.()
|
||||
setEditStatus(false)
|
||||
setSaveLoading(false)
|
||||
}
|
||||
|
||||
const updateMetadataField = (field: string, value: string) => {
|
||||
setMetadataParams(prev => ({ ...prev, metadata: { ...prev.metadata, [field]: value } }))
|
||||
}
|
||||
|
||||
return {
|
||||
docType,
|
||||
editStatus,
|
||||
showDocTypes,
|
||||
tempDocType,
|
||||
saveLoading,
|
||||
metadataParams,
|
||||
setTempDocType,
|
||||
setShowDocTypes,
|
||||
confirmDocType,
|
||||
cancelDocType,
|
||||
enableEdit,
|
||||
cancelEdit,
|
||||
saveMetadata,
|
||||
updateMetadataField,
|
||||
}
|
||||
}
|
||||
@ -1,7 +1,6 @@
|
||||
import type { FullDocumentDetail } from '@/models/datasets'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import Metadata, { FieldInfo } from './index'
|
||||
|
||||
// Mock document context
|
||||
@ -121,7 +120,6 @@ vi.mock('@/hooks/use-metadata', () => ({
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock getTextWidthWithCanvas
|
||||
vi.mock('@/utils', () => ({
|
||||
asyncRunSafe: async (promise: Promise<unknown>) => {
|
||||
try {
|
||||
@ -135,33 +133,32 @@ vi.mock('@/utils', () => ({
|
||||
getTextWidthWithCanvas: () => 100,
|
||||
}))
|
||||
|
||||
const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
|
||||
id: 'doc-1',
|
||||
name: 'Test Document',
|
||||
doc_type: 'book',
|
||||
doc_metadata: {
|
||||
title: 'Test Book',
|
||||
author: 'Test Author',
|
||||
language: 'en',
|
||||
},
|
||||
data_source_type: 'upload_file',
|
||||
segment_count: 10,
|
||||
hit_count: 5,
|
||||
...overrides,
|
||||
} as FullDocumentDetail)
|
||||
|
||||
describe('Metadata', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
|
||||
id: 'doc-1',
|
||||
name: 'Test Document',
|
||||
doc_type: 'book',
|
||||
doc_metadata: {
|
||||
title: 'Test Book',
|
||||
author: 'Test Author',
|
||||
language: 'en',
|
||||
},
|
||||
data_source_type: 'upload_file',
|
||||
segment_count: 10,
|
||||
hit_count: 5,
|
||||
...overrides,
|
||||
} as FullDocumentDetail)
|
||||
|
||||
const defaultProps = {
|
||||
docDetail: createMockDocDetail(),
|
||||
loading: false,
|
||||
onUpdate: vi.fn(),
|
||||
}
|
||||
|
||||
// Rendering tests
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing', () => {
|
||||
// Arrange & Act
|
||||
@ -191,7 +188,7 @@ describe('Metadata', () => {
|
||||
// Arrange & Act
|
||||
render(<Metadata {...defaultProps} loading={true} />)
|
||||
|
||||
// Assert - Loading component should be rendered
|
||||
// Assert - Loading component should be rendered, title should not
|
||||
expect(screen.queryByText(/metadata\.title/i)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
@ -204,7 +201,7 @@ describe('Metadata', () => {
|
||||
})
|
||||
})
|
||||
|
||||
// Edit mode tests
|
||||
// Edit mode (tests useMetadataState hook integration)
|
||||
describe('Edit Mode', () => {
|
||||
it('should enter edit mode when edit button is clicked', () => {
|
||||
// Arrange
|
||||
@ -303,7 +300,7 @@ describe('Metadata', () => {
|
||||
})
|
||||
})
|
||||
|
||||
// Document type selection
|
||||
// Document type selection (tests DocTypeSelector sub-component integration)
|
||||
describe('Document Type Selection', () => {
|
||||
it('should show doc type selection when no doc_type exists', () => {
|
||||
// Arrange
|
||||
@ -353,13 +350,13 @@ describe('Metadata', () => {
|
||||
})
|
||||
})
|
||||
|
||||
// Origin info and technical parameters
|
||||
// Fixed fields (tests MetadataFieldList sub-component integration)
|
||||
describe('Fixed Fields', () => {
|
||||
it('should render origin info fields', () => {
|
||||
// Arrange & Act
|
||||
render(<Metadata {...defaultProps} />)
|
||||
|
||||
// Assert - Origin info fields should be displayed
|
||||
// Assert
|
||||
expect(screen.getByText('Data Source Type')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
@ -382,7 +379,7 @@ describe('Metadata', () => {
|
||||
// Act
|
||||
const { container } = render(<Metadata {...defaultProps} docDetail={docDetail} />)
|
||||
|
||||
// Assert - should render without crashing
|
||||
// Assert
|
||||
expect(container.firstChild).toBeInTheDocument()
|
||||
})
|
||||
|
||||
@ -390,7 +387,7 @@ describe('Metadata', () => {
|
||||
// Arrange & Act
|
||||
const { container } = render(<Metadata {...defaultProps} docDetail={undefined} loading={false} />)
|
||||
|
||||
// Assert - should render without crashing
|
||||
// Assert
|
||||
expect(container.firstChild).toBeInTheDocument()
|
||||
})
|
||||
|
||||
@ -425,7 +422,6 @@ describe('Metadata', () => {
|
||||
})
|
||||
})
|
||||
|
||||
// FieldInfo component tests
|
||||
describe('FieldInfo', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
@ -543,3 +539,149 @@ describe('FieldInfo', () => {
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// --- useMetadataState hook coverage tests (via component interactions) ---
|
||||
describe('useMetadataState coverage', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
const defaultProps = {
|
||||
docDetail: createMockDocDetail(),
|
||||
loading: false,
|
||||
onUpdate: vi.fn(),
|
||||
}
|
||||
|
||||
describe('cancelDocType', () => {
|
||||
it('should cancel doc type change and return to edit mode', () => {
|
||||
// Arrange
|
||||
render(<Metadata {...defaultProps} />)
|
||||
|
||||
// Enter edit mode → click change to open doc type selector
|
||||
fireEvent.click(screen.getByText(/operation\.edit/i))
|
||||
fireEvent.click(screen.getByText(/operation\.change/i))
|
||||
|
||||
// Now in doc type selector mode — should show cancel button
|
||||
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
|
||||
|
||||
// Act — cancel the doc type change
|
||||
fireEvent.click(screen.getByText(/operation\.cancel/i))
|
||||
|
||||
// Assert — should be back to edit mode (cancel + save buttons visible)
|
||||
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('confirmDocType', () => {
|
||||
it('should confirm same doc type and return to edit mode keeping metadata', () => {
|
||||
// Arrange — useEffect syncs tempDocType='book' from docDetail
|
||||
render(<Metadata {...defaultProps} />)
|
||||
|
||||
// Enter edit mode → click change to open doc type selector
|
||||
fireEvent.click(screen.getByText(/operation\.edit/i))
|
||||
fireEvent.click(screen.getByText(/operation\.change/i))
|
||||
|
||||
// DocTypeSelector shows save/cancel buttons
|
||||
expect(screen.getByText(/metadata\.docTypeChangeTitle/i)).toBeInTheDocument()
|
||||
|
||||
// Act — click save to confirm same doc type (tempDocType='book')
|
||||
fireEvent.click(screen.getByText(/operation\.save/i))
|
||||
|
||||
// Assert — should return to edit mode with metadata fields visible
|
||||
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('cancelEdit when no docType', () => {
|
||||
it('should show doc type selection when cancel is clicked with doc_type others', () => {
|
||||
// Arrange — doc with 'others' type normalizes to '' internally.
|
||||
// The useEffect sees doc_type='others' (truthy) and syncs state,
|
||||
// so the component initially shows view mode. Enter edit → cancel to trigger cancelEdit.
|
||||
const docDetail = createMockDocDetail({ doc_type: 'others' })
|
||||
render(<Metadata {...defaultProps} docDetail={docDetail} />)
|
||||
|
||||
// 'others' is normalized to '' → useEffect fires (doc_type truthy) → view mode
|
||||
// The rendered type uses default 'book' fallback for display
|
||||
expect(screen.getByText(/operation\.edit/i)).toBeInTheDocument()
|
||||
|
||||
// Enter edit mode
|
||||
fireEvent.click(screen.getByText(/operation\.edit/i))
|
||||
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
|
||||
|
||||
// Act — cancel edit; internally docType is '' so cancelEdit goes to showDocTypes
|
||||
fireEvent.click(screen.getByText(/operation\.cancel/i))
|
||||
|
||||
// Assert — should show doc type selection since normalized docType was ''
|
||||
expect(screen.getByText(/metadata\.docTypeSelectTitle/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('updateMetadataField', () => {
|
||||
it('should update metadata field value via input', () => {
|
||||
// Arrange
|
||||
render(<Metadata {...defaultProps} />)
|
||||
|
||||
// Enter edit mode
|
||||
fireEvent.click(screen.getByText(/operation\.edit/i))
|
||||
|
||||
// Act — find an input and change its value (Title field)
|
||||
const inputs = screen.getAllByRole('textbox')
|
||||
expect(inputs.length).toBeGreaterThan(0)
|
||||
fireEvent.change(inputs[0], { target: { value: 'Updated Title' } })
|
||||
|
||||
// Assert — the input should have the new value
|
||||
expect(inputs[0]).toHaveValue('Updated Title')
|
||||
})
|
||||
})
|
||||
|
||||
describe('saveMetadata calls modifyDocMetadata with correct body', () => {
|
||||
it('should pass doc_type and doc_metadata in save request', async () => {
|
||||
// Arrange
|
||||
mockModifyDocMetadata.mockResolvedValueOnce({})
|
||||
render(<Metadata {...defaultProps} />)
|
||||
|
||||
// Enter edit mode
|
||||
fireEvent.click(screen.getByText(/operation\.edit/i))
|
||||
|
||||
// Act — save
|
||||
fireEvent.click(screen.getByText(/operation\.save/i))
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(mockModifyDocMetadata).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
datasetId: 'test-dataset-id',
|
||||
documentId: 'test-document-id',
|
||||
body: expect.objectContaining({
|
||||
doc_type: 'book',
|
||||
}),
|
||||
}),
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('useEffect sync', () => {
|
||||
it('should handle doc_metadata being null in effect sync', () => {
|
||||
// Arrange — first render with null metadata
|
||||
const { rerender } = render(
|
||||
<Metadata
|
||||
{...defaultProps}
|
||||
docDetail={createMockDocDetail({ doc_metadata: null })}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Act — rerender with a different doc_type to trigger useEffect sync
|
||||
rerender(
|
||||
<Metadata
|
||||
{...defaultProps}
|
||||
docDetail={createMockDocDetail({ doc_type: 'paper', doc_metadata: null })}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Assert — should render without crashing, showing Paper type
|
||||
expect(screen.getByText('Paper')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@ -1,422 +1,124 @@
|
||||
'use client'
|
||||
import type { FC, ReactNode } from 'react'
|
||||
import type { inputType, metadataType } from '@/hooks/use-metadata'
|
||||
import type { CommonResponse } from '@/models/common'
|
||||
import type { DocType, FullDocumentDetail } from '@/models/datasets'
|
||||
import type { FC } from 'react'
|
||||
import type { FullDocumentDetail } from '@/models/datasets'
|
||||
import { PencilIcon } from '@heroicons/react/24/outline'
|
||||
import { get } from 'es-toolkit/compat'
|
||||
import * as React from 'react'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
|
||||
import Button from '@/app/components/base/button'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import Radio from '@/app/components/base/radio'
|
||||
import { SimpleSelect } from '@/app/components/base/select'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
|
||||
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
|
||||
import { modifyDocMetadata } from '@/service/datasets'
|
||||
import { asyncRunSafe, getTextWidthWithCanvas } from '@/utils'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { useDocumentContext } from '../context'
|
||||
import { useMetadataMap } from '@/hooks/use-metadata'
|
||||
import DocTypeSelector, { DocumentTypeDisplay } from './components/doc-type-selector'
|
||||
import MetadataFieldList from './components/metadata-field-list'
|
||||
import { useMetadataState } from './hooks/use-metadata-state'
|
||||
import s from './style.module.css'
|
||||
|
||||
const map2Options = (map: { [key: string]: string }) => {
|
||||
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
|
||||
}
|
||||
export { default as FieldInfo } from './components/field-info'
|
||||
|
||||
type IFieldInfoProps = {
|
||||
label: string
|
||||
value?: string
|
||||
valueIcon?: ReactNode
|
||||
displayedValue?: string
|
||||
defaultValue?: string
|
||||
showEdit?: boolean
|
||||
inputType?: inputType
|
||||
selectOptions?: Array<{ value: string, name: string }>
|
||||
onUpdate?: (v: any) => void
|
||||
}
|
||||
|
||||
export const FieldInfo: FC<IFieldInfoProps> = ({
|
||||
label,
|
||||
value = '',
|
||||
valueIcon,
|
||||
displayedValue = '',
|
||||
defaultValue,
|
||||
showEdit = false,
|
||||
inputType = 'input',
|
||||
selectOptions = [],
|
||||
onUpdate,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
|
||||
const editAlignTop = showEdit && inputType === 'textarea'
|
||||
const readAlignTop = !showEdit && textNeedWrap
|
||||
|
||||
const renderContent = () => {
|
||||
if (!showEdit)
|
||||
return displayedValue
|
||||
|
||||
if (inputType === 'select') {
|
||||
return (
|
||||
<SimpleSelect
|
||||
onSelect={({ value }) => onUpdate?.(value as string)}
|
||||
items={selectOptions}
|
||||
defaultValue={value}
|
||||
className={s.select}
|
||||
wrapperClassName={s.selectWrapper}
|
||||
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
if (inputType === 'textarea') {
|
||||
return (
|
||||
<AutoHeightTextarea
|
||||
onChange={e => onUpdate?.(e.target.value)}
|
||||
value={value}
|
||||
className={s.textArea}
|
||||
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<Input
|
||||
onChange={e => onUpdate?.(e.target.value)}
|
||||
value={value}
|
||||
defaultValue={defaultValue}
|
||||
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
|
||||
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
|
||||
<div className="flex grow items-center gap-1 text-text-secondary">
|
||||
{valueIcon}
|
||||
{renderContent()}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
|
||||
return (
|
||||
<div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
|
||||
)
|
||||
}
|
||||
|
||||
const IconButton: FC<{
|
||||
type: DocType
|
||||
isChecked: boolean
|
||||
}> = ({ type, isChecked = false }) => {
|
||||
const metadataMap = useMetadataMap()
|
||||
|
||||
return (
|
||||
<Tooltip
|
||||
popupContent={metadataMap[type].text}
|
||||
>
|
||||
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
|
||||
<TypeIcon
|
||||
iconName={metadataMap[type].iconName || ''}
|
||||
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
|
||||
/>
|
||||
</button>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
type IMetadataProps = {
|
||||
type MetadataProps = {
|
||||
docDetail?: FullDocumentDetail
|
||||
loading: boolean
|
||||
onUpdate: () => void
|
||||
}
|
||||
|
||||
type MetadataState = {
|
||||
documentType?: DocType | ''
|
||||
metadata: Record<string, string>
|
||||
}
|
||||
|
||||
const Metadata: FC<IMetadataProps> = ({ docDetail, loading, onUpdate }) => {
|
||||
const { doc_metadata = {} } = docDetail || {}
|
||||
const rawDocType = docDetail?.doc_type ?? ''
|
||||
const doc_type = rawDocType === 'others' ? '' : rawDocType
|
||||
|
||||
const Metadata: FC<MetadataProps> = ({ docDetail, loading, onUpdate }) => {
|
||||
const { t } = useTranslation()
|
||||
const metadataMap = useMetadataMap()
|
||||
const languageMap = useLanguages()
|
||||
const bookCategoryMap = useBookCategories()
|
||||
const personalDocCategoryMap = usePersonalDocCategories()
|
||||
const businessDocCategoryMap = useBusinessDocCategories()
|
||||
const [editStatus, setEditStatus] = useState(!doc_type) // if no documentType, in editing status by default
|
||||
// the initial values are according to the documentType
|
||||
const [metadataParams, setMetadataParams] = useState<MetadataState>(
|
||||
doc_type
|
||||
? {
|
||||
documentType: doc_type as DocType,
|
||||
metadata: (doc_metadata || {}) as Record<string, string>,
|
||||
}
|
||||
: { metadata: {} },
|
||||
)
|
||||
const [showDocTypes, setShowDocTypes] = useState(!doc_type) // whether show doc types
|
||||
const [tempDocType, setTempDocType] = useState<DocType | ''>('') // for remember icon click
|
||||
const [saveLoading, setSaveLoading] = useState(false)
|
||||
|
||||
const { notify } = useContext(ToastContext)
|
||||
const datasetId = useDocumentContext(s => s.datasetId)
|
||||
const documentId = useDocumentContext(s => s.documentId)
|
||||
|
||||
useEffect(() => {
|
||||
if (docDetail?.doc_type) {
|
||||
setEditStatus(false)
|
||||
setShowDocTypes(false)
|
||||
setTempDocType(doc_type as DocType | '')
|
||||
setMetadataParams({
|
||||
documentType: doc_type as DocType | '',
|
||||
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
|
||||
})
|
||||
}
|
||||
}, [docDetail?.doc_type, docDetail?.doc_metadata, doc_type])
|
||||
|
||||
// confirm doc type
|
||||
const confirmDocType = () => {
|
||||
if (!tempDocType)
|
||||
return
|
||||
setMetadataParams({
|
||||
documentType: tempDocType,
|
||||
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {} as Record<string, string>, // change doc type, clear metadata
|
||||
})
|
||||
setEditStatus(true)
|
||||
setShowDocTypes(false)
|
||||
}
|
||||
|
||||
// cancel doc type
|
||||
const cancelDocType = () => {
|
||||
setTempDocType(metadataParams.documentType ?? '')
|
||||
setEditStatus(true)
|
||||
setShowDocTypes(false)
|
||||
}
|
||||
|
||||
// show doc type select
|
||||
const renderSelectDocType = () => {
|
||||
const { documentType } = metadataParams
|
||||
const {
|
||||
docType,
|
||||
editStatus,
|
||||
showDocTypes,
|
||||
tempDocType,
|
||||
saveLoading,
|
||||
metadataParams,
|
||||
setTempDocType,
|
||||
setShowDocTypes,
|
||||
confirmDocType,
|
||||
cancelDocType,
|
||||
enableEdit,
|
||||
cancelEdit,
|
||||
saveMetadata,
|
||||
updateMetadataField,
|
||||
} = useMetadataState({ docDetail, onUpdate })
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<>
|
||||
{!doc_type && !documentType && (
|
||||
<>
|
||||
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
|
||||
</>
|
||||
)}
|
||||
<div className={s.operationWrapper}>
|
||||
{!doc_type && !documentType && (
|
||||
<>
|
||||
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
|
||||
</>
|
||||
)}
|
||||
{documentType && (
|
||||
<>
|
||||
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
|
||||
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
|
||||
</>
|
||||
)}
|
||||
<Radio.Group value={tempDocType ?? documentType ?? ''} onChange={setTempDocType} className={s.radioGroup}>
|
||||
{CUSTOMIZABLE_DOC_TYPES.map((type, index) => {
|
||||
const currValue = tempDocType ?? documentType
|
||||
return (
|
||||
<Radio key={index} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
|
||||
<IconButton
|
||||
type={type}
|
||||
isChecked={currValue === type}
|
||||
/>
|
||||
</Radio>
|
||||
)
|
||||
})}
|
||||
</Radio.Group>
|
||||
{!doc_type && !documentType && (
|
||||
<Button
|
||||
variant="primary"
|
||||
onClick={confirmDocType}
|
||||
disabled={!tempDocType}
|
||||
>
|
||||
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
|
||||
</Button>
|
||||
)}
|
||||
{documentType && (
|
||||
<div className={s.opBtnWrapper}>
|
||||
<Button onClick={confirmDocType} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">{t('operation.save', { ns: 'common' })}</Button>
|
||||
<Button onClick={cancelDocType} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
// show metadata info and edit
|
||||
const renderFieldInfos = ({ mainField = 'book', canEdit }: { mainField?: metadataType | '', canEdit?: boolean }) => {
|
||||
if (!mainField)
|
||||
return null
|
||||
const fieldMap = metadataMap[mainField]?.subFieldsMap
|
||||
const sourceData = ['originInfo', 'technicalParameters'].includes(mainField) ? docDetail : metadataParams.metadata
|
||||
|
||||
const getTargetMap = (field: string) => {
|
||||
if (field === 'language')
|
||||
return languageMap
|
||||
if (field === 'category' && mainField === 'book')
|
||||
return bookCategoryMap
|
||||
|
||||
if (field === 'document_type') {
|
||||
if (mainField === 'personal_document')
|
||||
return personalDocCategoryMap
|
||||
if (mainField === 'business_document')
|
||||
return businessDocCategoryMap
|
||||
}
|
||||
return {} as any
|
||||
}
|
||||
|
||||
const getTargetValue = (field: string) => {
|
||||
const val = get(sourceData, field, '')
|
||||
if (!val && val !== 0)
|
||||
return '-'
|
||||
if (fieldMap[field]?.inputType === 'select')
|
||||
return getTargetMap(field)[val]
|
||||
if (fieldMap[field]?.render)
|
||||
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
|
||||
return val
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-col gap-1">
|
||||
{Object.keys(fieldMap).map((field) => {
|
||||
return (
|
||||
<FieldInfo
|
||||
key={fieldMap[field]?.label}
|
||||
label={fieldMap[field]?.label}
|
||||
displayedValue={getTargetValue(field)}
|
||||
value={get(sourceData, field, '')}
|
||||
inputType={fieldMap[field]?.inputType || 'input'}
|
||||
showEdit={canEdit}
|
||||
onUpdate={(val) => {
|
||||
setMetadataParams(pre => ({ ...pre, metadata: { ...pre.metadata, [field]: val } }))
|
||||
}}
|
||||
selectOptions={map2Options(getTargetMap(field))}
|
||||
/>
|
||||
)
|
||||
})}
|
||||
<div className={`${s.main} bg-gray-25`}>
|
||||
<Loading type="app" />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
const enabledEdit = () => {
|
||||
setEditStatus(true)
|
||||
}
|
||||
|
||||
const onCancel = () => {
|
||||
setMetadataParams({ documentType: doc_type || '', metadata: { ...docDetail?.doc_metadata } })
|
||||
setEditStatus(!doc_type)
|
||||
if (!doc_type)
|
||||
setShowDocTypes(true)
|
||||
}
|
||||
|
||||
const onSave = async () => {
|
||||
setSaveLoading(true)
|
||||
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
|
||||
datasetId,
|
||||
documentId,
|
||||
body: {
|
||||
doc_type: metadataParams.documentType || doc_type || '',
|
||||
doc_metadata: metadataParams.metadata,
|
||||
},
|
||||
}) as Promise<CommonResponse>)
|
||||
if (!e)
|
||||
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
|
||||
else
|
||||
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
|
||||
onUpdate?.()
|
||||
setEditStatus(false)
|
||||
setSaveLoading(false)
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={`${s.main} ${editStatus ? 'bg-white' : 'bg-gray-25'}`}>
|
||||
{loading
|
||||
? (<Loading type="app" />)
|
||||
: (
|
||||
<>
|
||||
<div className={s.titleWrapper}>
|
||||
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
|
||||
{!editStatus
|
||||
? (
|
||||
<Button onClick={enabledEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
|
||||
<PencilIcon className={s.opIcon} />
|
||||
{t('operation.edit', { ns: 'common' })}
|
||||
</Button>
|
||||
)
|
||||
: showDocTypes
|
||||
? null
|
||||
: (
|
||||
<div className={s.opBtnWrapper}>
|
||||
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
|
||||
<Button
|
||||
onClick={onSave}
|
||||
className={`${s.opBtn} ${s.opSaveBtn}`}
|
||||
variant="primary"
|
||||
loading={saveLoading}
|
||||
>
|
||||
{t('operation.save', { ns: 'common' })}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
{/* Header: title + action buttons */}
|
||||
<div className={s.titleWrapper}>
|
||||
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
|
||||
{!editStatus
|
||||
? (
|
||||
<Button onClick={enableEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
|
||||
<PencilIcon className={s.opIcon} />
|
||||
{t('operation.edit', { ns: 'common' })}
|
||||
</Button>
|
||||
)
|
||||
: !showDocTypes && (
|
||||
<div className={s.opBtnWrapper}>
|
||||
<Button onClick={cancelEdit} className={`${s.opBtn} ${s.opCancelBtn}`}>
|
||||
{t('operation.cancel', { ns: 'common' })}
|
||||
</Button>
|
||||
<Button onClick={saveMetadata} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary" loading={saveLoading}>
|
||||
{t('operation.save', { ns: 'common' })}
|
||||
</Button>
|
||||
</div>
|
||||
{/* show selected doc type and changing entry */}
|
||||
{!editStatus
|
||||
? (
|
||||
<div className={s.documentTypeShow}>
|
||||
<TypeIcon iconName={metadataMap[doc_type || 'book']?.iconName || ''} className={s.iconShow} />
|
||||
{metadataMap[doc_type || 'book'].text}
|
||||
</div>
|
||||
)
|
||||
: showDocTypes
|
||||
? null
|
||||
: (
|
||||
<div className={s.documentTypeShow}>
|
||||
{metadataParams.documentType && (
|
||||
<>
|
||||
<TypeIcon iconName={metadataMap[metadataParams.documentType || 'book'].iconName || ''} className={s.iconShow} />
|
||||
{metadataMap[metadataParams.documentType || 'book'].text}
|
||||
{editStatus && (
|
||||
<div className="ml-1 inline-flex items-center gap-1">
|
||||
·
|
||||
<div
|
||||
onClick={() => { setShowDocTypes(true) }}
|
||||
className="cursor-pointer hover:text-text-accent"
|
||||
>
|
||||
{t('operation.change', { ns: 'common' })}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{(!doc_type && showDocTypes) ? null : <Divider />}
|
||||
{showDocTypes ? renderSelectDocType() : renderFieldInfos({ mainField: metadataParams.documentType, canEdit: editStatus })}
|
||||
{/* show fixed fields */}
|
||||
<Divider />
|
||||
{renderFieldInfos({ mainField: 'originInfo', canEdit: false })}
|
||||
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
|
||||
<Divider />
|
||||
{renderFieldInfos({ mainField: 'technicalParameters', canEdit: false })}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Document type display / selector */}
|
||||
{!editStatus
|
||||
? <DocumentTypeDisplay displayType={docType} />
|
||||
: showDocTypes
|
||||
? null
|
||||
: (
|
||||
<DocumentTypeDisplay
|
||||
displayType={metadataParams.documentType || ''}
|
||||
showChangeLink={editStatus}
|
||||
onChangeClick={() => setShowDocTypes(true)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Divider between type display and fields (skip when in first-time selection) */}
|
||||
{(!docType && showDocTypes) ? null : <Divider />}
|
||||
|
||||
{/* Doc type selector or editable metadata fields */}
|
||||
{showDocTypes
|
||||
? (
|
||||
<DocTypeSelector
|
||||
docType={docType}
|
||||
documentType={metadataParams.documentType}
|
||||
tempDocType={tempDocType}
|
||||
onTempDocTypeChange={setTempDocType}
|
||||
onConfirm={confirmDocType}
|
||||
onCancel={cancelDocType}
|
||||
/>
|
||||
)
|
||||
: (
|
||||
<MetadataFieldList
|
||||
mainField={metadataParams.documentType || ''}
|
||||
canEdit={editStatus}
|
||||
metadata={metadataParams.metadata}
|
||||
docDetail={docDetail}
|
||||
onFieldUpdate={updateMetadataField}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Fixed fields: origin info */}
|
||||
<Divider />
|
||||
<MetadataFieldList mainField="originInfo" docDetail={docDetail} />
|
||||
|
||||
{/* Fixed fields: technical parameters */}
|
||||
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
|
||||
<Divider />
|
||||
<MetadataFieldList mainField="technicalParameters" docDetail={docDetail} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@ -28,6 +28,7 @@ import { useGlobalPublicStore } from '@/context/global-public-context'
|
||||
import { useDocLink } from '@/context/i18n'
|
||||
import { useModalContext } from '@/context/modal-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { env } from '@/env'
|
||||
import { useLogout } from '@/service/use-common'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import AccountAbout from '../account-about'
|
||||
@ -178,7 +179,7 @@ export default function AppSelector() {
|
||||
</Link>
|
||||
</MenuItem>
|
||||
{
|
||||
document?.body?.getAttribute('data-public-site-about') !== 'hide' && (
|
||||
env.NEXT_PUBLIC_SITE_ABOUT !== 'hide' && (
|
||||
<MenuItem>
|
||||
<div
|
||||
className={cn(itemClassName, 'justify-between', 'data-[active]:bg-state-base-hover')}
|
||||
|
||||
@ -104,7 +104,7 @@ const MembersPage = () => {
|
||||
<UpgradeBtn className="mr-2" loc="member-invite" />
|
||||
)}
|
||||
<div className="shrink-0">
|
||||
<InviteButton disabled={!isCurrentWorkspaceManager || isMemberFull} onClick={() => setInviteModalVisible(true)} />
|
||||
{isCurrentWorkspaceManager && <InviteButton disabled={isMemberFull} onClick={() => setInviteModalVisible(true)} />}
|
||||
</div>
|
||||
</div>
|
||||
<div className="overflow-visible lg:overflow-visible">
|
||||
|
||||
@ -3,6 +3,7 @@
|
||||
import { SerwistProvider } from '@serwist/turbopack/react'
|
||||
import { useEffect } from 'react'
|
||||
import { IS_DEV } from '@/config'
|
||||
import { env } from '@/env'
|
||||
import { isClient } from '@/utils/client'
|
||||
|
||||
export function PWAProvider({ children }: { children: React.ReactNode }) {
|
||||
@ -10,7 +11,7 @@ export function PWAProvider({ children }: { children: React.ReactNode }) {
|
||||
return <DisabledPWAProvider>{children}</DisabledPWAProvider>
|
||||
}
|
||||
|
||||
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
|
||||
const basePath = env.NEXT_PUBLIC_BASE_PATH
|
||||
const swUrl = `${basePath}/serwist/sw.js`
|
||||
|
||||
return (
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import type { TFunction } from 'i18next'
|
||||
import type { SchemaOptions } from './types'
|
||||
import { z } from 'zod'
|
||||
import * as z from 'zod'
|
||||
import { InputTypeEnum } from '@/app/components/base/form/components/field/input-type-select/types'
|
||||
import { MAX_VAR_KEY_LENGTH } from '@/config'
|
||||
import { PipelineInputVarType } from '@/models/pipeline'
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
import type { PropsWithChildren } from 'react'
|
||||
import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { DSLImportStatus } from '@/models/app'
|
||||
import UpdateDSLModal from './update-dsl-modal'
|
||||
|
||||
@ -145,11 +145,6 @@ vi.mock('@/app/components/workflow/constants', () => ({
|
||||
WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
|
||||
}))
|
||||
|
||||
afterEach(() => {
|
||||
cleanup()
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('UpdateDSLModal', () => {
|
||||
const mockOnCancel = vi.fn()
|
||||
const mockOnBackup = vi.fn()
|
||||
|
||||
@ -1,40 +1,17 @@
|
||||
'use client'
|
||||
|
||||
import type { MouseEventHandler } from 'react'
|
||||
import {
|
||||
RiAlertFill,
|
||||
RiCloseLine,
|
||||
RiFileDownloadLine,
|
||||
} from '@remixicon/react'
|
||||
import {
|
||||
memo,
|
||||
useCallback,
|
||||
useRef,
|
||||
useState,
|
||||
} from 'react'
|
||||
import { memo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import Uploader from '@/app/components/app/create-from-dsl-modal/uploader'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
import { ToastContext } from '@/app/components/base/toast'
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { useWorkflowStore } from '@/app/components/workflow/store'
import {
  initialEdges,
  initialNodes,
} from '@/app/components/workflow/utils'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import {
  DSLImportMode,
  DSLImportStatus,
} from '@/models/app'
import {
  useImportPipelineDSL,
  useImportPipelineDSLConfirm,
} from '@/service/use-pipeline'
import { fetchWorkflowDraft } from '@/service/workflow'
import { useUpdateDSLModal } from '../hooks/use-update-dsl-modal'
import VersionMismatchModal from './version-mismatch-modal'

type UpdateDSLModalProps = {
  onCancel: () => void
@@ -48,146 +25,17 @@ const UpdateDSLModal = ({
  onImport,
}: UpdateDSLModalProps) => {
  const { t } = useTranslation()
  const { notify } = useContext(ToastContext)
  const [currentFile, setDSLFile] = useState<File>()
  const [fileContent, setFileContent] = useState<string>()
  const [loading, setLoading] = useState(false)
  const { eventEmitter } = useEventEmitterContextContext()
  const [show, setShow] = useState(true)
  const [showErrorModal, setShowErrorModal] = useState(false)
  const [versions, setVersions] = useState<{ importedVersion: string, systemVersion: string }>()
  const [importId, setImportId] = useState<string>()
  const { handleCheckPluginDependencies } = usePluginDependencies()
  const { mutateAsync: importDSL } = useImportPipelineDSL()
  const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
  const workflowStore = useWorkflowStore()

  const readFile = (file: File) => {
    const reader = new FileReader()
    reader.onload = function (event) {
      const content = event.target?.result
      setFileContent(content as string)
    }
    reader.readAsText(file)
  }

  const handleFile = (file?: File) => {
    setDSLFile(file)
    if (file)
      readFile(file)
    if (!file)
      setFileContent('')
  }

  const handleWorkflowUpdate = useCallback(async (pipelineId: string) => {
    const {
      graph,
      hash,
      rag_pipeline_variables,
    } = await fetchWorkflowDraft(`/rag/pipelines/${pipelineId}/workflows/draft`)

    const { nodes, edges, viewport } = graph

    eventEmitter?.emit({
      type: WORKFLOW_DATA_UPDATE,
      payload: {
        nodes: initialNodes(nodes, edges),
        edges: initialEdges(edges, nodes),
        viewport,
        hash,
        rag_pipeline_variables: rag_pipeline_variables || [],
      },
    } as any)
  }, [eventEmitter])

  const isCreatingRef = useRef(false)
  const handleImport: MouseEventHandler = useCallback(async () => {
    const { pipelineId } = workflowStore.getState()
    if (isCreatingRef.current)
      return
    isCreatingRef.current = true
    if (!currentFile)
      return
    try {
      if (pipelineId && fileContent) {
        setLoading(true)
        const response = await importDSL({ mode: DSLImportMode.YAML_CONTENT, yaml_content: fileContent, pipeline_id: pipelineId })
        const { id, status, pipeline_id, imported_dsl_version, current_dsl_version } = response

        if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
          if (!pipeline_id) {
            notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
            return
          }
          handleWorkflowUpdate(pipeline_id)
          if (onImport)
            onImport()
          notify({
            type: status === DSLImportStatus.COMPLETED ? 'success' : 'warning',
            message: t(status === DSLImportStatus.COMPLETED ? 'common.importSuccess' : 'common.importWarning', { ns: 'workflow' }),
            children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('common.importWarningDetails', { ns: 'workflow' }),
          })
          await handleCheckPluginDependencies(pipeline_id, true)
          setLoading(false)
          onCancel()
        }
        else if (status === DSLImportStatus.PENDING) {
          setShow(false)
          setTimeout(() => {
            setShowErrorModal(true)
          }, 300)
          setVersions({
            importedVersion: imported_dsl_version ?? '',
            systemVersion: current_dsl_version ?? '',
          })
          setImportId(id)
        }
        else {
          setLoading(false)
          notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
        }
      }
    }
    // eslint-disable-next-line unused-imports/no-unused-vars
    catch (e) {
      setLoading(false)
      notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
    }
    isCreatingRef.current = false
  }, [currentFile, fileContent, onCancel, notify, t, onImport, handleWorkflowUpdate, handleCheckPluginDependencies, workflowStore, importDSL])

  const onUpdateDSLConfirm: MouseEventHandler = async () => {
    try {
      if (!importId)
        return
      const response = await importDSLConfirm(importId)

      const { status, pipeline_id } = response

      if (status === DSLImportStatus.COMPLETED) {
        if (!pipeline_id) {
          notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
          return
        }
        handleWorkflowUpdate(pipeline_id)
        await handleCheckPluginDependencies(pipeline_id, true)
        if (onImport)
          onImport()
        notify({ type: 'success', message: t('common.importSuccess', { ns: 'workflow' }) })
        setLoading(false)
        onCancel()
      }
      else if (status === DSLImportStatus.FAILED) {
        setLoading(false)
        notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
      }
    }
    // eslint-disable-next-line unused-imports/no-unused-vars
    catch (e) {
      setLoading(false)
      notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
    }
  }
  const {
    currentFile,
    handleFile,
    show,
    showErrorModal,
    setShowErrorModal,
    loading,
    versions,
    handleImport,
    onUpdateDSLConfirm,
  } = useUpdateDSLModal({ onCancel, onImport })

  return (
    <>
@@ -250,32 +98,12 @@ const UpdateDSLModal = ({
          </Button>
        </div>
      </Modal>
      <Modal
      <VersionMismatchModal
        isShow={showErrorModal}
        versions={versions}
        onClose={() => setShowErrorModal(false)}
        className="w-[480px]"
      >
        <div className="flex flex-col items-start gap-2 self-stretch pb-4">
          <div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
          <div className="system-md-regular flex grow flex-col text-text-secondary">
            <div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
            <div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
            <br />
            <div>
              {t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
              <span className="system-md-medium">{versions?.importedVersion}</span>
            </div>
            <div>
              {t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
              <span className="system-md-medium">{versions?.systemVersion}</span>
            </div>
          </div>
        </div>
        <div className="flex items-start justify-end gap-2 self-stretch pt-6">
          <Button variant="secondary" onClick={() => setShowErrorModal(false)}>{t('newApp.Cancel', { ns: 'app' })}</Button>
          <Button variant="primary" destructive onClick={onUpdateDSLConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
        </div>
      </Modal>
        onConfirm={onUpdateDSLConfirm}
      />
    </>
  )
}

@@ -0,0 +1,117 @@
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import VersionMismatchModal from './version-mismatch-modal'

describe('VersionMismatchModal', () => {
  const mockOnClose = vi.fn()
  const mockOnConfirm = vi.fn()

  const defaultVersions = {
    importedVersion: '0.8.0',
    systemVersion: '1.0.0',
  }

  const defaultProps = {
    isShow: true,
    versions: defaultVersions,
    onClose: mockOnClose,
    onConfirm: mockOnConfirm,
  }

  beforeEach(() => {
    vi.clearAllMocks()
  })

  describe('rendering', () => {
    it('should render dialog when isShow is true', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByRole('dialog')).toBeInTheDocument()
    })

    it('should not render dialog when isShow is false', () => {
      render(<VersionMismatchModal {...defaultProps} isShow={false} />)

      expect(screen.queryByRole('dialog')).not.toBeInTheDocument()
    })

    it('should render error title', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
    })

    it('should render all error description parts', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByText('app.newApp.appCreateDSLErrorPart1')).toBeInTheDocument()
      expect(screen.getByText('app.newApp.appCreateDSLErrorPart2')).toBeInTheDocument()
      expect(screen.getByText('app.newApp.appCreateDSLErrorPart3')).toBeInTheDocument()
      expect(screen.getByText('app.newApp.appCreateDSLErrorPart4')).toBeInTheDocument()
    })

    it('should display imported and system version numbers', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByText('0.8.0')).toBeInTheDocument()
      expect(screen.getByText('1.0.0')).toBeInTheDocument()
    })

    it('should render cancel and confirm buttons', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      expect(screen.getByRole('button', { name: /app\.newApp\.Cancel/ })).toBeInTheDocument()
      expect(screen.getByRole('button', { name: /app\.newApp\.Confirm/ })).toBeInTheDocument()
    })
  })

  describe('user interactions', () => {
    it('should call onClose when cancel button is clicked', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Cancel/ }))

      expect(mockOnClose).toHaveBeenCalledTimes(1)
    })

    it('should call onConfirm when confirm button is clicked', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Confirm/ }))

      expect(mockOnConfirm).toHaveBeenCalledTimes(1)
    })
  })

  describe('button variants', () => {
    it('should render cancel button with secondary variant', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      const cancelBtn = screen.getByRole('button', { name: /app\.newApp\.Cancel/ })
      expect(cancelBtn).toHaveClass('btn-secondary')
    })

    it('should render confirm button with primary destructive variant', () => {
      render(<VersionMismatchModal {...defaultProps} />)

      const confirmBtn = screen.getByRole('button', { name: /app\.newApp\.Confirm/ })
      expect(confirmBtn).toHaveClass('btn-primary')
      expect(confirmBtn).toHaveClass('btn-destructive')
    })
  })

  describe('edge cases', () => {
    it('should handle undefined versions gracefully', () => {
      render(<VersionMismatchModal {...defaultProps} versions={undefined} />)

      expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
    })

    it('should handle empty version strings', () => {
      const emptyVersions = { importedVersion: '', systemVersion: '' }
      render(<VersionMismatchModal {...defaultProps} versions={emptyVersions} />)

      expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
    })
  })
})

@@ -0,0 +1,54 @@
import type { MouseEventHandler } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'

type VersionMismatchModalProps = {
  isShow: boolean
  versions?: {
    importedVersion: string
    systemVersion: string
  }
  onClose: () => void
  onConfirm: MouseEventHandler
}

const VersionMismatchModal = ({
  isShow,
  versions,
  onClose,
  onConfirm,
}: VersionMismatchModalProps) => {
  const { t } = useTranslation()

  return (
    <Modal
      isShow={isShow}
      onClose={onClose}
      className="w-[480px]"
    >
      <div className="flex flex-col items-start gap-2 self-stretch pb-4">
        <div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
        <div className="system-md-regular flex grow flex-col text-text-secondary">
          <div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
          <div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
          <br />
          <div>
            {t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
            <span className="system-md-medium">{versions?.importedVersion}</span>
          </div>
          <div>
            {t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
            <span className="system-md-medium">{versions?.systemVersion}</span>
          </div>
        </div>
      </div>
      <div className="flex items-start justify-end gap-2 self-stretch pt-6">
        <Button variant="secondary" onClick={onClose}>{t('newApp.Cancel', { ns: 'app' })}</Button>
        <Button variant="primary" destructive onClick={onConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
      </div>
    </Modal>
  )
}

export default VersionMismatchModal

@@ -68,23 +68,20 @@ vi.mock('@/config', () => ({
  API_PREFIX: '/api',
}))

// Mock postWithKeepalive from service/fetch
const mockPostWithKeepalive = vi.fn()
vi.mock('@/service/fetch', () => ({
  postWithKeepalive: (...args: unknown[]) => mockPostWithKeepalive(...args),
}))

// ============================================================================
// Tests
// ============================================================================

describe('useNodesSyncDraft', () => {
  const mockSendBeacon = vi.fn()

  beforeEach(() => {
    vi.clearAllMocks()

    // Setup navigator.sendBeacon mock
    Object.defineProperty(navigator, 'sendBeacon', {
      value: mockSendBeacon,
      writable: true,
      configurable: true,
    })

    // Default store state
    mockStoreGetState.mockReturnValue({
      getNodes: mockGetNodes,
@@ -134,7 +131,7 @@ describe('useNodesSyncDraft', () => {
  })

  describe('syncWorkflowDraftWhenPageClose', () => {
    it('should not call sendBeacon when nodes are read only', () => {
    it('should not call postWithKeepalive when nodes are read only', () => {
      mockGetNodesReadOnly.mockReturnValue(true)

      const { result } = renderHook(() => useNodesSyncDraft())
@@ -143,10 +140,10 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      expect(mockSendBeacon).not.toHaveBeenCalled()
      expect(mockPostWithKeepalive).not.toHaveBeenCalled()
    })

    it('should call sendBeacon with correct URL and params', () => {
    it('should call postWithKeepalive with correct URL and params', () => {
      mockGetNodesReadOnly.mockReturnValue(false)
      mockGetNodes.mockReturnValue([
        { id: 'node-1', data: { type: 'start' }, position: { x: 0, y: 0 } },
@@ -158,13 +155,16 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      expect(mockSendBeacon).toHaveBeenCalledWith(
      expect(mockPostWithKeepalive).toHaveBeenCalledWith(
        '/api/rag/pipelines/test-pipeline-id/workflows/draft',
        expect.any(String),
        expect.objectContaining({
          graph: expect.any(Object),
          hash: 'test-hash',
        }),
      )
    })

    it('should not call sendBeacon when pipelineId is missing', () => {
    it('should not call postWithKeepalive when pipelineId is missing', () => {
      mockWorkflowStoreGetState.mockReturnValue({
        pipelineId: undefined,
        environmentVariables: [],
@@ -178,10 +178,10 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      expect(mockSendBeacon).not.toHaveBeenCalled()
      expect(mockPostWithKeepalive).not.toHaveBeenCalled()
    })

    it('should not call sendBeacon when nodes array is empty', () => {
    it('should not call postWithKeepalive when nodes array is empty', () => {
      mockGetNodes.mockReturnValue([])

      const { result } = renderHook(() => useNodesSyncDraft())
@@ -190,7 +190,7 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      expect(mockSendBeacon).not.toHaveBeenCalled()
      expect(mockPostWithKeepalive).not.toHaveBeenCalled()
    })

    it('should filter out temp nodes', () => {
@@ -204,8 +204,8 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      // Should not call sendBeacon because after filtering temp nodes, array is empty
      expect(mockSendBeacon).not.toHaveBeenCalled()
      // Should not call postWithKeepalive because after filtering temp nodes, array is empty
      expect(mockPostWithKeepalive).not.toHaveBeenCalled()
    })

    it('should remove underscore-prefixed data keys from nodes', () => {
@@ -219,9 +219,9 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      expect(mockSendBeacon).toHaveBeenCalled()
      const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
      expect(sentData.graph.nodes[0].data._privateData).toBeUndefined()
      expect(mockPostWithKeepalive).toHaveBeenCalled()
      const sentParams = mockPostWithKeepalive.mock.calls[0][1]
      expect(sentParams.graph.nodes[0].data._privateData).toBeUndefined()
    })
  })

@@ -395,8 +395,8 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
      expect(sentData.graph.viewport).toEqual({ x: 100, y: 200, zoom: 1.5 })
      const sentParams = mockPostWithKeepalive.mock.calls[0][1]
      expect(sentParams.graph.viewport).toEqual({ x: 100, y: 200, zoom: 1.5 })
    })

    it('should include environment variables in params', () => {
@@ -418,8 +418,8 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
      expect(sentData.environment_variables).toEqual([{ key: 'API_KEY', value: 'secret' }])
      const sentParams = mockPostWithKeepalive.mock.calls[0][1]
      expect(sentParams.environment_variables).toEqual([{ key: 'API_KEY', value: 'secret' }])
    })

    it('should include rag pipeline variables in params', () => {
@@ -441,8 +441,8 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
      expect(sentData.rag_pipeline_variables).toEqual([{ variable: 'input', type: 'text-input' }])
      const sentParams = mockPostWithKeepalive.mock.calls[0][1]
      expect(sentParams.rag_pipeline_variables).toEqual([{ variable: 'input', type: 'text-input' }])
    })

    it('should remove underscore-prefixed keys from edges', () => {
@@ -461,9 +461,9 @@ describe('useNodesSyncDraft', () => {
        result.current.syncWorkflowDraftWhenPageClose()
      })

      const sentData = JSON.parse(mockSendBeacon.mock.calls[0][1])
      expect(sentData.graph.edges[0].data._hidden).toBeUndefined()
      expect(sentData.graph.edges[0].data.visible).toBe(false)
      const sentParams = mockPostWithKeepalive.mock.calls[0][1]
      expect(sentParams.graph.edges[0].data._hidden).toBeUndefined()
      expect(sentParams.graph.edges[0].data.visible).toBe(false)
    })
  })
})

@@ -9,6 +9,7 @@ import {
  useWorkflowStore,
} from '@/app/components/workflow/store'
import { API_PREFIX } from '@/config'
import { postWithKeepalive } from '@/service/fetch'
import { syncWorkflowDraft } from '@/service/workflow'
import { usePipelineRefreshDraft } from '.'

@@ -76,12 +77,8 @@ export const useNodesSyncDraft = () => {
      return
    const postParams = getPostParams()

    if (postParams) {
      navigator.sendBeacon(
        `${API_PREFIX}${postParams.url}`,
        JSON.stringify(postParams.params),
      )
    }
    if (postParams)
      postWithKeepalive(`${API_PREFIX}${postParams.url}`, postParams.params)
  }, [getPostParams, getNodesReadOnly])

  const performSync = useCallback(async (

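Note: postWithKeepalive is imported from '@/service/fetch' here, but its body is not part of this diff. A minimal sketch of what such a helper is assumed to look like, a fire-and-forget POST using fetch's keepalive flag, which (unlike navigator.sendBeacon) can set a JSON Content-Type header so the draft-sync endpoint receives the same payload shape as a normal POST:

// Assumed sketch only; the real implementation lives in web/service/fetch.ts.
export const postWithKeepalive = (url: string, params: Record<string, unknown>): void => {
  fetch(url, {
    method: 'POST',
    keepalive: true, // allows the request to outlive the unloading page
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(params),
  }).catch(() => {
    // best-effort sync on page close; failures are intentionally swallowed
  })
}
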
@@ -92,8 +92,10 @@ vi.mock('@/service/workflow', () => ({
}))

const mockInvalidAllLastRun = vi.fn()
const mockInvalidateRunHistory = vi.fn()
vi.mock('@/service/use-workflow', () => ({
  useInvalidAllLastRun: () => mockInvalidAllLastRun,
  useInvalidateWorkflowRunHistory: () => mockInvalidateRunHistory,
}))

// Mock FlowType
@@ -472,6 +474,7 @@ describe('usePipelineRun', () => {
      })

      expect(onWorkflowStarted).toHaveBeenCalledWith({ task_id: 'task-1' })
      expect(mockInvalidateRunHistory).toHaveBeenCalled()
    })

    it('should call onWorkflowFinished callback when provided', async () => {
@@ -493,6 +496,7 @@ describe('usePipelineRun', () => {
      })

      expect(onWorkflowFinished).toHaveBeenCalledWith({ status: 'succeeded' })
      expect(mockInvalidateRunHistory).toHaveBeenCalled()
    })

    it('should call onError callback when provided', async () => {
@@ -514,6 +518,7 @@ describe('usePipelineRun', () => {
      })

      expect(onError).toHaveBeenCalledWith({ message: 'error' })
      expect(mockInvalidateRunHistory).toHaveBeenCalled()
    })

    it('should call onNodeStarted callback when provided', async () => {

@@ -12,7 +12,7 @@ import { useWorkflowRunEvent } from '@/app/components/workflow/hooks/use-workflo
import { useStore, useWorkflowStore } from '@/app/components/workflow/store'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import { ssePost } from '@/service/base'
import { useInvalidAllLastRun } from '@/service/use-workflow'
import { useInvalidAllLastRun, useInvalidateWorkflowRunHistory } from '@/service/use-workflow'
import { stopWorkflowRun } from '@/service/workflow'
import { FlowType } from '@/types/common'
import { useNodesSyncDraft } from './use-nodes-sync-draft'
@@ -93,6 +93,7 @@ export const usePipelineRun = () => {

  const pipelineId = useStore(s => s.pipelineId)
  const invalidAllLastRun = useInvalidAllLastRun(FlowType.ragPipeline, pipelineId)
  const invalidateRunHistory = useInvalidateWorkflowRunHistory()
  const { fetchInspectVars } = useSetWorkflowVarsWithValue({
    flowType: FlowType.ragPipeline,
    flowId: pipelineId!,
@@ -132,6 +133,7 @@ export const usePipelineRun = () => {
      ...restCallback
    } = callback || {}
    const { pipelineId } = workflowStore.getState()
    const runHistoryUrl = `/rag/pipelines/${pipelineId}/workflow-runs`
    workflowStore.setState({ historyWorkflowData: undefined })
    const workflowContainer = document.getElementById('workflow-container')

@@ -170,12 +172,14 @@ export const usePipelineRun = () => {
        },
        onWorkflowStarted: (params) => {
          handleWorkflowStarted(params)
          invalidateRunHistory(runHistoryUrl)

          if (onWorkflowStarted)
            onWorkflowStarted(params)
        },
        onWorkflowFinished: (params) => {
          handleWorkflowFinished(params)
          invalidateRunHistory(runHistoryUrl)
          fetchInspectVars({})
          invalidAllLastRun()

@@ -184,6 +188,7 @@ export const usePipelineRun = () => {
        },
        onError: (params) => {
          handleWorkflowFailed()
          invalidateRunHistory(runHistoryUrl)

          if (onError)
            onError(params)
@@ -275,7 +280,7 @@ export const usePipelineRun = () => {
        ...restCallback,
      },
    )
  }, [store, doSyncWorkflowDraft, workflowStore, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, handleWorkflowFailed, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace])
  }, [store, doSyncWorkflowDraft, workflowStore, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, invalidateRunHistory, handleWorkflowFailed, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace])

  const handleStopRun = useCallback((taskId: string) => {
    const { pipelineId } = workflowStore.getState()

@@ -0,0 +1,551 @@
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportMode, DSLImportStatus } from '@/models/app'
import { useUpdateDSLModal } from './use-update-dsl-modal'

// --- FileReader stub ---
class MockFileReader {
  onload: ((this: FileReader, event: ProgressEvent<FileReader>) => void) | null = null

  readAsText(_file: Blob) {
    const event = { target: { result: 'test content' } } as unknown as ProgressEvent<FileReader>
    this.onload?.call(this as unknown as FileReader, event)
  }
}
vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader)

// --- Module-level mock functions ---
const mockNotify = vi.fn()
const mockEmit = vi.fn()
const mockImportDSL = vi.fn()
const mockImportDSLConfirm = vi.fn()
const mockHandleCheckPluginDependencies = vi.fn()

// --- Mocks ---
vi.mock('react-i18next', () => ({
  useTranslation: () => ({ t: (key: string) => key }),
}))

vi.mock('use-context-selector', () => ({
  useContext: () => ({ notify: mockNotify }),
}))

vi.mock('@/app/components/base/toast', () => ({
  ToastContext: {},
}))

vi.mock('@/context/event-emitter', () => ({
  useEventEmitterContextContext: () => ({
    eventEmitter: { emit: mockEmit },
  }),
}))

vi.mock('@/app/components/workflow/store', () => ({
  useWorkflowStore: () => ({
    getState: () => ({ pipelineId: 'test-pipeline-id' }),
  }),
}))

vi.mock('@/app/components/workflow/utils', () => ({
  initialNodes: (nodes: unknown[]) => nodes,
  initialEdges: (edges: unknown[]) => edges,
}))

vi.mock('@/app/components/workflow/constants', () => ({
  WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
}))

vi.mock('@/app/components/workflow/plugin-dependency/hooks', () => ({
  usePluginDependencies: () => ({
    handleCheckPluginDependencies: mockHandleCheckPluginDependencies,
  }),
}))

vi.mock('@/service/use-pipeline', () => ({
  useImportPipelineDSL: () => ({ mutateAsync: mockImportDSL }),
  useImportPipelineDSLConfirm: () => ({ mutateAsync: mockImportDSLConfirm }),
}))

vi.mock('@/service/workflow', () => ({
  fetchWorkflowDraft: vi.fn().mockResolvedValue({
    graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
    hash: 'test-hash',
    rag_pipeline_variables: [],
  }),
}))

// --- Helpers ---
const createFile = () => new File(['test content'], 'test.pipeline', { type: 'text/yaml' })

// Cast MouseEventHandler to a plain callable for tests (event param is unused)
type AsyncFn = () => Promise<void>

describe('useUpdateDSLModal', () => {
  const mockOnCancel = vi.fn()
  const mockOnImport = vi.fn()

  const renderUpdateDSLModal = (overrides?: { onImport?: () => void }) =>
    renderHook(() =>
      useUpdateDSLModal({
        onCancel: mockOnCancel,
        onImport: overrides?.onImport ?? mockOnImport,
      }),
    )

  beforeEach(() => {
    vi.clearAllMocks()
    mockImportDSL.mockResolvedValue({
      id: 'import-id',
      status: DSLImportStatus.COMPLETED,
      pipeline_id: 'test-pipeline-id',
    })
    mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
  })

  // Initial state values
  describe('initial state', () => {
    it('should return correct defaults', () => {
      const { result } = renderUpdateDSLModal()

      expect(result.current.currentFile).toBeUndefined()
      expect(result.current.show).toBe(true)
      expect(result.current.showErrorModal).toBe(false)
      expect(result.current.loading).toBe(false)
      expect(result.current.versions).toBeUndefined()
    })
  })

  // File handling
  describe('handleFile', () => {
    it('should set currentFile when file is provided', () => {
      const { result } = renderUpdateDSLModal()
      const file = createFile()

      act(() => {
        result.current.handleFile(file)
      })

      expect(result.current.currentFile).toBe(file)
    })

    it('should clear currentFile when called with undefined', () => {
      const { result } = renderUpdateDSLModal()

      act(() => {
        result.current.handleFile(createFile())
      })
      act(() => {
        result.current.handleFile(undefined)
      })

      expect(result.current.currentFile).toBeUndefined()
    })
  })

  // Modal state management
  describe('modal state', () => {
    it('should allow toggling showErrorModal', () => {
      const { result } = renderUpdateDSLModal()

      expect(result.current.showErrorModal).toBe(false)

      act(() => {
        result.current.setShowErrorModal(true)
      })
      expect(result.current.showErrorModal).toBe(true)

      act(() => {
        result.current.setShowErrorModal(false)
      })
      expect(result.current.showErrorModal).toBe(false)
    })
  })

  // Import flow
  describe('handleImport', () => {
    it('should call importDSL with correct parameters', async () => {
      const { result } = renderUpdateDSLModal()

      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockImportDSL).toHaveBeenCalledWith({
        mode: DSLImportMode.YAML_CONTENT,
        yaml_content: 'test content',
        pipeline_id: 'test-pipeline-id',
      })
    })

    it('should not call importDSL when no file is selected', async () => {
      const { result } = renderUpdateDSLModal()

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockImportDSL).not.toHaveBeenCalled()
    })

    // COMPLETED status
    it('should notify success on COMPLETED status', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'success' }))
    })

    it('should call onImport on successful import', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockOnImport).toHaveBeenCalled()
    })

    it('should call onCancel on successful import', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockOnCancel).toHaveBeenCalled()
    })

    it('should emit workflow update event on success', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockEmit).toHaveBeenCalled()
    })

    it('should call handleCheckPluginDependencies on success', async () => {
      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockHandleCheckPluginDependencies).toHaveBeenCalledWith('test-pipeline-id', true)
    })

    // COMPLETED_WITH_WARNINGS status
    it('should notify warning on COMPLETED_WITH_WARNINGS status', async () => {
      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.COMPLETED_WITH_WARNINGS,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'warning' }))
    })

    // PENDING status (version mismatch)
    it('should switch to version mismatch modal on PENDING status', async () => {
      vi.useFakeTimers({ shouldAdvanceTime: true })

      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.PENDING,
        pipeline_id: 'test-pipeline-id',
        imported_dsl_version: '0.8.0',
        current_dsl_version: '1.0.0',
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
        await vi.advanceTimersByTimeAsync(350)
      })

      expect(result.current.show).toBe(false)
      expect(result.current.showErrorModal).toBe(true)
      expect(result.current.versions).toEqual({
        importedVersion: '0.8.0',
        systemVersion: '1.0.0',
      })

      vi.useRealTimers()
    })

    it('should default version strings to empty when undefined', async () => {
      vi.useFakeTimers({ shouldAdvanceTime: true })

      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.PENDING,
        pipeline_id: 'test-pipeline-id',
        imported_dsl_version: undefined,
        current_dsl_version: undefined,
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
        await vi.advanceTimersByTimeAsync(350)
      })

      expect(result.current.versions).toEqual({
        importedVersion: '',
        systemVersion: '',
      })

      vi.useRealTimers()
    })

    // FAILED / unknown status
    it('should notify error on FAILED status', async () => {
      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.FAILED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    // Exception
    it('should notify error when importDSL throws', async () => {
      mockImportDSL.mockRejectedValue(new Error('Network error'))

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    // Missing pipeline_id
    it('should notify error when pipeline_id is missing on success', async () => {
      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.COMPLETED,
        pipeline_id: undefined,
      })

      const { result } = renderUpdateDSLModal()
      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })
  })

  // Confirm flow (after PENDING → version mismatch)
  describe('onUpdateDSLConfirm', () => {
    // Helper: drive the hook into PENDING state so importId is set
    const setupPendingState = async (result: { current: ReturnType<typeof useUpdateDSLModal> }) => {
      vi.useFakeTimers({ shouldAdvanceTime: true })

      mockImportDSL.mockResolvedValue({
        id: 'import-id',
        status: DSLImportStatus.PENDING,
        pipeline_id: 'test-pipeline-id',
        imported_dsl_version: '0.8.0',
        current_dsl_version: '1.0.0',
      })

      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
        await vi.advanceTimersByTimeAsync(350)
      })

      vi.useRealTimers()
      vi.clearAllMocks()
      mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
    }

    it('should call importDSLConfirm with the stored importId', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.COMPLETED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockImportDSLConfirm).toHaveBeenCalledWith('import-id')
    })

    it('should notify success and call onCancel after successful confirm', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.COMPLETED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'success' }))
      expect(mockOnCancel).toHaveBeenCalled()
    })

    it('should call onImport after successful confirm', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.COMPLETED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockOnImport).toHaveBeenCalled()
    })

    it('should notify error on FAILED confirm status', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.FAILED,
        pipeline_id: 'test-pipeline-id',
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    it('should notify error when confirm throws exception', async () => {
      mockImportDSLConfirm.mockRejectedValue(new Error('Confirm failed'))

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    it('should notify error when confirm succeeds but pipeline_id is missing', async () => {
      mockImportDSLConfirm.mockResolvedValue({
        status: DSLImportStatus.COMPLETED,
        pipeline_id: undefined,
      })

      const { result } = renderUpdateDSLModal()
      await setupPendingState(result)

      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
    })

    it('should not call importDSLConfirm when importId is not set', async () => {
      const { result } = renderUpdateDSLModal()

      // No pending state → importId is undefined
      await act(async () => {
        await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
      })

      expect(mockImportDSLConfirm).not.toHaveBeenCalled()
    })
  })

  // Optional onImport callback
  describe('optional onImport', () => {
    it('should work without onImport callback', async () => {
      const { result } = renderHook(() =>
        useUpdateDSLModal({ onCancel: mockOnCancel }),
      )

      act(() => {
        result.current.handleFile(createFile())
      })

      await act(async () => {
        await (result.current.handleImport as unknown as AsyncFn)()
      })

      // Should succeed without throwing
      expect(mockOnCancel).toHaveBeenCalled()
    })
  })
})

web/app/components/rag-pipeline/hooks/use-update-dsl-modal.ts (new file, 205 lines)
@@ -0,0 +1,205 @@
import type { MouseEventHandler } from 'react'
import {
  useCallback,
  useRef,
  useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { useWorkflowStore } from '@/app/components/workflow/store'
import {
  initialEdges,
  initialNodes,
} from '@/app/components/workflow/utils'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import {
  DSLImportMode,
  DSLImportStatus,
} from '@/models/app'
import {
  useImportPipelineDSL,
  useImportPipelineDSLConfirm,
} from '@/service/use-pipeline'
import { fetchWorkflowDraft } from '@/service/workflow'

type VersionInfo = {
  importedVersion: string
  systemVersion: string
}

type UseUpdateDSLModalParams = {
  onCancel: () => void
  onImport?: () => void
}

const isCompletedStatus = (status: DSLImportStatus): boolean =>
  status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS

export const useUpdateDSLModal = ({ onCancel, onImport }: UseUpdateDSLModalParams) => {
  const { t } = useTranslation()
  const { notify } = useContext(ToastContext)
  const { eventEmitter } = useEventEmitterContextContext()
  const workflowStore = useWorkflowStore()
  const { handleCheckPluginDependencies } = usePluginDependencies()
  const { mutateAsync: importDSL } = useImportPipelineDSL()
  const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()

  // File state
  const [currentFile, setDSLFile] = useState<File>()
  const [fileContent, setFileContent] = useState<string>()

  // Modal state
  const [show, setShow] = useState(true)
  const [showErrorModal, setShowErrorModal] = useState(false)

  // Import state
  const [loading, setLoading] = useState(false)
  const [versions, setVersions] = useState<VersionInfo>()
  const [importId, setImportId] = useState<string>()
  const isCreatingRef = useRef(false)

  const readFile = (file: File) => {
    const reader = new FileReader()
    reader.onload = (event) => {
      setFileContent(event.target?.result as string)
    }
    reader.readAsText(file)
  }

  const handleFile = (file?: File) => {
    setDSLFile(file)
    if (file)
      readFile(file)
    if (!file)
      setFileContent('')
  }

  const notifyError = useCallback(() => {
    setLoading(false)
    notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
  }, [notify, t])

  const updateWorkflow = useCallback(async (pipelineId: string) => {
    const { graph, hash, rag_pipeline_variables } = await fetchWorkflowDraft(
      `/rag/pipelines/${pipelineId}/workflows/draft`,
    )
    const { nodes, edges, viewport } = graph

    eventEmitter?.emit({
      type: WORKFLOW_DATA_UPDATE,
      payload: {
        nodes: initialNodes(nodes, edges),
        edges: initialEdges(edges, nodes),
        viewport,
        hash,
        rag_pipeline_variables: rag_pipeline_variables || [],
      },
    })
  }, [eventEmitter])

  const completeImport = useCallback(async (
    pipelineId: string | undefined,
    status: DSLImportStatus = DSLImportStatus.COMPLETED,
  ) => {
    if (!pipelineId) {
      notifyError()
      return
    }

    updateWorkflow(pipelineId)
    onImport?.()

    const isWarning = status === DSLImportStatus.COMPLETED_WITH_WARNINGS
    notify({
      type: isWarning ? 'warning' : 'success',
      message: t(isWarning ? 'common.importWarning' : 'common.importSuccess', { ns: 'workflow' }),
      children: isWarning && t('common.importWarningDetails', { ns: 'workflow' }),
    })

    await handleCheckPluginDependencies(pipelineId, true)
    setLoading(false)
    onCancel()
  }, [updateWorkflow, onImport, notify, t, handleCheckPluginDependencies, onCancel, notifyError])

  const showVersionMismatch = useCallback((
    id: string,
    importedVersion?: string,
    systemVersion?: string,
  ) => {
    setShow(false)
    setTimeout(() => setShowErrorModal(true), 300)
    setVersions({
      importedVersion: importedVersion ?? '',
      systemVersion: systemVersion ?? '',
    })
    setImportId(id)
  }, [])

  const handleImport: MouseEventHandler = useCallback(async () => {
    const { pipelineId } = workflowStore.getState()
    if (isCreatingRef.current)
      return
    isCreatingRef.current = true
    if (!currentFile)
      return

    try {
      if (!pipelineId || !fileContent)
        return

      setLoading(true)
      const response = await importDSL({
        mode: DSLImportMode.YAML_CONTENT,
        yaml_content: fileContent,
        pipeline_id: pipelineId,
      })
      const { id, status, pipeline_id, imported_dsl_version, current_dsl_version } = response

      if (isCompletedStatus(status))
        await completeImport(pipeline_id, status)
      else if (status === DSLImportStatus.PENDING)
        showVersionMismatch(id, imported_dsl_version, current_dsl_version)
      else
        notifyError()
    }
    catch {
      notifyError()
    }
    isCreatingRef.current = false
  }, [currentFile, fileContent, workflowStore, importDSL, completeImport, showVersionMismatch, notifyError])

  const onUpdateDSLConfirm: MouseEventHandler = useCallback(async () => {
    if (!importId)
      return

    try {
      const { status, pipeline_id } = await importDSLConfirm(importId)

      if (status === DSLImportStatus.COMPLETED) {
        await completeImport(pipeline_id)
        return
      }

      if (status === DSLImportStatus.FAILED)
        notifyError()
    }
    catch {
      notifyError()
    }
  }, [importId, importDSLConfirm, completeImport, notifyError])

  return {
    currentFile,
    handleFile,
    show,
    showErrorModal,
    setShowErrorModal,
    loading,
    versions,
    handleImport,
    onUpdateDSLConfirm,
  }
}

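For reference, a minimal hypothetical consumer of the extracted hook; the real update-dsl-modal.tsx (diffed earlier in this comparison) destructures the same fields and wires them into Uploader, Modal and VersionMismatchModal. This sketch is not part of the commit:

// Hypothetical usage sketch only.
import { useUpdateDSLModal } from '@/app/components/rag-pipeline/hooks/use-update-dsl-modal'

const ExampleConsumer = ({ onCancel, onImport }: { onCancel: () => void, onImport?: () => void }) => {
  const { currentFile, handleFile, loading, handleImport } = useUpdateDSLModal({ onCancel, onImport })
  return (
    <div>
      {/* handleFile reads the selected DSL file and caches its text content */}
      <input type="file" accept=".yml,.yaml,.pipeline" onChange={e => handleFile(e.target.files?.[0])} />
      {/* handleImport submits the cached YAML to the pipeline import endpoint */}
      <button disabled={!currentFile || loading} onClick={handleImport}>Import DSL</button>
    </div>
  )
}
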
@@ -4,12 +4,13 @@ import * as Sentry from '@sentry/react'
import { useEffect } from 'react'

import { IS_DEV } from '@/config'
import { env } from '@/env'

const SentryInitializer = ({
  children,
}: { children: React.ReactElement }) => {
  useEffect(() => {
    const SENTRY_DSN = document?.body?.getAttribute('data-public-sentry-dsn')
    const SENTRY_DSN = env.NEXT_PUBLIC_SENTRY_DSN
    if (!IS_DEV && SENTRY_DSN) {
      Sentry.init({
        dsn: SENTRY_DSN,

@@ -6,6 +6,7 @@ import { useSerialAsyncCallback } from '@/app/components/workflow/hooks/use-seri
import { useNodesReadOnly } from '@/app/components/workflow/hooks/use-workflow'
import { useWorkflowStore } from '@/app/components/workflow/store'
import { API_PREFIX } from '@/config'
import { postWithKeepalive } from '@/service/fetch'
import { syncWorkflowDraft } from '@/service/workflow'
import { useWorkflowRefreshDraft } from '.'

@@ -85,7 +86,7 @@ export const useNodesSyncDraft = () => {
    const postParams = getPostParams()

    if (postParams)
      navigator.sendBeacon(`${API_PREFIX}${postParams.url}`, JSON.stringify(postParams.params))
      postWithKeepalive(`${API_PREFIX}${postParams.url}`, postParams.params)
  }, [getPostParams, getNodesReadOnly])

  const performSync = useCallback(async (

@@ -23,7 +23,7 @@ import { useWorkflowStore } from '@/app/components/workflow/store'
import { WorkflowRunningStatus } from '@/app/components/workflow/types'
import { handleStream, post, sseGet, ssePost } from '@/service/base'
import { ContentType } from '@/service/fetch'
import { useInvalidAllLastRun } from '@/service/use-workflow'
import { useInvalidAllLastRun, useInvalidateWorkflowRunHistory } from '@/service/use-workflow'
import { stopWorkflowRun } from '@/service/workflow'
import { AppModeEnum } from '@/types/app'
import { useSetWorkflowVarsWithValue } from '../../workflow/hooks/use-fetch-workflow-inspect-vars'
@@ -66,6 +66,7 @@ export const useWorkflowRun = () => {
  const configsMap = useConfigsMap()
  const { flowId, flowType } = configsMap
  const invalidAllLastRun = useInvalidAllLastRun(flowType, flowId)
  const invalidateRunHistory = useInvalidateWorkflowRunHistory()

  const { fetchInspectVars } = useSetWorkflowVarsWithValue({
    ...configsMap,
@@ -189,6 +190,9 @@ export const useWorkflowRun = () => {
    } = callback || {}
    workflowStore.setState({ historyWorkflowData: undefined })
    const appDetail = useAppStore.getState().appDetail
    const runHistoryUrl = appDetail?.mode === AppModeEnum.ADVANCED_CHAT
      ? `/apps/${appDetail.id}/advanced-chat/workflow-runs`
      : `/apps/${appDetail?.id}/workflow-runs`
    const workflowContainer = document.getElementById('workflow-container')

    const {
@@ -363,6 +367,7 @@ export const useWorkflowRun = () => {
    const wrappedOnError = (params: any) => {
      clearAbortController()
      handleWorkflowFailed()
      invalidateRunHistory(runHistoryUrl)
      clearListeningState()

      if (onError)
@@ -381,6 +386,7 @@ export const useWorkflowRun = () => {
        ...restCallback,
        onWorkflowStarted: (params) => {
          handleWorkflowStarted(params)
          invalidateRunHistory(runHistoryUrl)

          if (onWorkflowStarted)
            onWorkflowStarted(params)
@@ -388,6 +394,7 @@ export const useWorkflowRun = () => {
        onWorkflowFinished: (params) => {
          clearListeningState()
          handleWorkflowFinished(params)
          invalidateRunHistory(runHistoryUrl)

          if (onWorkflowFinished)
            onWorkflowFinished(params)
@@ -496,6 +503,7 @@ export const useWorkflowRun = () => {
        },
        onWorkflowPaused: (params) => {
          handleWorkflowPaused()
          invalidateRunHistory(runHistoryUrl)
          if (onWorkflowPaused)
            onWorkflowPaused(params)
          const url = `/workflow/${params.workflow_run_id}/events`
@@ -694,6 +702,7 @@ export const useWorkflowRun = () => {
        },
        onWorkflowFinished: (params) => {
          handleWorkflowFinished(params)
          invalidateRunHistory(runHistoryUrl)

          if (onWorkflowFinished)
            onWorkflowFinished(params)
@@ -704,6 +713,7 @@ export const useWorkflowRun = () => {
        },
        onError: (params) => {
          handleWorkflowFailed()
          invalidateRunHistory(runHistoryUrl)

          if (onError)
            onError(params)
@@ -803,6 +813,7 @@ export const useWorkflowRun = () => {
        },
        onWorkflowPaused: (params) => {
          handleWorkflowPaused()
          invalidateRunHistory(runHistoryUrl)
          if (onWorkflowPaused)
            onWorkflowPaused(params)
          const url = `/workflow/${params.workflow_run_id}/events`
@@ -837,7 +848,7 @@ export const useWorkflowRun = () => {
      },
      finalCallbacks,
    )
  }, [store, doSyncWorkflowDraft, workflowStore, pathname, handleWorkflowFailed, flowId, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace, handleWorkflowPaused, handleWorkflowNodeHumanInputRequired, handleWorkflowNodeHumanInputFormFilled, handleWorkflowNodeHumanInputFormTimeout])
  }, [store, doSyncWorkflowDraft, workflowStore, pathname, handleWorkflowFailed, flowId, handleWorkflowStarted, handleWorkflowFinished, fetchInspectVars, invalidAllLastRun, invalidateRunHistory, handleWorkflowNodeStarted, handleWorkflowNodeFinished, handleWorkflowNodeIterationStarted, handleWorkflowNodeIterationNext, handleWorkflowNodeIterationFinished, handleWorkflowNodeLoopStarted, handleWorkflowNodeLoopNext, handleWorkflowNodeLoopFinished, handleWorkflowNodeRetry, handleWorkflowAgentLog, handleWorkflowTextChunk, handleWorkflowTextReplace, handleWorkflowPaused, handleWorkflowNodeHumanInputRequired, handleWorkflowNodeHumanInputFormFilled, handleWorkflowNodeHumanInputFormTimeout])

  const handleStopRun = useCallback((taskId: string) => {
    const setStoppedState = () => {

@ -1,18 +1,8 @@
import {
RiCheckboxCircleLine,
RiCloseLine,
RiErrorWarningLine,
} from '@remixicon/react'
import {
memo,
useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import { AlertTriangle } from '@/app/components/base/icons/src/vender/line/alertsAndFeedback'
import {
ClockPlay,
ClockPlaySlim,
} from '@/app/components/base/icons/src/vender/line/time'
import Loading from '@/app/components/base/loading'
import {
PortalToFollowElem,
@ -89,9 +79,7 @@ const ViewHistory = ({
open && 'bg-components-button-secondary-bg-hover',
)}
>
<ClockPlay
className="mr-1 h-4 w-4"
/>
<span className="i-custom-vender-line-time-clock-play mr-1 h-4 w-4" />
{t('common.showRunHistory', { ns: 'workflow' })}
</div>
)
@ -107,7 +95,7 @@ const ViewHistory = ({
onClearLogAndMessageModal?.()
}}
>
<ClockPlay className={cn('h-4 w-4 group-hover:text-components-button-secondary-accent-text', open ? 'text-components-button-secondary-accent-text' : 'text-components-button-ghost-text')} />
<span className={cn('i-custom-vender-line-time-clock-play', 'h-4 w-4 group-hover:text-components-button-secondary-accent-text', open ? 'text-components-button-secondary-accent-text' : 'text-components-button-ghost-text')} />
</div>
</Tooltip>
)
@ -129,7 +117,7 @@ const ViewHistory = ({
setOpen(false)
}}
>
<RiCloseLine className="h-4 w-4 text-text-tertiary" />
<span className="i-ri-close-line h-4 w-4 text-text-tertiary" />
</div>
</div>
{
@ -145,7 +133,7 @@ const ViewHistory = ({
{
!data?.data.length && (
<div className="py-12">
<ClockPlaySlim className="mx-auto mb-2 h-8 w-8 text-text-quaternary" />
<span className="i-custom-vender-line-time-clock-play-slim mx-auto mb-2 h-8 w-8 text-text-quaternary" />
<div className="text-center text-[13px] text-text-quaternary">
{t('common.notRunning', { ns: 'workflow' })}
</div>
@ -175,18 +163,18 @@ const ViewHistory = ({
}}
>
{
!isChatMode && item.status === WorkflowRunningStatus.Stopped && (
<AlertTriangle className="mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#F79009]" />
!isChatMode && [WorkflowRunningStatus.Stopped, WorkflowRunningStatus.Paused].includes(item.status) && (
<span className="i-custom-vender-line-alertsAndFeedback-alert-triangle mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#F79009]" />
)
}
{
!isChatMode && item.status === WorkflowRunningStatus.Failed && (
<RiErrorWarningLine className="mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#F04438]" />
<span className="i-ri-error-warning-line mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#F04438]" />
)
}
{
!isChatMode && item.status === WorkflowRunningStatus.Succeeded && (
<RiCheckboxCircleLine className="mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#12B76A]" />
<span className="i-ri-checkbox-circle-line mr-1.5 mt-0.5 h-3.5 w-3.5 text-[#12B76A]" />
)
}
<div>
@ -196,7 +184,7 @@ const ViewHistory = ({
item.id === historyWorkflowData?.id && 'text-text-accent',
)}
>
{`Test ${isChatMode ? 'Chat' : 'Run'}${formatWorkflowRunIdentifier(item.finished_at)}`}
{`Test ${isChatMode ? 'Chat' : 'Run'}${formatWorkflowRunIdentifier(item.finished_at, item.status)}`}
</div>
<div className="flex items-center text-xs leading-[18px] text-text-tertiary">
{item.created_by_account?.name}
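
The pattern running through these ViewHistory hunks (and the BaseNode hunks below) is a swap from per-icon @remixicon/react components to iconify-style utility classes. A minimal before/after sketch in TypeScript, assuming a build-time icon preset (e.g. UnoCSS presetIcons) resolves the i-ri-* class names; the preset setup itself is not part of this diff:

// Before: every icon is a separate React component import.
// import { RiCloseLine } from '@remixicon/react'
// const Close = () => <RiCloseLine className="h-4 w-4 text-text-tertiary" />

// After: a plain span whose class name the (assumed) icon preset turns into
// a CSS icon, so no per-icon JS import is bundled.
const Close = () => <span className="i-ri-close-line h-4 w-4 text-text-tertiary" />

export default Close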
@ -159,6 +159,9 @@ const useLastRun = <T>({
if (!warningForNode)
return false

if (warningForNode.unConnected && !warningForNode.errorMessage)
return false

const message = warningForNode.errorMessage || 'This node has unresolved checklist issues'
Toast.notify({ type: 'error', message })
return true

@ -4,13 +4,6 @@ import type {
} from 'react'
import type { IterationNodeType } from '@/app/components/workflow/nodes/iteration/types'
import type { NodeProps } from '@/app/components/workflow/types'
import {
RiAlertFill,
RiCheckboxCircleFill,
RiErrorWarningFill,
RiLoader2Line,
RiPauseCircleFill,
} from '@remixicon/react'
import {
cloneElement,
memo,
@ -109,7 +102,7 @@ const BaseNode: FC<BaseNodeProps> = ({
} = useMemo(() => {
return {
showRunningBorder: (data._runningStatus === NodeRunningStatus.Running || data._runningStatus === NodeRunningStatus.Paused) && !showSelectedBorder,
showSuccessBorder: (data._runningStatus === NodeRunningStatus.Succeeded || hasVarValue) && !showSelectedBorder,
showSuccessBorder: (data._runningStatus === NodeRunningStatus.Succeeded || (hasVarValue && !data._runningStatus)) && !showSelectedBorder,
showFailedBorder: data._runningStatus === NodeRunningStatus.Failed && !showSelectedBorder,
showExceptionBorder: data._runningStatus === NodeRunningStatus.Exception && !showSelectedBorder,
}
@ -127,7 +120,7 @@ const BaseNode: FC<BaseNodeProps> = ({
return (
<div
className={cn(
'system-xs-medium mr-2 text-text-tertiary',
'mr-2 text-text-tertiary system-xs-medium',
data._runningStatus === NodeRunningStatus.Running && 'text-text-accent',
)}
>
@ -167,7 +160,7 @@ const BaseNode: FC<BaseNodeProps> = ({
{
data.type === BlockEnum.DataSource && (
<div className="absolute inset-[-2px] top-[-22px] z-[-1] rounded-[18px] bg-node-data-source-bg p-0.5 backdrop-blur-[6px]">
<div className="system-2xs-semibold-uppercase flex h-5 items-center px-2.5 text-text-tertiary">
<div className="flex h-5 items-center px-2.5 text-text-tertiary system-2xs-semibold-uppercase">
{t('blocks.datasource', { ns: 'workflow' })}
</div>
</div>
@ -252,7 +245,7 @@ const BaseNode: FC<BaseNodeProps> = ({
/>
<div
title={data.title}
className="system-sm-semibold-uppercase mr-1 flex grow items-center truncate text-text-primary"
className="mr-1 flex grow items-center truncate text-text-primary system-sm-semibold-uppercase"
>
<div>
{data.title}
@ -268,7 +261,7 @@ const BaseNode: FC<BaseNodeProps> = ({
</div>
)}
>
<div className="system-2xs-medium-uppercase ml-1 flex items-center justify-center rounded-[5px] border-[1px] border-text-warning px-[5px] py-[3px] text-text-warning ">
<div className="ml-1 flex items-center justify-center rounded-[5px] border-[1px] border-text-warning px-[5px] py-[3px] text-text-warning system-2xs-medium-uppercase">
{t('nodes.iteration.parallelModeUpper', { ns: 'workflow' })}
</div>
</Tooltip>
@ -288,26 +281,26 @@ const BaseNode: FC<BaseNodeProps> = ({
!!(data.type === BlockEnum.Loop && data._loopIndex) && LoopIndex
}
{
isLoading && <RiLoader2Line className="h-3.5 w-3.5 animate-spin text-text-accent" />
isLoading && <span className="i-ri-loader-2-line h-3.5 w-3.5 animate-spin text-text-accent" />
}
{
!isLoading && data._runningStatus === NodeRunningStatus.Failed && (
<RiErrorWarningFill className="h-3.5 w-3.5 text-text-destructive" />
<span className="i-ri-error-warning-fill h-3.5 w-3.5 text-text-destructive" />
)
}
{
!isLoading && data._runningStatus === NodeRunningStatus.Exception && (
<RiAlertFill className="h-3.5 w-3.5 text-text-warning-secondary" />
<span className="i-ri-alert-fill h-3.5 w-3.5 text-text-warning-secondary" />
)
}
{
!isLoading && (data._runningStatus === NodeRunningStatus.Succeeded || hasVarValue) && (
<RiCheckboxCircleFill className="h-3.5 w-3.5 text-text-success" />
!isLoading && (data._runningStatus === NodeRunningStatus.Succeeded || (hasVarValue && !data._runningStatus)) && (
<span className="i-ri-checkbox-circle-fill h-3.5 w-3.5 text-text-success" />
)
}
{
!isLoading && data._runningStatus === NodeRunningStatus.Paused && (
<RiPauseCircleFill className="h-3.5 w-3.5 text-text-warning-secondary" />
<span className="i-ri-pause-circle-fill h-3.5 w-3.5 text-text-warning-secondary" />
)
}
</div>
@ -341,7 +334,7 @@ const BaseNode: FC<BaseNodeProps> = ({
}
{
!!(data.desc && data.type !== BlockEnum.Iteration && data.type !== BlockEnum.Loop) && (
<div className="system-xs-regular whitespace-pre-line break-words px-3 pb-2 pt-1 text-text-tertiary">
<div className="whitespace-pre-line break-words px-3 pb-2 pt-1 text-text-tertiary system-xs-regular">
{data.desc}
</div>
)

@ -0,0 +1,567 @@
import type { ReactNode } from 'react'
import type { HumanInputNodeType } from '@/app/components/workflow/nodes/human-input/types'
import type {
Edge,
Node,
} from '@/app/components/workflow/types'
import { render, screen } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import { WORKFLOW_COMMON_NODES } from '@/app/components/workflow/constants/node'
import humanInputDefault from '@/app/components/workflow/nodes/human-input/default'
import HumanInputNode from '@/app/components/workflow/nodes/human-input/node'
import {
DeliveryMethodType,
UserActionButtonType,
} from '@/app/components/workflow/nodes/human-input/types'
import { BlockEnum } from '@/app/components/workflow/types'
import { initialNodes, preprocessNodesAndEdges } from '@/app/components/workflow/utils/workflow-init'

// Mock reactflow which is needed by initialNodes and NodeSourceHandle
vi.mock('reactflow', async () => {
const reactflow = await vi.importActual('reactflow')
return {
...reactflow,
Handle: ({ children }: { children?: ReactNode }) => <div data-testid="handle">{children}</div>,
}
})

// Minimal store state mirroring the fields that NodeSourceHandle selects
const mockStoreState = {
shouldAutoOpenStartNodeSelector: false,
setShouldAutoOpenStartNodeSelector: vi.fn(),
setHasSelectedStartNode: vi.fn(),
}

// Mock workflow store used by NodeSourceHandle
// useStore accepts a selector and applies it to the state, so tests break
// if the component starts selecting fields that aren't provided here.
vi.mock('@/app/components/workflow/store', () => ({
useStore: vi.fn((selector?: (s: typeof mockStoreState) => unknown) =>
selector ? selector(mockStoreState) : mockStoreState,
),
useWorkflowStore: vi.fn(() => ({
getState: () => ({
getNodes: () => [],
}),
})),
}))

// Mock workflow hooks barrel (used by NodeSourceHandle via ../../../hooks)
vi.mock('@/app/components/workflow/hooks', () => ({
useNodesInteractions: () => ({
handleNodeAdd: vi.fn(),
}),
useNodesReadOnly: () => ({
getNodesReadOnly: () => false,
nodesReadOnly: false,
}),
useAvailableBlocks: () => ({
availableNextBlocks: [],
availablePrevBlocks: [],
}),
useIsChatMode: () => false,
}))

// ── Factory: Build a realistic human-input node as it would appear after DSL import ──
const createHumanInputNode = (overrides?: Partial<HumanInputNodeType>): Node => ({
id: 'human-input-1',
type: 'custom',
position: { x: 400, y: 200 },
data: {
type: BlockEnum.HumanInput,
title: 'Human Input',
desc: 'Wait for human input',
delivery_methods: [
{
id: 'dm-1',
type: DeliveryMethodType.WebApp,
enabled: true,
},
{
id: 'dm-2',
type: DeliveryMethodType.Email,
enabled: true,
config: {
recipients: { whole_workspace: false, items: [] },
subject: 'Please review',
body: 'Please review the form',
debug_mode: false,
},
},
],
form_content: '# Review Form\nPlease fill in the details below.',
inputs: [
{
type: 'text-input',
output_variable_name: 'review_result',
default: { selector: [], type: 'constant' as const, value: '' },
},
],
user_actions: [
{
id: 'approve',
title: 'Approve',
button_style: UserActionButtonType.Primary,
},
{
id: 'reject',
title: 'Reject',
button_style: UserActionButtonType.Default,
},
],
timeout: 3,
timeout_unit: 'day' as const,
...overrides,
} as HumanInputNodeType,
})

const createStartNode = (): Node => ({
id: 'start-1',
type: 'custom',
position: { x: 100, y: 200 },
data: {
type: BlockEnum.Start,
title: 'Start',
desc: '',
} as Node['data'],
})

const createEdge = (source: string, target: string, sourceHandle = 'source', targetHandle = 'target'): Edge => ({
id: `${source}-${sourceHandle}-${target}-${targetHandle}`,
type: 'custom',
source,
sourceHandle,
target,
targetHandle,
data: {},
} as Edge)

describe('DSL Import with Human Input Node', () => {
// ── preprocessNodesAndEdges: human-input nodes pass through without error ──
describe('preprocessNodesAndEdges', () => {
it('should pass through a workflow containing a human-input node unchanged', () => {
const humanInputNode = createHumanInputNode()
const startNode = createStartNode()
const nodes = [startNode, humanInputNode]
const edges = [createEdge('start-1', 'human-input-1')]

const result = preprocessNodesAndEdges(nodes as Node[], edges as Edge[])

expect(result.nodes).toHaveLength(2)
expect(result.edges).toHaveLength(1)
expect(result.nodes).toEqual(nodes)
expect(result.edges).toEqual(edges)
})

it('should not treat human-input node as an iteration or loop node', () => {
const humanInputNode = createHumanInputNode()
const nodes = [humanInputNode]

const result = preprocessNodesAndEdges(nodes as Node[], [])

// No extra iteration/loop start nodes should be injected
expect(result.nodes).toHaveLength(1)
expect(result.nodes[0].data.type).toBe(BlockEnum.HumanInput)
})
})

// ── initialNodes: human-input nodes are properly initialized ──
describe('initialNodes', () => {
it('should initialize a human-input node with connected handle IDs', () => {
const humanInputNode = createHumanInputNode()
const startNode = createStartNode()
const nodes = [startNode, humanInputNode]
const edges = [createEdge('start-1', 'human-input-1')]

const result = initialNodes(nodes as Node[], edges as Edge[])

const processedHumanInput = result.find(n => n.id === 'human-input-1')
expect(processedHumanInput).toBeDefined()
expect(processedHumanInput!.data.type).toBe(BlockEnum.HumanInput)
// initialNodes sets _connectedSourceHandleIds and _connectedTargetHandleIds
expect(processedHumanInput!.data._connectedSourceHandleIds).toBeDefined()
expect(processedHumanInput!.data._connectedTargetHandleIds).toBeDefined()
})

it('should preserve human-input node data after initialization', () => {
const humanInputNode = createHumanInputNode()
const nodes = [humanInputNode]

const result = initialNodes(nodes as Node[], [])

const processed = result[0]
const nodeData = processed.data as HumanInputNodeType
expect(nodeData.delivery_methods).toHaveLength(2)
expect(nodeData.user_actions).toHaveLength(2)
expect(nodeData.form_content).toBe('# Review Form\nPlease fill in the details below.')
expect(nodeData.timeout).toBe(3)
expect(nodeData.timeout_unit).toBe('day')
})

it('should set node type to custom if not set', () => {
const humanInputNode = createHumanInputNode()
delete (humanInputNode as Record<string, unknown>).type

const result = initialNodes([humanInputNode] as Node[], [])

expect(result[0].type).toBe('custom')
})
})

// ── Node component: renders without crashing for all data variations ──
describe('HumanInputNode Component', () => {
it('should render without crashing with full DSL data', () => {
const node = createHumanInputNode()

expect(() => {
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
}).not.toThrow()
})

it('should display delivery method labels when methods are present', () => {
const node = createHumanInputNode()

render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)

// Delivery method type labels are rendered in lowercase
expect(screen.getByText('webapp')).toBeInTheDocument()
expect(screen.getByText('email')).toBeInTheDocument()
})

it('should display user action IDs', () => {
const node = createHumanInputNode()

render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)

expect(screen.getByText('approve')).toBeInTheDocument()
expect(screen.getByText('reject')).toBeInTheDocument()
})

it('should always display Timeout handle', () => {
const node = createHumanInputNode()

render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)

expect(screen.getByText('Timeout')).toBeInTheDocument()
})

it('should render without crashing when delivery_methods is empty', () => {
const node = createHumanInputNode({ delivery_methods: [] })

expect(() => {
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
}).not.toThrow()

// Delivery method section should not be rendered
expect(screen.queryByText('webapp')).not.toBeInTheDocument()
expect(screen.queryByText('email')).not.toBeInTheDocument()
})

it('should render without crashing when user_actions is empty', () => {
const node = createHumanInputNode({ user_actions: [] })

expect(() => {
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
}).not.toThrow()

// Timeout handle should still exist
expect(screen.getByText('Timeout')).toBeInTheDocument()
})

it('should render without crashing when both delivery_methods and user_actions are empty', () => {
const node = createHumanInputNode({
delivery_methods: [],
user_actions: [],
form_content: '',
inputs: [],
})

expect(() => {
render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)
}).not.toThrow()
})

it('should render with only webapp delivery method', () => {
const node = createHumanInputNode({
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: true },
],
})

render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)

expect(screen.getByText('webapp')).toBeInTheDocument()
expect(screen.queryByText('email')).not.toBeInTheDocument()
})

it('should render with multiple user actions', () => {
const node = createHumanInputNode({
user_actions: [
{ id: 'action_1', title: 'Approve', button_style: UserActionButtonType.Primary },
{ id: 'action_2', title: 'Reject', button_style: UserActionButtonType.Default },
{ id: 'action_3', title: 'Escalate', button_style: UserActionButtonType.Accent },
],
})

render(
<HumanInputNode
id={node.id}
data={node.data as HumanInputNodeType}
/>,
)

expect(screen.getByText('action_1')).toBeInTheDocument()
expect(screen.getByText('action_2')).toBeInTheDocument()
expect(screen.getByText('action_3')).toBeInTheDocument()
})
})

// ── Node registration: human-input is included in the workflow node registry ──
// Verify via WORKFLOW_COMMON_NODES (lightweight metadata-only imports) instead
// of NodeComponentMap/PanelComponentMap which pull in every node's heavy UI deps.
describe('Node Registration', () => {
it('should have HumanInput included in WORKFLOW_COMMON_NODES', () => {
const entry = WORKFLOW_COMMON_NODES.find(
n => n.metaData.type === BlockEnum.HumanInput,
)
expect(entry).toBeDefined()
})
})

// ── Default config & validation ──
describe('HumanInput Default Configuration', () => {
it('should provide default values for a new human-input node', () => {
const defaultValue = humanInputDefault.defaultValue

expect(defaultValue.delivery_methods).toEqual([])
expect(defaultValue.user_actions).toEqual([])
expect(defaultValue.form_content).toBe('')
expect(defaultValue.inputs).toEqual([])
expect(defaultValue.timeout).toBe(3)
expect(defaultValue.timeout_unit).toBe('day')
})

it('should validate that delivery methods are required', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [],
} as HumanInputNodeType

const result = humanInputDefault.checkValid(payload, t)

expect(result.isValid).toBe(false)
expect(result.errorMessage).toBeTruthy()
})

it('should validate that at least one delivery method is enabled', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: false },
],
user_actions: [
{ id: 'approve', title: 'Approve', button_style: UserActionButtonType.Primary },
],
} as HumanInputNodeType

const result = humanInputDefault.checkValid(payload, t)

expect(result.isValid).toBe(false)
})

it('should validate that user actions are required', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: true },
],
user_actions: [],
} as HumanInputNodeType

const result = humanInputDefault.checkValid(payload, t)

expect(result.isValid).toBe(false)
})

it('should validate that user action IDs are not duplicated', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: true },
],
user_actions: [
{ id: 'approve', title: 'Approve', button_style: UserActionButtonType.Primary },
{ id: 'approve', title: 'Also Approve', button_style: UserActionButtonType.Default },
],
} as HumanInputNodeType

const result = humanInputDefault.checkValid(payload, t)

expect(result.isValid).toBe(false)
})

it('should pass validation with correct configuration', () => {
const t = (key: string) => key
const payload = {
...humanInputDefault.defaultValue,
delivery_methods: [
{ id: 'dm-1', type: DeliveryMethodType.WebApp, enabled: true },
],
user_actions: [
{ id: 'approve', title: 'Approve', button_style: UserActionButtonType.Primary },
{ id: 'reject', title: 'Reject', button_style: UserActionButtonType.Default },
],
} as HumanInputNodeType

const result = humanInputDefault.checkValid(payload, t)

expect(result.isValid).toBe(true)
expect(result.errorMessage).toBe('')
})
})

// ── Output variables generation ──
describe('HumanInput Output Variables', () => {
it('should generate output variables from form inputs', () => {
const payload = {
...humanInputDefault.defaultValue,
inputs: [
{ type: 'text-input', output_variable_name: 'review_result', default: { selector: [], type: 'constant' as const, value: '' } },
{ type: 'text-input', output_variable_name: 'comment', default: { selector: [], type: 'constant' as const, value: '' } },
],
} as HumanInputNodeType

const outputVars = humanInputDefault.getOutputVars!(payload, {}, [])

expect(outputVars).toEqual([
{ variable: 'review_result', type: 'string' },
{ variable: 'comment', type: 'string' },
])
})

it('should return empty output variables when no form inputs exist', () => {
const payload = {
...humanInputDefault.defaultValue,
inputs: [],
} as HumanInputNodeType

const outputVars = humanInputDefault.getOutputVars!(payload, {}, [])

expect(outputVars).toEqual([])
})
})

// ── Full DSL import simulation: start → human-input → end ──
describe('Full Workflow with Human Input Node', () => {
it('should process a start → human-input → end workflow without errors', () => {
const startNode = createStartNode()
const humanInputNode = createHumanInputNode()
const endNode: Node = {
id: 'end-1',
type: 'custom',
position: { x: 700, y: 200 },
data: {
type: BlockEnum.End,
title: 'End',
desc: '',
outputs: [],
} as Node['data'],
}

const nodes = [startNode, humanInputNode, endNode]
const edges = [
createEdge('start-1', 'human-input-1'),
createEdge('human-input-1', 'end-1', 'approve', 'target'),
]

const processed = preprocessNodesAndEdges(nodes as Node[], edges as Edge[])
expect(processed.nodes).toHaveLength(3)
expect(processed.edges).toHaveLength(2)

const initialized = initialNodes(nodes as Node[], edges as Edge[])
expect(initialized).toHaveLength(3)

// All node types should be preserved
const types = initialized.map(n => n.data.type)
expect(types).toContain(BlockEnum.Start)
expect(types).toContain(BlockEnum.HumanInput)
expect(types).toContain(BlockEnum.End)
})

it('should handle multiple branches from human-input user actions', () => {
const startNode = createStartNode()
const humanInputNode = createHumanInputNode()
const approveEndNode: Node = {
id: 'approve-end',
type: 'custom',
position: { x: 700, y: 100 },
data: { type: BlockEnum.End, title: 'Approve End', desc: '', outputs: [] } as Node['data'],
}
const rejectEndNode: Node = {
id: 'reject-end',
type: 'custom',
position: { x: 700, y: 300 },
data: { type: BlockEnum.End, title: 'Reject End', desc: '', outputs: [] } as Node['data'],
}

const nodes = [startNode, humanInputNode, approveEndNode, rejectEndNode]
const edges = [
createEdge('start-1', 'human-input-1'),
createEdge('human-input-1', 'approve-end', 'approve', 'target'),
createEdge('human-input-1', 'reject-end', 'reject', 'target'),
]

const initialized = initialNodes(nodes as Node[], edges as Edge[])
expect(initialized).toHaveLength(4)

// Human input node should still have correct data
const hiNode = initialized.find(n => n.id === 'human-input-1')!
expect((hiNode.data as HumanInputNodeType).user_actions).toHaveLength(2)
expect((hiNode.data as HumanInputNodeType).delivery_methods).toHaveLength(2)
})
})
})

@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next'
import { InputNumber } from '@/app/components/base/input-number'
import Switch from '@/app/components/base/switch'
import Tooltip from '@/app/components/base/tooltip'
import { env } from '@/env'

export type TopKAndScoreThresholdProps = {
topK: number
@ -15,12 +16,7 @@ export type TopKAndScoreThresholdProps = {
hiddenScoreThreshold?: boolean
}

const maxTopK = (() => {
const configValue = Number.parseInt(globalThis.document?.body?.getAttribute('data-public-top-k-max-value') || '', 10)
if (configValue && !isNaN(configValue))
return configValue
return 10
})()
const maxTopK = env.NEXT_PUBLIC_TOP_K_MAX_VALUE
const TOP_K_VALUE_LIMIT = {
amount: 1,
min: 1,

@ -1,6 +1,6 @@
import type { ValidationError } from 'jsonschema'
import type { ArrayItems, Field, LLMNodeType } from './types'
import { z } from 'zod'
import * as z from 'zod'
import { draft07Validator, forbidBooleanProperties } from '@/utils/validators'
import { ArrayType, Type } from './types'

@ -1,7 +1,3 @@
import {
RiClipboardLine,
RiCloseLine,
} from '@remixicon/react'
import copy from 'copy-to-clipboard'
import {
memo,
@ -115,9 +111,9 @@ const WorkflowPreview = () => {
onMouseDown={startResizing}
/>
<div className="flex items-center justify-between p-4 pb-1 text-base font-semibold text-text-primary">
{`Test Run${formatWorkflowRunIdentifier(workflowRunningData?.result.finished_at)}`}
{`Test Run${formatWorkflowRunIdentifier(workflowRunningData?.result.finished_at, workflowRunningData?.result.status)}`}
<div className="cursor-pointer p-1" onClick={() => handleCancelDebugAndPreviewPanel()}>
<RiCloseLine className="h-4 w-4 text-text-tertiary" />
<span className="i-ri-close-line h-4 w-4 text-text-tertiary" />
</div>
</div>
<div className="relative flex grow flex-col">
@ -217,7 +213,7 @@ const WorkflowPreview = () => {
Toast.notify({ type: 'success', message: t('actionMsg.copySuccessfully', { ns: 'common' }) })
}}
>
<RiClipboardLine className="h-3.5 w-3.5" />
<span className="i-ri-clipboard-line h-3.5 w-3.5" />
<div>{t('operation.copy', { ns: 'common' })}</div>
</Button>
)}

@ -41,8 +41,10 @@ export const isEventTargetInputArea = (target: HTMLElement) => {
* @returns Formatted string like " (14:30:25)" or " (Running)"
*/
export const formatWorkflowRunIdentifier = (finishedAt?: number, fallbackText = 'Running'): string => {
if (!finishedAt)
return ` (${fallbackText})`
if (!finishedAt) {
const capitalized = fallbackText.charAt(0).toUpperCase() + fallbackText.slice(1)
return ` (${capitalized})`
}

const date = new Date(finishedAt * 1000)
const timeStr = date.toLocaleTimeString([], {
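
A sketch of the updated helper reconstructed from this hunk; the post-change default of fallbackText and the toLocaleTimeString options are assumptions, since the hunk is truncated at the formatting call:

// Sketch only: call sites above now forward the run status as fallbackText.
export const formatWorkflowRunIdentifier = (finishedAt?: number, fallbackText = 'running'): string => {
  if (!finishedAt) {
    // Capitalize whatever fallback was passed, e.g. 'running' -> 'Running',
    // so a lowercase status value still reads well in the panel title.
    const capitalized = fallbackText.charAt(0).toUpperCase() + fallbackText.slice(1)
    return ` (${capitalized})`
  }

  // finished_at is a unix timestamp in seconds.
  const date = new Date(finishedAt * 1000)
  // Assumed options: the original formatting arguments are cut off above.
  const timeStr = date.toLocaleTimeString([], { hour12: false })
  return ` (${timeStr})`
}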
@ -1,4 +1,4 @@
import type { ValidationError } from 'jsonschema'
import { z } from 'zod'
import * as z from 'zod'

const arrayStringSchemaParttern = z.array(z.string())
const arrayNumberSchemaParttern = z.array(z.number())
@ -7,7 +7,7 @@ const arrayNumberSchemaParttern = z.array(z.number())
const literalSchema = z.union([z.string(), z.number(), z.boolean(), z.null()])
type Literal = z.infer<typeof literalSchema>
type Json = Literal | { [key: string]: Json } | Json[]
const jsonSchema: z.ZodType<Json> = z.lazy(() => z.union([literalSchema, z.array(jsonSchema), z.record(jsonSchema)]))
const jsonSchema: z.ZodType<Json> = z.lazy(() => z.union([literalSchema, z.array(jsonSchema), z.record(z.string(), jsonSchema)]))
const arrayJsonSchema: z.ZodType<Json[]> = z.lazy(() => z.array(jsonSchema))

export const validateJSONSchema = (schema: any, type: string) => {
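
The one-line jsonSchema change tracks zod v4, where z.record() dropped its single-argument overload and now requires an explicit key schema; a standalone illustration:

import * as z from 'zod'

// zod v3 allowed z.record(valueSchema) with implied string keys;
// zod v4 makes the key schema explicit.
const scores = z.record(z.string(), z.number())
scores.parse({ alice: 1, bob: 2 }) // passes
// scores.parse({ alice: 'x' }) would throw a ZodError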
@ -7,7 +7,7 @@ import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { z } from 'zod'
import * as z from 'zod'
import Button from '@/app/components/base/button'
import { formContext, useAppForm } from '@/app/components/base/form'
import { zodSubmitValidator } from '@/app/components/base/form/utils/zod-submit-validator'
@ -22,10 +22,10 @@ import Input from '../components/base/input'
import Loading from '../components/base/loading'

const accountFormSchema = z.object({
email: z
.string()
.min(1, { message: 'error.emailInValid' })
.email('error.emailInValid'),
email: z.email('error.emailInValid')
.min(1, {
error: 'error.emailInValid',
}),
})

const ForgotPasswordForm = () => {

@ -7,7 +7,7 @@ import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useEffect } from 'react'
import { useTranslation } from 'react-i18next'
import { z } from 'zod'
import * as z from 'zod'
import Button from '@/app/components/base/button'
import { formContext, useAppForm } from '@/app/components/base/form'
import { zodSubmitValidator } from '@/app/components/base/form/utils/zod-submit-validator'
@ -22,13 +22,15 @@ import { encryptPassword as encodePassword } from '@/utils/encryption'
import Loading from '../components/base/loading'

const accountFormSchema = z.object({
email: z
.string()
.min(1, { message: 'error.emailInValid' })
.email('error.emailInValid'),
name: z.string().min(1, { message: 'error.nameEmpty' }),
email: z.email('error.emailInValid')
.min(1, {
error: 'error.emailInValid',
}),
name: z.string().min(1, {
error: 'error.nameEmpty',
}),
password: z.string().min(8, {
message: 'error.passwordLengthInValid',
error: 'error.passwordLengthInValid',
}).regex(validPassword, 'error.passwordInvalid'),
})

@ -197,7 +199,7 @@ const InstallForm = () => {
</div>

<div className={cn('mt-1 text-xs text-text-secondary', {
'text-red-400 !text-sm': passwordErrors && passwordErrors.length > 0,
'!text-sm text-red-400': passwordErrors && passwordErrors.length > 0,
})}
>
{t('error.passwordInvalid', { ns: 'login' })}
@ -5,8 +5,8 @@ import { Instrument_Serif } from 'next/font/google'
import { NuqsAdapter } from 'nuqs/adapters/next/app'
import GlobalPublicStoreProvider from '@/context/global-public-context'
import { TanstackQueryInitializer } from '@/context/query-client'
import { getDatasetMap } from '@/env'
import { getLocaleOnServer } from '@/i18n-config/server'
import { DatasetAttr } from '@/types/feature'
import { cn } from '@/utils/classnames'
import { ToastProvider } from './components/base/toast'
import BrowserInitializer from './components/browser-initializer'
@ -39,40 +39,7 @@ const LocaleLayout = async ({
children: React.ReactNode
}) => {
const locale = await getLocaleOnServer()

const datasetMap: Record<DatasetAttr, string | undefined> = {
[DatasetAttr.DATA_API_PREFIX]: process.env.NEXT_PUBLIC_API_PREFIX,
[DatasetAttr.DATA_PUBLIC_API_PREFIX]: process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
[DatasetAttr.DATA_MARKETPLACE_API_PREFIX]: process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX,
[DatasetAttr.DATA_MARKETPLACE_URL_PREFIX]: process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX,
[DatasetAttr.DATA_PUBLIC_EDITION]: process.env.NEXT_PUBLIC_EDITION,
[DatasetAttr.DATA_PUBLIC_AMPLITUDE_API_KEY]: process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY,
[DatasetAttr.DATA_PUBLIC_COOKIE_DOMAIN]: process.env.NEXT_PUBLIC_COOKIE_DOMAIN,
[DatasetAttr.DATA_PUBLIC_SUPPORT_MAIL_LOGIN]: process.env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN,
[DatasetAttr.DATA_PUBLIC_SENTRY_DSN]: process.env.NEXT_PUBLIC_SENTRY_DSN,
[DatasetAttr.DATA_PUBLIC_MAINTENANCE_NOTICE]: process.env.NEXT_PUBLIC_MAINTENANCE_NOTICE,
[DatasetAttr.DATA_PUBLIC_SITE_ABOUT]: process.env.NEXT_PUBLIC_SITE_ABOUT,
[DatasetAttr.DATA_PUBLIC_TEXT_GENERATION_TIMEOUT_MS]: process.env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS,
[DatasetAttr.DATA_PUBLIC_MAX_TOOLS_NUM]: process.env.NEXT_PUBLIC_MAX_TOOLS_NUM,
[DatasetAttr.DATA_PUBLIC_MAX_PARALLEL_LIMIT]: process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT,
[DatasetAttr.DATA_PUBLIC_TOP_K_MAX_VALUE]: process.env.NEXT_PUBLIC_TOP_K_MAX_VALUE,
[DatasetAttr.DATA_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH]: process.env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH,
[DatasetAttr.DATA_PUBLIC_LOOP_NODE_MAX_COUNT]: process.env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT,
[DatasetAttr.DATA_PUBLIC_MAX_ITERATIONS_NUM]: process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM,
[DatasetAttr.DATA_PUBLIC_MAX_TREE_DEPTH]: process.env.NEXT_PUBLIC_MAX_TREE_DEPTH,
[DatasetAttr.DATA_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME]: process.env.NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME,
[DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_JINAREADER]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER,
[DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_FIRECRAWL]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL,
[DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_WATERCRAWL]: process.env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL,
[DatasetAttr.DATA_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX]: process.env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_WIDGET_KEY]: process.env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID,
[DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN]: process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN,
[DatasetAttr.DATA_PUBLIC_BATCH_CONCURRENCY]: process.env.NEXT_PUBLIC_BATCH_CONCURRENCY,
}
const datasetMap = getDatasetMap()

return (
<html lang={locale ?? 'en'} className={cn('h-full', instrumentSerif.variable)} suppressHydrationWarning>
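
getDatasetMap is imported from '@/env' but its body lies outside this diff; a plausible sketch, assuming it simply rebuilds the same DatasetAttr-to-value record that used to be inlined here:

// Hypothetical shape of the helper; the real implementation in web/env.ts may differ.
import { DatasetAttr } from '@/types/feature'

export const getDatasetMap = (): Record<DatasetAttr, string | undefined> => ({
  [DatasetAttr.DATA_API_PREFIX]: process.env.NEXT_PUBLIC_API_PREFIX,
  [DatasetAttr.DATA_PUBLIC_API_PREFIX]: process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
  // ...one entry per DatasetAttr, mirroring the inline map removed above.
} as Record<DatasetAttr, string | undefined>)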
@ -1,6 +1,7 @@
import { createSerwistRoute } from '@serwist/turbopack'
import { env } from '@/env'

const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
const basePath = env.NEXT_PUBLIC_BASE_PATH

export const { dynamic, dynamicParams, revalidate, generateStaticParams, GET } = createSerwistRoute({
swSrc: 'app/sw.ts',

@ -1,101 +1,51 @@
import type { ModelParameterRule } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { InputVarType } from '@/app/components/workflow/types'
import { env } from '@/env'
import { PromptRole } from '@/models/debug'
import { PipelineInputVarType } from '@/models/pipeline'
import { AgentStrategy } from '@/types/app'
import { DatasetAttr } from '@/types/feature'
import pkg from '../package.json'

const getBooleanConfig = (
envVar: string | undefined,
dataAttrKey: DatasetAttr,
defaultValue: boolean = true,
) => {
if (envVar !== undefined && envVar !== '')
return envVar === 'true'
const attrValue = globalThis.document?.body?.getAttribute(dataAttrKey)
if (attrValue !== undefined && attrValue !== '')
return attrValue === 'true'
return defaultValue
}

const getNumberConfig = (
envVar: string | undefined,
dataAttrKey: DatasetAttr,
defaultValue: number,
) => {
if (envVar) {
const parsed = Number.parseInt(envVar)
if (!Number.isNaN(parsed) && parsed > 0)
return parsed
}

const attrValue = globalThis.document?.body?.getAttribute(dataAttrKey)
if (attrValue) {
const parsed = Number.parseInt(attrValue)
if (!Number.isNaN(parsed) && parsed > 0)
return parsed
}
return defaultValue
}

const getStringConfig = (
envVar: string | undefined,
dataAttrKey: DatasetAttr,
defaultValue: string,
) => {
if (envVar)
return envVar

const attrValue = globalThis.document?.body?.getAttribute(dataAttrKey)
if (attrValue)
return attrValue
return defaultValue
}

export const API_PREFIX = getStringConfig(
process.env.NEXT_PUBLIC_API_PREFIX,
DatasetAttr.DATA_API_PREFIX,
env.NEXT_PUBLIC_API_PREFIX,
'http://localhost:5001/console/api',
)
export const PUBLIC_API_PREFIX = getStringConfig(
process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
DatasetAttr.DATA_PUBLIC_API_PREFIX,
env.NEXT_PUBLIC_PUBLIC_API_PREFIX,
'http://localhost:5001/api',
)
export const MARKETPLACE_API_PREFIX = getStringConfig(
process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX,
DatasetAttr.DATA_MARKETPLACE_API_PREFIX,
env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX,
'http://localhost:5002/api',
)
export const MARKETPLACE_URL_PREFIX = getStringConfig(
process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX,
DatasetAttr.DATA_MARKETPLACE_URL_PREFIX,
env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX,
'',
)

const EDITION = getStringConfig(
process.env.NEXT_PUBLIC_EDITION,
DatasetAttr.DATA_PUBLIC_EDITION,
'SELF_HOSTED',
)
const EDITION = env.NEXT_PUBLIC_EDITION

export const IS_CE_EDITION = EDITION === 'SELF_HOSTED'
export const IS_CLOUD_EDITION = EDITION === 'CLOUD'

export const AMPLITUDE_API_KEY = getStringConfig(
process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY,
DatasetAttr.DATA_PUBLIC_AMPLITUDE_API_KEY,
env.NEXT_PUBLIC_AMPLITUDE_API_KEY,
'',
)

export const IS_DEV = process.env.NODE_ENV === 'development'
export const IS_PROD = process.env.NODE_ENV === 'production'
export const IS_DEV = env.NODE_ENV === 'development'
export const IS_PROD = env.NODE_ENV === 'production'

export const SUPPORT_MAIL_LOGIN = !!(
process.env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN
|| globalThis.document?.body?.getAttribute('data-public-support-mail-login')
)
export const SUPPORT_MAIL_LOGIN = env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN

export const TONE_LIST = [
{
@ -161,16 +111,11 @@ export const getMaxToken = (modelId: string) => {
export const LOCALE_COOKIE_NAME = 'locale'

const COOKIE_DOMAIN = getStringConfig(
process.env.NEXT_PUBLIC_COOKIE_DOMAIN,
DatasetAttr.DATA_PUBLIC_COOKIE_DOMAIN,
env.NEXT_PUBLIC_COOKIE_DOMAIN,
'',
).trim()

export const BATCH_CONCURRENCY = getNumberConfig(
process.env.NEXT_PUBLIC_BATCH_CONCURRENCY,
DatasetAttr.DATA_PUBLIC_BATCH_CONCURRENCY,
5, // default
)
export const BATCH_CONCURRENCY = env.NEXT_PUBLIC_BATCH_CONCURRENCY

export const CSRF_COOKIE_NAME = () => {
if (COOKIE_DOMAIN)
@ -344,112 +289,62 @@ export const resetReg = () => (VAR_REGEX.lastIndex = 0)
export const HITL_INPUT_REG = /\{\{(#\$output\.(?:[a-z_]\w{0,29}){1,10}#)\}\}/gi
export const resetHITLInputReg = () => HITL_INPUT_REG.lastIndex = 0

export const DISABLE_UPLOAD_IMAGE_AS_ICON = process.env.NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON === 'true'
export const DISABLE_UPLOAD_IMAGE_AS_ICON = env.NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON

export const GITHUB_ACCESS_TOKEN
= process.env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN || ''
= env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN

export const SUPPORT_INSTALL_LOCAL_FILE_EXTENSIONS = '.difypkg,.difybndl'
export const FULL_DOC_PREVIEW_LENGTH = 50

export const JSON_SCHEMA_MAX_DEPTH = 10

export const MAX_TOOLS_NUM = getNumberConfig(
process.env.NEXT_PUBLIC_MAX_TOOLS_NUM,
DatasetAttr.DATA_PUBLIC_MAX_TOOLS_NUM,
10,
)
export const MAX_PARALLEL_LIMIT = getNumberConfig(
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT,
DatasetAttr.DATA_PUBLIC_MAX_PARALLEL_LIMIT,
10,
)
export const TEXT_GENERATION_TIMEOUT_MS = getNumberConfig(
process.env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS,
DatasetAttr.DATA_PUBLIC_TEXT_GENERATION_TIMEOUT_MS,
60000,
)
export const LOOP_NODE_MAX_COUNT = getNumberConfig(
process.env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT,
DatasetAttr.DATA_PUBLIC_LOOP_NODE_MAX_COUNT,
100,
)
export const MAX_ITERATIONS_NUM = getNumberConfig(
process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM,
DatasetAttr.DATA_PUBLIC_MAX_ITERATIONS_NUM,
99,
)
export const MAX_TREE_DEPTH = getNumberConfig(
process.env.NEXT_PUBLIC_MAX_TREE_DEPTH,
DatasetAttr.DATA_PUBLIC_MAX_TREE_DEPTH,
50,
)
export const MAX_TOOLS_NUM = env.NEXT_PUBLIC_MAX_TOOLS_NUM
export const MAX_PARALLEL_LIMIT = env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
export const TEXT_GENERATION_TIMEOUT_MS = env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS
export const LOOP_NODE_MAX_COUNT = env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT
export const MAX_ITERATIONS_NUM = env.NEXT_PUBLIC_MAX_ITERATIONS_NUM
export const MAX_TREE_DEPTH = env.NEXT_PUBLIC_MAX_TREE_DEPTH

export const ALLOW_UNSAFE_DATA_SCHEME = getBooleanConfig(
process.env.NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME,
DatasetAttr.DATA_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME,
false,
)
export const ENABLE_WEBSITE_JINAREADER = getBooleanConfig(
process.env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER,
DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_JINAREADER,
true,
)
export const ENABLE_WEBSITE_FIRECRAWL = getBooleanConfig(
process.env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL,
DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_FIRECRAWL,
true,
)
export const ENABLE_WEBSITE_WATERCRAWL = getBooleanConfig(
process.env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL,
DatasetAttr.DATA_PUBLIC_ENABLE_WEBSITE_WATERCRAWL,
false,
)
export const ENABLE_SINGLE_DOLLAR_LATEX = getBooleanConfig(
process.env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
DatasetAttr.DATA_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX,
false,
)
export const ALLOW_UNSAFE_DATA_SCHEME = env.NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME
export const ENABLE_WEBSITE_JINAREADER = env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER
export const ENABLE_WEBSITE_FIRECRAWL = env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL
export const ENABLE_WEBSITE_WATERCRAWL = env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL
export const ENABLE_SINGLE_DOLLAR_LATEX = env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX

export const VALUE_SELECTOR_DELIMITER = '@@@'

export const validPassword = /^(?=.*[a-z])(?=.*\d)\S{8,}$/i

export const ZENDESK_WIDGET_KEY = getStringConfig(
process.env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
DatasetAttr.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY,
'',
)
export const ZENDESK_FIELD_IDS = {
ENVIRONMENT: getStringConfig(
process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT,
'',
),
VERSION: getStringConfig(
process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,
DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,
env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION,
'',
),
EMAIL: getStringConfig(
process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL,
DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL,
env.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL,
'',
),
WORKSPACE_ID: getStringConfig(
process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID,
DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID,
env.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID,
'',
),
PLAN: getStringConfig(
process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN,
DatasetAttr.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN,
env.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN,
'',
),
}
export const APP_VERSION = pkg.version

export const IS_MARKETPLACE = globalThis.document?.body?.getAttribute('data-is-marketplace') === 'true'
export const IS_MARKETPLACE = env.NEXT_PUBLIC_IS_MARKETPLACE

export const RAG_PIPELINE_PREVIEW_CHUNK_NUM = 20

@ -10,6 +10,7 @@ import { setUserId, setUserProperties } from '@/app/components/base/amplitude'
import { setZendeskConversationFields } from '@/app/components/base/zendesk/utils'
import MaintenanceNotice from '@/app/components/header/maintenance-notice'
import { ZENDESK_FIELD_IDS } from '@/config'
import { env } from '@/env'
import {
useCurrentWorkspace,
useLangGeniusVersion,
@ -204,7 +205,7 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
}}
>
<div className="flex h-full flex-col overflow-y-auto">
{globalThis.document?.body?.getAttribute('data-public-maintenance-notice') && <MaintenanceNotice />}
{env.NEXT_PUBLIC_MAINTENANCE_NOTICE && <MaintenanceNotice />}
<div className="relative flex grow flex-col overflow-y-auto overflow-x-hidden bg-background-body">
{children}
</div>

@ -4,7 +4,19 @@ import type { EventEmitter } from 'ahooks/lib/useEventEmitter'
import { useEventEmitter } from 'ahooks'
import { createContext, useContext } from 'use-context-selector'

const EventEmitterContext = createContext<{ eventEmitter: EventEmitter<string> | null }>({
/**
 * Typed event object emitted via the shared EventEmitter.
 * Covers workflow updates, prompt-editor commands, DSL export checks, etc.
 */
export type EventEmitterMessage = {
type: string
payload?: unknown
instanceId?: string
}

export type EventEmitterValue = string | EventEmitterMessage

const EventEmitterContext = createContext<{ eventEmitter: EventEmitter<EventEmitterValue> | null }>({
eventEmitter: null,
})

@ -16,7 +28,7 @@ type EventEmitterContextProviderProps = {
export const EventEmitterContextProvider = ({
children,
}: EventEmitterContextProviderProps) => {
const eventEmitter = useEventEmitter<string>()
const eventEmitter = useEventEmitter<EventEmitterValue>()

return (
<EventEmitterContext.Provider value={{ eventEmitter }}>
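
A hedged sketch of consuming the widened channel: subscribers narrow EventEmitterValue at runtime, so legacy string events and the new structured messages can coexist on one emitter (the import path is assumed):

import type { EventEmitterMessage, EventEmitterValue } from '@/context/event-emitter' // assumed path

// Narrow on receipt: strings are legacy events, objects are typed messages.
const handleEvent = (event: EventEmitterValue) => {
  if (typeof event === 'string') {
    console.log('legacy event:', event)
    return
  }
  const message: EventEmitterMessage = event
  console.log('typed event:', message.type, message.instanceId, message.payload)
}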
235 web/env.ts Normal file
@ -0,0 +1,235 @@
import type { CamelCase, Replace } from 'string-ts'
import { createEnv } from '@t3-oss/env-nextjs'
import { concat, kebabCase, length, slice } from 'string-ts'
import * as z from 'zod'
import { isClient, isServer } from './utils/client'
import { ObjectFromEntries, ObjectKeys } from './utils/object'

const CLIENT_ENV_PREFIX = 'NEXT_PUBLIC_'
type ClientSchema = Record<`${typeof CLIENT_ENV_PREFIX}${string}`, z.ZodType>

const coercedBoolean = z.string()
.refine(s => s === 'true' || s === 'false' || s === '0' || s === '1')
.transform(s => s === 'true' || s === '1')
const coercedNumber = z.coerce.number().int().positive()

/// keep-sorted
const clientSchema = {
/**
 * Default is not allow to embed into iframe to prevent Clickjacking: https://owasp.org/www-community/attacks/Clickjacking
 */
NEXT_PUBLIC_ALLOW_EMBED: coercedBoolean.default(false),
/**
 * Allow rendering unsafe URLs which have "data:" scheme.
 */
NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME: coercedBoolean.default(false),
/**
 * The API key of amplitude
 */
NEXT_PUBLIC_AMPLITUDE_API_KEY: z.string().optional(),
/**
 * The base URL of console application, refers to the Console base URL of WEB service if console domain is
 * different from api or web app domain.
 * example: http://cloud.dify.ai/console/api
 */
NEXT_PUBLIC_API_PREFIX: z.string().optional(),
/**
 * The base path for the application
 */
NEXT_PUBLIC_BASE_PATH: z.string().regex(/^\/.*[^/]$/).or(z.literal('')).default(''),
/**
 * number of concurrency
 */
NEXT_PUBLIC_BATCH_CONCURRENCY: coercedNumber.default(5),
/**
 * When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
 */
NEXT_PUBLIC_COOKIE_DOMAIN: z.string().optional(),
/**
 * CSP https://developer.mozilla.org/en-US/docs/Web/HTTP/CSP
 */
NEXT_PUBLIC_CSP_WHITELIST: z.string().optional(),
/**
 * For production release, change this to PRODUCTION
 */
NEXT_PUBLIC_DEPLOY_ENV: z.enum(['DEVELOPMENT', 'PRODUCTION', 'TESTING']).optional(),
NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON: coercedBoolean.default(false),
/**
 * The deployment edition, SELF_HOSTED
 */
NEXT_PUBLIC_EDITION: z.enum(['SELF_HOSTED', 'CLOUD']).default('SELF_HOSTED'),
/**
 * Enable inline LaTeX rendering with single dollar signs ($...$)
 * Default is false for security reasons to prevent conflicts with regular text
 */
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: coercedBoolean.default(false),
NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL: coercedBoolean.default(true),
NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER: coercedBoolean.default(true),
NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL: coercedBoolean.default(false),
/**
 * Github Access Token, used for invoking Github API
 */
NEXT_PUBLIC_GITHUB_ACCESS_TOKEN: z.string().optional(),
/**
 * The maximum number of tokens for segmentation
 */
NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: coercedNumber.default(4000),
NEXT_PUBLIC_IS_MARKETPLACE: coercedBoolean.default(false),
/**
 * Maximum loop count in the workflow
 */
NEXT_PUBLIC_LOOP_NODE_MAX_COUNT: coercedNumber.default(100),
NEXT_PUBLIC_MAINTENANCE_NOTICE: z.string().optional(),
/**
 * The API PREFIX for MARKETPLACE
 */
NEXT_PUBLIC_MARKETPLACE_API_PREFIX: z.url().optional(),
/**
 * The URL for MARKETPLACE
 */
NEXT_PUBLIC_MARKETPLACE_URL_PREFIX: z.url().optional(),
/**
 * The maximum number of iterations for agent setting
 */
NEXT_PUBLIC_MAX_ITERATIONS_NUM: coercedNumber.default(99),
/**
 * Maximum number of Parallelism branches in the workflow
 */
NEXT_PUBLIC_MAX_PARALLEL_LIMIT: coercedNumber.default(10),
/**
 * Maximum number of tools in the agent/workflow
 */
NEXT_PUBLIC_MAX_TOOLS_NUM: coercedNumber.default(10),
/**
 * The maximum number of tree node depth for workflow
 */
NEXT_PUBLIC_MAX_TREE_DEPTH: coercedNumber.default(50),
/**
 * The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
 * console or api domain.
 * example: http://udify.app/api
 */
NEXT_PUBLIC_PUBLIC_API_PREFIX: z.string().optional(),
/**
 * SENTRY
 */
NEXT_PUBLIC_SENTRY_DSN: z.string().optional(),
NEXT_PUBLIC_SITE_ABOUT: z.string().optional(),
NEXT_PUBLIC_SUPPORT_MAIL_LOGIN: coercedBoolean.default(false),
/**
 * The timeout for the text generation in millisecond
 */
NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS: coercedNumber.default(60000),
/**
 * The maximum number of top-k value for RAG.
 */
NEXT_PUBLIC_TOP_K_MAX_VALUE: coercedNumber.default(10),
/**
 * Disable Upload Image as WebApp icon default is false
 */
NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON: coercedBoolean.default(false),
NEXT_PUBLIC_WEB_PREFIX: z.url().optional(),
NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL: z.string().optional(),
NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT: z.string().optional(),
NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN: z.string().optional(),
NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION: z.string().optional(),
NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID: z.string().optional(),
NEXT_PUBLIC_ZENDESK_WIDGET_KEY: z.string().optional(),
} satisfies ClientSchema

export const env = createEnv({
server: {
/**
 * Maximum length of segmentation tokens for indexing
 */
INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: coercedNumber.default(4000),
/**
 * Disable Next.js Telemetry (https://nextjs.org/telemetry)
 */
NEXT_TELEMETRY_DISABLED: coercedBoolean.optional(),
PORT: coercedNumber.default(3000),
/**
 * The timeout for the text generation in millisecond
 */
TEXT_GENERATION_TIMEOUT_MS: coercedNumber.default(60000),
},
shared: {
NODE_ENV: z.enum(['development', 'test', 'production']).default('development'),
},
client: clientSchema,
experimental__runtimeEnv: {
NODE_ENV: process.env.NODE_ENV,
NEXT_PUBLIC_ALLOW_EMBED: isServer ? process.env.NEXT_PUBLIC_ALLOW_EMBED : getRuntimeEnvFromBody('allowEmbed'),
NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME: isServer ? process.env.NEXT_PUBLIC_ALLOW_UNSAFE_DATA_SCHEME : getRuntimeEnvFromBody('allowUnsafeDataScheme'),
NEXT_PUBLIC_AMPLITUDE_API_KEY: isServer ? process.env.NEXT_PUBLIC_AMPLITUDE_API_KEY : getRuntimeEnvFromBody('amplitudeApiKey'),
NEXT_PUBLIC_API_PREFIX: isServer ? process.env.NEXT_PUBLIC_API_PREFIX : getRuntimeEnvFromBody('apiPrefix'),
NEXT_PUBLIC_BASE_PATH: isServer ? process.env.NEXT_PUBLIC_BASE_PATH : getRuntimeEnvFromBody('basePath'),
NEXT_PUBLIC_BATCH_CONCURRENCY: isServer ? process.env.NEXT_PUBLIC_BATCH_CONCURRENCY : getRuntimeEnvFromBody('batchConcurrency'),
NEXT_PUBLIC_COOKIE_DOMAIN: isServer ? process.env.NEXT_PUBLIC_COOKIE_DOMAIN : getRuntimeEnvFromBody('cookieDomain'),
NEXT_PUBLIC_CSP_WHITELIST: isServer ? process.env.NEXT_PUBLIC_CSP_WHITELIST : getRuntimeEnvFromBody('cspWhitelist'),
NEXT_PUBLIC_DEPLOY_ENV: isServer ? process.env.NEXT_PUBLIC_DEPLOY_ENV : getRuntimeEnvFromBody('deployEnv'),
NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON: isServer ? process.env.NEXT_PUBLIC_DISABLE_UPLOAD_IMAGE_AS_ICON : getRuntimeEnvFromBody('disableUploadImageAsIcon'),
NEXT_PUBLIC_EDITION: isServer ? process.env.NEXT_PUBLIC_EDITION : getRuntimeEnvFromBody('edition'),
NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX: isServer ? process.env.NEXT_PUBLIC_ENABLE_SINGLE_DOLLAR_LATEX : getRuntimeEnvFromBody('enableSingleDollarLatex'),
NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_FIRECRAWL : getRuntimeEnvFromBody('enableWebsiteFirecrawl'),
NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_JINAREADER : getRuntimeEnvFromBody('enableWebsiteJinareader'),
NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL: isServer ? process.env.NEXT_PUBLIC_ENABLE_WEBSITE_WATERCRAWL : getRuntimeEnvFromBody('enableWebsiteWatercrawl'),
NEXT_PUBLIC_GITHUB_ACCESS_TOKEN: isServer ? process.env.NEXT_PUBLIC_GITHUB_ACCESS_TOKEN : getRuntimeEnvFromBody('githubAccessToken'),
NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: isServer ? process.env.NEXT_PUBLIC_INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH : getRuntimeEnvFromBody('indexingMaxSegmentationTokensLength'),
NEXT_PUBLIC_IS_MARKETPLACE: isServer ? process.env.NEXT_PUBLIC_IS_MARKETPLACE : getRuntimeEnvFromBody('isMarketplace'),
NEXT_PUBLIC_LOOP_NODE_MAX_COUNT: isServer ? process.env.NEXT_PUBLIC_LOOP_NODE_MAX_COUNT : getRuntimeEnvFromBody('loopNodeMaxCount'),
NEXT_PUBLIC_MAINTENANCE_NOTICE: isServer ? process.env.NEXT_PUBLIC_MAINTENANCE_NOTICE : getRuntimeEnvFromBody('maintenanceNotice'),
|
||||
NEXT_PUBLIC_MARKETPLACE_API_PREFIX: isServer ? process.env.NEXT_PUBLIC_MARKETPLACE_API_PREFIX : getRuntimeEnvFromBody('marketplaceApiPrefix'),
|
||||
NEXT_PUBLIC_MARKETPLACE_URL_PREFIX: isServer ? process.env.NEXT_PUBLIC_MARKETPLACE_URL_PREFIX : getRuntimeEnvFromBody('marketplaceUrlPrefix'),
|
||||
NEXT_PUBLIC_MAX_ITERATIONS_NUM: isServer ? process.env.NEXT_PUBLIC_MAX_ITERATIONS_NUM : getRuntimeEnvFromBody('maxIterationsNum'),
|
||||
NEXT_PUBLIC_MAX_PARALLEL_LIMIT: isServer ? process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT : getRuntimeEnvFromBody('maxParallelLimit'),
|
||||
NEXT_PUBLIC_MAX_TOOLS_NUM: isServer ? process.env.NEXT_PUBLIC_MAX_TOOLS_NUM : getRuntimeEnvFromBody('maxToolsNum'),
|
||||
NEXT_PUBLIC_MAX_TREE_DEPTH: isServer ? process.env.NEXT_PUBLIC_MAX_TREE_DEPTH : getRuntimeEnvFromBody('maxTreeDepth'),
|
||||
NEXT_PUBLIC_PUBLIC_API_PREFIX: isServer ? process.env.NEXT_PUBLIC_PUBLIC_API_PREFIX : getRuntimeEnvFromBody('publicApiPrefix'),
|
||||
NEXT_PUBLIC_SENTRY_DSN: isServer ? process.env.NEXT_PUBLIC_SENTRY_DSN : getRuntimeEnvFromBody('sentryDsn'),
|
||||
NEXT_PUBLIC_SITE_ABOUT: isServer ? process.env.NEXT_PUBLIC_SITE_ABOUT : getRuntimeEnvFromBody('siteAbout'),
|
||||
NEXT_PUBLIC_SUPPORT_MAIL_LOGIN: isServer ? process.env.NEXT_PUBLIC_SUPPORT_MAIL_LOGIN : getRuntimeEnvFromBody('supportMailLogin'),
|
||||
NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS: isServer ? process.env.NEXT_PUBLIC_TEXT_GENERATION_TIMEOUT_MS : getRuntimeEnvFromBody('textGenerationTimeoutMs'),
|
||||
NEXT_PUBLIC_TOP_K_MAX_VALUE: isServer ? process.env.NEXT_PUBLIC_TOP_K_MAX_VALUE : getRuntimeEnvFromBody('topKMaxValue'),
|
||||
NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON: isServer ? process.env.NEXT_PUBLIC_UPLOAD_IMAGE_AS_ICON : getRuntimeEnvFromBody('uploadImageAsIcon'),
|
||||
NEXT_PUBLIC_WEB_PREFIX: isServer ? process.env.NEXT_PUBLIC_WEB_PREFIX : getRuntimeEnvFromBody('webPrefix'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_EMAIL : getRuntimeEnvFromBody('zendeskFieldIdEmail'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_ENVIRONMENT : getRuntimeEnvFromBody('zendeskFieldIdEnvironment'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_PLAN : getRuntimeEnvFromBody('zendeskFieldIdPlan'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_VERSION : getRuntimeEnvFromBody('zendeskFieldIdVersion'),
|
||||
NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID: isServer ? process.env.NEXT_PUBLIC_ZENDESK_FIELD_ID_WORKSPACE_ID : getRuntimeEnvFromBody('zendeskFieldIdWorkspaceId'),
|
||||
NEXT_PUBLIC_ZENDESK_WIDGET_KEY: isServer ? process.env.NEXT_PUBLIC_ZENDESK_WIDGET_KEY : getRuntimeEnvFromBody('zendeskWidgetKey'),
|
||||
},
|
||||
emptyStringAsUndefined: true,
|
||||
})
|
||||
|
||||
type ClientEnvKey = keyof typeof clientSchema
|
||||
type DatasetKey = CamelCase<Replace<ClientEnvKey, typeof CLIENT_ENV_PREFIX>>
|
||||
|
||||
/**
|
||||
* Browser-only function to get runtime env value from HTML body dataset.
|
||||
*/
|
||||
function getRuntimeEnvFromBody(key: DatasetKey) {
|
||||
if (typeof window === 'undefined') {
|
||||
throw new TypeError('getRuntimeEnvFromBody can only be called in the browser')
|
||||
}
|
||||
|
||||
const value = document.body.dataset[key]
|
||||
return value || undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Server-only function to get dataset map for embedding into the HTML body.
|
||||
*/
|
||||
export function getDatasetMap() {
|
||||
if (isClient) {
|
||||
throw new TypeError('getDatasetMap can only be called on the server')
|
||||
}
|
||||
return ObjectFromEntries(
|
||||
ObjectKeys(clientSchema)
|
||||
.map(envKey => [
|
||||
concat('data-', kebabCase(slice(envKey, length(CLIENT_ENV_PREFIX)))),
|
||||
env[envKey],
|
||||
]),
|
||||
)
|
||||
}
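
Taken together, these helpers let the client read env values at runtime instead of at build time: the server serializes each NEXT_PUBLIC_* value into a data-* attribute on <body>, and getRuntimeEnvFromBody reads it back through document.body.dataset. A minimal usage sketch, assuming CLIENT_ENV_PREFIX is 'NEXT_PUBLIC_' and that the module above is importable from './env' (the layout file and import path are illustrative, not taken from this diff):

// app/layout.tsx (illustrative)
import { getDatasetMap } from './env' // hypothetical import path for the module above

export default function RootLayout({ children }: { children: React.ReactNode }) {
  // getDatasetMap() yields entries such as { 'data-edition': 'SELF_HOSTED', ... },
  // so spreading it renders <body data-edition="SELF_HOSTED" ...>.
  return (
    <html lang="en">
      <body {...getDatasetMap()}>{children}</body>
    </html>
  )
}

// In a client component, env.NEXT_PUBLIC_EDITION then resolves through
// getRuntimeEnvFromBody('edition'), i.e. document.body.dataset.edition.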

@ -2299,7 +2299,7 @@
  },
  "app/components/base/markdown-blocks/code-block.tsx": {
    "react-hooks-extra/no-direct-set-state-in-use-effect": {
      "count": 10
      "count": 7
    },
    "tailwindcss/enforce-consistent-class-order": {
      "count": 1
@ -2512,11 +2512,6 @@
      "count": 1
    }
  },
  "app/components/base/param-item/top-k-item.tsx": {
    "unicorn/prefer-number-properties": {
      "count": 1
    }
  },
  "app/components/base/portal-to-follow-elem/index.tsx": {
    "react-refresh/only-export-components": {
      "count": 2
@ -3822,14 +3817,6 @@
      "count": 3
    }
  },
  "app/components/datasets/documents/detail/metadata/index.tsx": {
    "react-hooks-extra/no-direct-set-state-in-use-effect": {
      "count": 4
    },
    "ts/no-explicit-any": {
      "count": 2
    }
  },
  "app/components/datasets/documents/detail/new-segment.tsx": {
    "tailwindcss/enforce-consistent-class-order": {
      "count": 3
@ -5664,10 +5651,12 @@
  },
  "app/components/rag-pipeline/components/update-dsl-modal.tsx": {
    "tailwindcss/enforce-consistent-class-order": {
      "count": 5
    },
    "ts/no-explicit-any": {
      "count": 1
      "count": 3
    }
  },
  "app/components/rag-pipeline/components/version-mismatch-modal.tsx": {
    "tailwindcss/enforce-consistent-class-order": {
      "count": 2
    }
  },
  "app/components/rag-pipeline/hooks/use-DSL.ts": {
@ -6811,12 +6800,6 @@
    }
  },
  "app/components/workflow/nodes/_base/node.tsx": {
    "tailwindcss/enforce-consistent-class-order": {
      "count": 5
    },
    "tailwindcss/no-unnecessary-whitespace": {
      "count": 1
    },
    "ts/no-explicit-any": {
      "count": 3
    }
@ -7272,9 +7255,6 @@
  "app/components/workflow/nodes/knowledge-base/components/retrieval-setting/top-k-and-score-threshold.tsx": {
    "tailwindcss/enforce-consistent-class-order": {
      "count": 2
    },
    "unicorn/prefer-number-properties": {
      "count": 1
    }
  },
  "app/components/workflow/nodes/knowledge-base/components/retrieval-setting/type.ts": {
@ -8590,11 +8570,6 @@
      "count": 7
    }
  },
  "app/install/installForm.tsx": {
    "tailwindcss/enforce-consistent-class-order": {
      "count": 1
    }
  },
  "app/reset-password/check-code/page.tsx": {
    "tailwindcss/enforce-consistent-class-order": {
      "count": 4

@ -10,6 +10,7 @@ import 'dayjs/locale/fr'
import 'dayjs/locale/hi'
import 'dayjs/locale/id'
import 'dayjs/locale/it'
import 'dayjs/locale/nl'
import 'dayjs/locale/ja'
import 'dayjs/locale/ko'
import 'dayjs/locale/pl'

@ -46,6 +46,7 @@ export const localeMap: Record<Locale, string> = {
  'it-IT': 'it',
  'th-TH': 'th',
  'id-ID': 'id',
  'nl-NL': 'nl',
  'uk-UA': 'uk',
  'vi-VN': 'vi',
  'ro-RO': 'ro',

@ -147,6 +147,13 @@ const data = {
    example: 'Halo, Dify!',
    supported: true,
  },
  {
    value: 'nl-NL',
    name: 'Nederlands (Nederland)',
    prompt_name: 'Dutch',
    example: 'Hallo, Dify!',
    supported: true,
  },
  {
    value: 'ar-TN',
    name: 'العربية (تونس)',
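
The three hunks above wire the new nl-NL locale through the web app: the dayjs locale bundle import, the localeMap entry that maps the app locale to its dayjs locale code, and the supported-language entry. A minimal sketch of how such a map is typically consumed (the setDayjsLocale helper is illustrative, not part of this diff):

import dayjs from 'dayjs'
import 'dayjs/locale/nl'

const localeMap: Record<string, string> = { 'nl-NL': 'nl' /* ... */ }

function setDayjsLocale(appLocale: string) {
  // Fall back to English when the app locale has no dayjs mapping.
  dayjs.locale(localeMap[appLocale] ?? 'en')
}

setDayjsLocale('nl-NL')
console.log(dayjs().format('dddd')) // prints the weekday in Dutch, e.g. "maandag"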

web/i18n/nl-NL/app-annotation.json (new file, 70 lines)
@ -0,0 +1,70 @@
{
  "addModal.answerName": "Answer",
  "addModal.answerPlaceholder": "Type answer here",
  "addModal.createNext": "Add another annotated response",
  "addModal.queryName": "Question",
  "addModal.queryPlaceholder": "Type query here",
  "addModal.title": "Add Annotation Reply",
  "batchAction.cancel": "Cancel",
  "batchAction.delete": "Delete",
  "batchAction.selected": "Selected",
  "batchModal.answer": "answer",
  "batchModal.browse": "browse",
  "batchModal.cancel": "Cancel",
  "batchModal.completed": "Import completed",
  "batchModal.content": "content",
  "batchModal.contentTitle": "chunk content",
  "batchModal.csvUploadTitle": "Drag and drop your CSV file here, or ",
  "batchModal.error": "Import Error",
  "batchModal.ok": "OK",
  "batchModal.processing": "In batch processing",
  "batchModal.question": "question",
  "batchModal.run": "Run Batch",
  "batchModal.runError": "Run batch failed",
  "batchModal.template": "Download the template here",
  "batchModal.tip": "The CSV file must conform to the following structure:",
  "batchModal.title": "Bulk Import",
  "editBy": "Answer edited by {{author}}",
  "editModal.answerName": "Storyteller Bot",
  "editModal.answerPlaceholder": "Type your answer here",
  "editModal.createdAt": "Created At",
  "editModal.queryName": "User Query",
  "editModal.queryPlaceholder": "Type your query here",
  "editModal.removeThisCache": "Remove this Annotation",
  "editModal.title": "Edit Annotation Reply",
  "editModal.yourAnswer": "Your Answer",
  "editModal.yourQuery": "Your Query",
  "embeddingModelSwitchTip": "Annotation text vectorization model, switching models will be re-embedded, resulting in additional costs.",
  "errorMessage.answerRequired": "Answer is required",
  "errorMessage.queryRequired": "Question is required",
  "hitHistoryTable.match": "Match",
  "hitHistoryTable.query": "Query",
  "hitHistoryTable.response": "Response",
  "hitHistoryTable.score": "Score",
  "hitHistoryTable.source": "Source",
  "hitHistoryTable.time": "Time",
  "initSetup.configConfirmBtn": "Save",
  "initSetup.configTitle": "Annotation Reply Setup",
  "initSetup.confirmBtn": "Save & Enable",
  "initSetup.title": "Annotation Reply Initial Setup",
  "list.delete.title": "Are you sure Delete?",
  "name": "Annotation Reply",
  "noData.description": "You can edit annotations during app debugging or import annotations in bulk here for a high-quality response.",
  "noData.title": "No annotations",
  "table.header.actions": "actions",
  "table.header.addAnnotation": "Add Annotation",
  "table.header.answer": "answer",
  "table.header.bulkExport": "Bulk Export",
  "table.header.bulkImport": "Bulk Import",
  "table.header.clearAll": "Delete All",
  "table.header.clearAllConfirm": "Delete all annotations?",
  "table.header.createdAt": "created at",
  "table.header.hits": "hits",
  "table.header.question": "question",
  "title": "Annotations",
  "viewModal.annotatedResponse": "Annotation Reply",
  "viewModal.hit": "Hit",
  "viewModal.hitHistory": "Hit History",
  "viewModal.hits": "Hits",
  "viewModal.noHitHistory": "No hit history"
}

web/i18n/nl-NL/app-api.json (new file, 72 lines)
@ -0,0 +1,72 @@
{
  "actionMsg.deleteConfirmTips": "This action cannot be undone.",
  "actionMsg.deleteConfirmTitle": "Delete this secret key?",
  "actionMsg.ok": "OK",
  "apiKey": "API Key",
  "apiKeyModal.apiSecretKey": "API Secret key",
  "apiKeyModal.apiSecretKeyTips": "To prevent API abuse, protect your API Key. Avoid using it as plain text in front-end code. :)",
  "apiKeyModal.createNewSecretKey": "Create new Secret key",
  "apiKeyModal.created": "CREATED",
  "apiKeyModal.generateTips": "Keep this key in a secure and accessible place.",
  "apiKeyModal.lastUsed": "LAST USED",
  "apiKeyModal.secretKey": "Secret Key",
  "apiServer": "API Server",
  "chatMode.blocking": "Blocking type, waiting for execution to complete and returning results. (Requests may be interrupted if the process is long)",
  "chatMode.chatMsgHistoryApi": "Get the chat history message",
  "chatMode.chatMsgHistoryApiTip": "The first page returns the latest `limit` bar, which is in reverse order.",
  "chatMode.chatMsgHistoryConversationIdTip": "Conversation ID",
  "chatMode.chatMsgHistoryFirstId": "ID of the first chat record on the current page. The default is none.",
  "chatMode.chatMsgHistoryLimit": "How many chats are returned in one request",
  "chatMode.conversationIdTip": "(Optional) Conversation ID: leave empty for first-time conversation; pass conversation_id from context to continue dialogue.",
  "chatMode.conversationRenamingApi": "Conversation renaming",
  "chatMode.conversationRenamingApiTip": "Rename conversations; the name is displayed in multi-session client interfaces.",
  "chatMode.conversationRenamingNameTip": "New name",
  "chatMode.conversationsListApi": "Get conversation list",
  "chatMode.conversationsListApiTip": "Gets the session list of the current user. By default, the last 20 sessions are returned.",
  "chatMode.conversationsListFirstIdTip": "The ID of the last record on the current page, default none.",
  "chatMode.conversationsListLimitTip": "How many chats are returned in one request",
  "chatMode.createChatApi": "Create chat message",
  "chatMode.createChatApiTip": "Create a new conversation message or continue an existing dialogue.",
  "chatMode.info": "For versatile conversational apps using a Q&A format, call the chat-messages API to initiate dialogue. Maintain ongoing conversations by passing the returned conversation_id. Response parameters and templates depend on Dify Prompt Eng. settings.",
  "chatMode.inputsTips": "(Optional) Provide user input fields as key-value pairs, corresponding to variables in Prompt Eng. Key is the variable name, Value is the parameter value. If the field type is Select, the submitted Value must be one of the preset choices.",
  "chatMode.messageFeedbackApi": "Message terminal user feedback, like",
  "chatMode.messageFeedbackApiTip": "Rate received messages on behalf of end-users with likes or dislikes. This data is visible in the Logs & Annotations page and used for future model fine-tuning.",
  "chatMode.messageIDTip": "Message ID",
  "chatMode.parametersApi": "Obtain application parameter information",
  "chatMode.parametersApiTip": "Retrieve configured Input parameters, including variable names, field names, types, and default values. Typically used for displaying these fields in a form or filling in default values after the client loads.",
  "chatMode.queryTips": "User input/question content",
  "chatMode.ratingTip": "like or dislike, null is undo",
  "chatMode.streaming": "streaming returns. Implementation of streaming return based on SSE (Server-Sent Events).",
  "chatMode.title": "Chat App API",
  "completionMode.blocking": "Blocking type, waiting for execution to complete and returning results. (Requests may be interrupted if the process is long)",
  "completionMode.createCompletionApi": "Create Completion Message",
  "completionMode.createCompletionApiTip": "Create a Completion Message to support the question-and-answer mode.",
  "completionMode.info": "For high-quality text generation, such as articles, summaries, and translations, use the completion-messages API with user input. Text generation relies on the model parameters and prompt templates set in Dify Prompt Engineering.",
  "completionMode.inputsTips": "(Optional) Provide user input fields as key-value pairs, corresponding to variables in Prompt Eng. Key is the variable name, Value is the parameter value. If the field type is Select, the submitted Value must be one of the preset choices.",
  "completionMode.messageFeedbackApi": "Message feedback (like)",
  "completionMode.messageFeedbackApiTip": "Rate received messages on behalf of end-users with likes or dislikes. This data is visible in the Logs & Annotations page and used for future model fine-tuning.",
  "completionMode.messageIDTip": "Message ID",
  "completionMode.parametersApi": "Obtain application parameter information",
  "completionMode.parametersApiTip": "Retrieve configured Input parameters, including variable names, field names, types, and default values. Typically used for displaying these fields in a form or filling in default values after the client loads.",
  "completionMode.queryTips": "User input text content.",
  "completionMode.ratingTip": "like or dislike, null is undo",
  "completionMode.streaming": "streaming returns. Implementation of streaming return based on SSE (Server-Sent Events).",
  "completionMode.title": "Completion App API",
  "copied": "Copied",
  "copy": "Copy",
  "develop.noContent": "No content",
  "develop.pathParams": "Path Params",
  "develop.query": "Query",
  "develop.requestBody": "Request Body",
  "develop.toc": "Contents",
  "disabled": "Disabled",
  "loading": "Loading",
  "merMaid.rerender": "Redo Rerender",
  "never": "Never",
  "ok": "In Service",
  "pause": "Pause",
  "play": "Play",
  "playing": "Playing",
  "regenerate": "Regenerate",
  "status": "Status"
}

web/i18n/nl-NL/app-debug.json (new file, 393 lines)
@ -0,0 +1,393 @@
{
  "agent.agentMode": "Agent Mode",
  "agent.agentModeDes": "Set the type of inference mode for the agent",
  "agent.agentModeType.ReACT": "ReAct",
  "agent.agentModeType.functionCall": "Function Calling",
  "agent.buildInPrompt": "Build-In Prompt",
  "agent.firstPrompt": "First Prompt",
  "agent.nextIteration": "Next Iteration",
  "agent.promptPlaceholder": "Write your prompt here",
  "agent.setting.description": "Agent Assistant settings allow setting agent mode and advanced features like built-in prompts, only available in Agent type.",
  "agent.setting.maximumIterations.description": "Limit the number of iterations an agent assistant can execute",
  "agent.setting.maximumIterations.name": "Maximum Iterations",
  "agent.setting.name": "Agent Settings",
  "agent.tools.description": "Using tools can extend the capabilities of LLM, such as searching the internet or performing scientific calculations",
  "agent.tools.enabled": "Enabled",
  "agent.tools.name": "Tools",
  "assistantType.agentAssistant.description": "Build an intelligent Agent which can autonomously choose tools to complete the tasks",
  "assistantType.agentAssistant.name": "Agent Assistant",
  "assistantType.chatAssistant.description": "Build a chat-based assistant using a Large Language Model",
  "assistantType.chatAssistant.name": "Basic Assistant",
  "assistantType.name": "Assistant Type",
  "autoAddVar": "Undefined variables referenced in pre-prompt, are you want to add them in user input form?",
  "chatSubTitle": "Instructions",
  "code.instruction": "Instruction",
  "codegen.apply": "Apply",
  "codegen.applyChanges": "Apply Changes",
  "codegen.description": "The Code Generator uses configured models to generate high-quality code based on your instructions. Please provide clear and detailed instructions.",
  "codegen.generate": "Generate",
  "codegen.generatedCodeTitle": "Generated Code",
  "codegen.instruction": "Instructions",
  "codegen.instructionPlaceholder": "Enter detailed description of the code you want to generate.",
  "codegen.loading": "Generating code...",
  "codegen.noDataLine1": "Describe your use case on the left,",
  "codegen.noDataLine2": "the code preview will show here.",
  "codegen.overwriteConfirmMessage": "This action will overwrite the existing code. Do you want to continue?",
  "codegen.overwriteConfirmTitle": "Overwrite existing code?",
  "codegen.resTitle": "Generated Code",
  "codegen.title": "Code Generator",
  "completionSubTitle": "Prefix Prompt",
  "datasetConfig.embeddingModelRequired": "A configured Embedding Model is required",
  "datasetConfig.knowledgeTip": "Click the “+” button to add knowledge",
  "datasetConfig.params": "Params",
  "datasetConfig.rerankModelRequired": "A configured Rerank Model is required",
  "datasetConfig.retrieveChangeTip": "Modifying the index mode and retrieval mode may affect applications associated with this Knowledge.",
  "datasetConfig.retrieveMultiWay.description": "Based on user intent, queries across all Knowledge, retrieves relevant text from multi-sources, and selects the best results matching the user query after reranking.",
  "datasetConfig.retrieveMultiWay.title": "Multi-path retrieval",
  "datasetConfig.retrieveOneWay.description": "Based on user intent and Knowledge descriptions, the Agent autonomously selects the best Knowledge for querying. Best for applications with distinct, limited Knowledge.",
  "datasetConfig.retrieveOneWay.title": "N-to-1 retrieval",
  "datasetConfig.score_threshold": "Score Threshold",
  "datasetConfig.score_thresholdTip": "Used to set the similarity threshold for chunks filtering.",
  "datasetConfig.settingTitle": "Retrieval settings",
  "datasetConfig.top_k": "Top K",
  "datasetConfig.top_kTip": "Used to filter chunks that are most similar to user questions. The system will also dynamically adjust the value of Top K, according to max_tokens of the selected model.",
  "debugAsMultipleModel": "Debug as Multiple Models",
  "debugAsSingleModel": "Debug as Single Model",
  "duplicateModel": "Duplicate",
  "errorMessage.nameOfKeyRequired": "name of the key: {{key}} required",
  "errorMessage.notSelectModel": "Please choose a model",
  "errorMessage.queryRequired": "Request text is required.",
  "errorMessage.valueOfVarRequired": "{{key}} value can not be empty",
  "errorMessage.waitForBatchResponse": "Please wait for the response to the batch task to complete.",
  "errorMessage.waitForFileUpload": "Please wait for the file/files to upload",
  "errorMessage.waitForImgUpload": "Please wait for the image to upload",
  "errorMessage.waitForResponse": "Please wait for the response to the previous message to complete.",
  "feature.annotation.add": "Add annotation",
  "feature.annotation.cacheManagement": "Annotations",
  "feature.annotation.cached": "Annotated",
  "feature.annotation.description": "You can manually add high-quality response to the cache for prioritized matching with similar user questions.",
  "feature.annotation.edit": "Edit annotation",
  "feature.annotation.matchVariable.choosePlaceholder": "Choose match variable",
  "feature.annotation.matchVariable.title": "Match Variable",
  "feature.annotation.remove": "Remove",
  "feature.annotation.removeConfirm": "Delete this annotation ?",
  "feature.annotation.resDes": "Annotation Response is enabled",
  "feature.annotation.scoreThreshold.accurateMatch": "Accurate Match",
  "feature.annotation.scoreThreshold.description": "Used to set the similarity threshold for annotation reply.",
  "feature.annotation.scoreThreshold.easyMatch": "Easy Match",
  "feature.annotation.scoreThreshold.title": "Score Threshold",
  "feature.annotation.title": "Annotation Reply",
  "feature.audioUpload.description": "Enable Audio will allow the model to process audio files for transcription and analysis.",
  "feature.audioUpload.title": "Audio",
  "feature.bar.empty": "Enable feature to enhance web app user experience",
  "feature.bar.enableText": "Features Enabled",
  "feature.bar.manage": "Manage",
  "feature.citation.description": "Show source document and attributed section of the generated content.",
  "feature.citation.resDes": "Citations and Attributions is enabled",
  "feature.citation.title": "Citations and Attributions",
  "feature.conversationHistory.description": "Set prefix names for conversation roles",
  "feature.conversationHistory.editModal.assistantPrefix": "Assistant prefix",
  "feature.conversationHistory.editModal.title": "Edit Conversation Role Names",
  "feature.conversationHistory.editModal.userPrefix": "User prefix",
  "feature.conversationHistory.learnMore": "Learn more",
  "feature.conversationHistory.tip": "The Conversation History is not enabled, please add <histories> in the prompt above.",
  "feature.conversationHistory.title": "Conversation History",
  "feature.conversationOpener.description": "In a chat app, the first sentence that the AI actively speaks to the user is usually used as a welcome.",
  "feature.conversationOpener.title": "Conversation Opener",
  "feature.dataSet.noData": "You can import Knowledge as context",
  "feature.dataSet.noDataSet": "No Knowledge found",
  "feature.dataSet.notSupportSelectMulti": "Currently only support one Knowledge",
  "feature.dataSet.queryVariable.choosePlaceholder": "Choose query variable",
  "feature.dataSet.queryVariable.contextVarNotEmpty": "context query variable can not be empty",
  "feature.dataSet.queryVariable.deleteContextVarTip": "This variable has been set as a context query variable, and removing it will impact the normal use of the Knowledge. If you still need to delete it, please reselect it in the context section.",
  "feature.dataSet.queryVariable.deleteContextVarTitle": "Delete variable “{{varName}}”?",
  "feature.dataSet.queryVariable.noVar": "No variables",
  "feature.dataSet.queryVariable.noVarTip": "please create a variable under the Variables section",
  "feature.dataSet.queryVariable.ok": "OK",
  "feature.dataSet.queryVariable.tip": "This variable will be used as the query input for context retrieval, obtaining context information related to the input of this variable.",
  "feature.dataSet.queryVariable.title": "Query variable",
  "feature.dataSet.queryVariable.unableToQueryDataSet": "Unable to query the Knowledge",
  "feature.dataSet.queryVariable.unableToQueryDataSetTip": "Unable to query the Knowledge successfully, please choose a context query variable in the context section.",
  "feature.dataSet.selectTitle": "Select reference Knowledge",
  "feature.dataSet.selected": "Knowledge selected",
  "feature.dataSet.title": "Knowledge",
  "feature.dataSet.toCreate": "Go to create",
  "feature.documentUpload.description": "Enable Document will allows the model to take in documents and answer questions about them.",
  "feature.documentUpload.title": "Document",
  "feature.fileUpload.description": "The chat input box allows uploading of images, documents, and other files.",
  "feature.fileUpload.modalTitle": "File Upload Setting",
  "feature.fileUpload.numberLimit": "Max uploads",
  "feature.fileUpload.supportedTypes": "Support File Types",
  "feature.fileUpload.title": "File Upload",
  "feature.groupChat.description": "Add pre-conversation settings for apps can enhance user experience.",
  "feature.groupChat.title": "Chat enhance",
  "feature.groupExperience.title": "Experience enhance",
  "feature.imageUpload.description": "Allow uploading images.",
  "feature.imageUpload.modalTitle": "Image Upload Setting",
  "feature.imageUpload.numberLimit": "Max uploads",
  "feature.imageUpload.supportedTypes": "Support File Types",
  "feature.imageUpload.title": "Image Upload",
  "feature.moderation.allEnabled": "INPUT & OUTPUT",
  "feature.moderation.contentEnableLabel": "Content moderation enabled",
  "feature.moderation.description": "Secure model output by using moderation API or maintaining a sensitive word list.",
  "feature.moderation.inputEnabled": "INPUT",
  "feature.moderation.modal.content.condition": "Moderate INPUT and OUTPUT Content enabled at least one",
  "feature.moderation.modal.content.errorMessage": "Preset replies cannot be empty",
  "feature.moderation.modal.content.fromApi": "Preset replies are returned by API",
  "feature.moderation.modal.content.input": "Moderate INPUT Content",
  "feature.moderation.modal.content.output": "Moderate OUTPUT Content",
  "feature.moderation.modal.content.placeholder": "Preset replies content here",
  "feature.moderation.modal.content.preset": "Preset replies",
  "feature.moderation.modal.content.supportMarkdown": "Markdown supported",
  "feature.moderation.modal.keywords.line": "Line",
  "feature.moderation.modal.keywords.placeholder": "One per line, separated by line breaks",
  "feature.moderation.modal.keywords.tip": "One per line, separated by line breaks. Up to 100 characters per line.",
  "feature.moderation.modal.openaiNotConfig.after": "",
  "feature.moderation.modal.openaiNotConfig.before": "OpenAI Moderation requires an OpenAI API key configured in the",
  "feature.moderation.modal.provider.keywords": "Keywords",
  "feature.moderation.modal.provider.openai": "OpenAI Moderation",
  "feature.moderation.modal.provider.openaiTip.prefix": "OpenAI Moderation requires an OpenAI API key configured in the ",
  "feature.moderation.modal.provider.openaiTip.suffix": ".",
  "feature.moderation.modal.provider.title": "Provider",
  "feature.moderation.modal.title": "Content moderation settings",
  "feature.moderation.outputEnabled": "OUTPUT",
  "feature.moderation.title": "Content moderation",
  "feature.moreLikeThis.description": "Generate multiple texts at once, and then edit and continue to generate",
  "feature.moreLikeThis.generateNumTip": "Number of each generated times",
  "feature.moreLikeThis.tip": "Using this feature will incur additional tokens overhead",
  "feature.moreLikeThis.title": "More like this",
  "feature.speechToText.description": "Voice input can be used in chat.",
  "feature.speechToText.resDes": "Voice input is enabled",
  "feature.speechToText.title": "Speech to Text",
  "feature.suggestedQuestionsAfterAnswer.description": "Setting up next questions suggestion can give users a better chat.",
  "feature.suggestedQuestionsAfterAnswer.resDes": "3 suggestions for user next question.",
  "feature.suggestedQuestionsAfterAnswer.title": "Follow-up",
  "feature.suggestedQuestionsAfterAnswer.tryToAsk": "Try to ask",
  "feature.textToSpeech.description": "Conversation messages can be converted to speech.",
  "feature.textToSpeech.resDes": "Text to Audio is enabled",
  "feature.textToSpeech.title": "Text to Speech",
  "feature.toolbox.title": "TOOLBOX",
  "feature.tools.modal.name.placeholder": "Please enter the name",
  "feature.tools.modal.name.title": "Name",
  "feature.tools.modal.title": "Tool",
  "feature.tools.modal.toolType.placeholder": "Please select the tool type",
  "feature.tools.modal.toolType.title": "Tool Type",
  "feature.tools.modal.variableName.placeholder": "Please enter the variable name",
  "feature.tools.modal.variableName.title": "Variable Name",
  "feature.tools.tips": "Tools provide a standard API call method, taking user input or variables as request parameters for querying external data as context.",
  "feature.tools.title": "Tools",
  "feature.tools.toolsInUse": "{{count}} tools in use",
  "formattingChangedText": "Modifying the formatting will reset the debug area, are you sure?",
  "formattingChangedTitle": "Formatting changed",
  "generate.apply": "Apply",
  "generate.codeGenInstructionPlaceHolderLine": "The more detailed the feedback, such as the data types of input and output as well as how variables are processed, the more accurate the code generation will be.",
  "generate.description": "The Prompt Generator uses the configured model to optimize prompts for higher quality and better structure. Please write clear and detailed instructions.",
  "generate.dismiss": "Dismiss",
  "generate.generate": "Generate",
  "generate.idealOutput": "Ideal Output",
  "generate.idealOutputPlaceholder": "Describe your ideal response format, length, tone, and content requirements...",
  "generate.insertContext": "insert context",
  "generate.instruction": "Instructions",
  "generate.instructionPlaceHolderLine1": "Make the output more concise, retaining the core points.",
  "generate.instructionPlaceHolderLine2": "The output format is incorrect, please strictly follow the JSON format.",
  "generate.instructionPlaceHolderLine3": "The tone is too harsh, please make it more friendly.",
  "generate.instructionPlaceHolderTitle": "Describe how you would like to improve this Prompt. For example:",
  "generate.latest": "Latest",
  "generate.loading": "Orchestrating the application for you...",
  "generate.newNoDataLine1": "Write a instruction in the left column, and click Generate to see response. ",
  "generate.optimizationNote": "Optimization Note",
  "generate.optimizePromptTooltip": "Optimize in Prompt Generator",
  "generate.optional": "Optional",
  "generate.overwriteMessage": "Applying this prompt will override existing configuration.",
  "generate.overwriteTitle": "Override existing configuration?",
  "generate.press": "Press",
  "generate.resTitle": "Generated Prompt",
  "generate.template.GitGud.instruction": "Generate appropriate Git commands based on user described version control actions",
  "generate.template.GitGud.name": "Git gud",
  "generate.template.SQLSorcerer.instruction": "Transform everyday language into SQL queries",
  "generate.template.SQLSorcerer.name": "SQL sorcerer",
  "generate.template.excelFormulaExpert.instruction": "A chatbot that can help novice users understand, use and create Excel formulas based on user instructions",
  "generate.template.excelFormulaExpert.name": "Excel formula expert",
  "generate.template.meetingTakeaways.instruction": "Distill meetings into concise summaries including discussion topics, key takeaways, and action items",
  "generate.template.meetingTakeaways.name": "Meeting takeaways",
  "generate.template.professionalAnalyst.instruction": "Extract insights, identify risk and distill key information from long reports into single memo",
  "generate.template.professionalAnalyst.name": "Professional analyst",
  "generate.template.pythonDebugger.instruction": "A bot that can generate and debug your code based on your instruction",
  "generate.template.pythonDebugger.name": "Python debugger",
  "generate.template.translation.instruction": "A translator that can translate multiple languages",
  "generate.template.translation.name": "Translation",
  "generate.template.travelPlanning.instruction": "The Travel Planning Assistant is an intelligent tool designed to help users effortlessly plan their trips",
  "generate.template.travelPlanning.name": "Travel planning",
  "generate.template.writingsPolisher.instruction": "Use advanced copyediting techniques to improve your writings",
  "generate.template.writingsPolisher.name": "Writing polisher",
  "generate.title": "Prompt Generator",
  "generate.to": "to ",
  "generate.tryIt": "Try it",
  "generate.version": "Version",
  "generate.versions": "Versions",
  "inputs.chatVarTip": "Fill in the value of the variable, which will be automatically replaced in the prompt word every time a new session is started",
  "inputs.completionVarTip": "Fill in the value of the variable, which will be automatically replaced in the prompt words every time a question is submitted.",
  "inputs.noPrompt": "Try write some prompt in pre-prompt input",
  "inputs.noVar": "Fill in the value of the variable, which will be automatically replaced in the prompt word every time a new session is started.",
  "inputs.previewTitle": "Prompt preview",
  "inputs.queryPlaceholder": "Please enter the request text.",
  "inputs.queryTitle": "Query content",
  "inputs.run": "RUN",
  "inputs.title": "Debug & Preview",
  "inputs.userInputField": "User Input Field",
  "modelConfig.modeType.chat": "Chat",
  "modelConfig.modeType.completion": "Complete",
  "modelConfig.model": "Model",
  "modelConfig.setTone": "Set tone of responses",
  "modelConfig.title": "Model and Parameters",
  "noResult": "Output will be displayed here.",
  "notSetAPIKey.description": "The LLM provider key has not been set, and it needs to be set before debugging.",
  "notSetAPIKey.settingBtn": "Go to settings",
  "notSetAPIKey.title": "LLM provider key has not been set",
  "notSetAPIKey.trailFinished": "Trail finished",
  "notSetVar": "Variables allow users to introduce prompt words or opening remarks when filling out forms. You can try entering \"{{input}}\" in the prompt words.",
  "openingStatement.add": "Add",
  "openingStatement.noDataPlaceHolder": "Starting the conversation with the user can help AI establish a closer connection with them in conversational applications.",
  "openingStatement.notIncludeKey": "The initial prompt does not include the variable: {{key}}. Please add it to the initial prompt.",
  "openingStatement.openingQuestion": "Opening Questions",
  "openingStatement.openingQuestionPlaceholder": "You can use variables, try typing {{variable}}.",
  "openingStatement.placeholder": "Write your opener message here, you can use variables, try type {{variable}}.",
  "openingStatement.title": "Conversation Opener",
  "openingStatement.tooShort": "At least 20 words of initial prompt are required to generate an opening remarks for the conversation.",
  "openingStatement.varTip": "You can use variables, try type {{variable}}",
  "openingStatement.writeOpener": "Edit opener",
  "operation.addFeature": "Add Feature",
  "operation.agree": "like",
  "operation.applyConfig": "Publish",
  "operation.automatic": "Generate",
  "operation.cancelAgree": "Cancel like",
  "operation.cancelDisagree": "Cancel dislike",
  "operation.debugConfig": "Debug",
  "operation.disagree": "dislike",
  "operation.resetConfig": "Reset",
  "operation.stopResponding": "Stop responding",
  "operation.userAction": "User ",
  "orchestrate": "Orchestrate",
  "otherError.historyNoBeEmpty": "Conversation history must be set in the prompt",
  "otherError.promptNoBeEmpty": "Prompt can not be empty",
  "otherError.queryNoBeEmpty": "Query must be set in the prompt",
  "pageTitle.line1": "PROMPT",
  "pageTitle.line2": "Engineering",
  "promptMode.advanced": "Expert Mode",
  "promptMode.advancedWarning.description": "In Expert Mode, you can edit whole PROMPT.",
  "promptMode.advancedWarning.learnMore": "Learn more",
  "promptMode.advancedWarning.ok": "OK",
  "promptMode.advancedWarning.title": "You have switched to Expert Mode, and once you modify the PROMPT, you CANNOT return to the basic mode.",
  "promptMode.contextMissing": "Context component missed, the effectiveness of the prompt may not be good.",
  "promptMode.operation.addMessage": "Add Message",
  "promptMode.simple": "Switch to Expert Mode to edit the whole PROMPT",
  "promptMode.switchBack": "Switch back",
  "promptTip": "Prompts guide AI responses with instructions and constraints. Insert variables like {{input}}. This prompt won't be visible to users.",
  "publishAs": "Publish as",
  "resetConfig.message": "Reset discards changes, restoring the last published configuration.",
  "resetConfig.title": "Confirm reset?",
  "result": "Output Text",
  "trailUseGPT4Info.description": "Use gpt-4, please set API Key.",
  "trailUseGPT4Info.title": "Does not support gpt-4 now",
  "varKeyError.canNoBeEmpty": "{{key}} is required",
  "varKeyError.keyAlreadyExists": "{{key}} already exists",
  "varKeyError.notStartWithNumber": "{{key}} can not start with a number",
  "varKeyError.notValid": "{{key}} is invalid. Can only contain letters, numbers, and underscores",
  "varKeyError.tooLong": "{{key}} is too length. Can not be longer then 30 characters",
  "variableConfig.addModalTitle": "Add Input Field",
  "variableConfig.addOption": "Add option",
  "variableConfig.apiBasedVar": "API-based Variable",
  "variableConfig.both": "Both",
  "variableConfig.checkbox": "Checkbox",
  "variableConfig.content": "Content",
  "variableConfig.defaultValue": "Default Value",
  "variableConfig.defaultValuePlaceholder": "Enter default value to pre-populate the field",
  "variableConfig.description": "Setting for variable {{varName}}",
  "variableConfig.displayName": "Display Name",
  "variableConfig.editModalTitle": "Edit Input Field",
  "variableConfig.errorMsg.atLeastOneOption": "At least one option is required",
  "variableConfig.errorMsg.jsonSchemaInvalid": "JSON Schema is not valid JSON",
  "variableConfig.errorMsg.jsonSchemaMustBeObject": "JSON Schema must have type \"object\"",
  "variableConfig.errorMsg.labelNameRequired": "Label name is required",
  "variableConfig.errorMsg.optionRepeat": "Has repeat options",
  "variableConfig.errorMsg.varNameCanBeRepeat": "Variable name can not be repeated",
  "variableConfig.fieldType": "Field Type",
  "variableConfig.file.audio.name": "Audio",
  "variableConfig.file.custom.createPlaceholder": "+ File extension, e.g .doc",
  "variableConfig.file.custom.description": "Specify other file types.",
  "variableConfig.file.custom.name": "Other file types",
  "variableConfig.file.document.name": "Document",
  "variableConfig.file.image.name": "Image",
  "variableConfig.file.supportFileTypes": "Support File Types",
  "variableConfig.file.video.name": "Video",
  "variableConfig.hide": "Hide",
  "variableConfig.inputPlaceholder": "Please input",
  "variableConfig.json": "JSON Code",
  "variableConfig.jsonSchema": "JSON Schema",
  "variableConfig.labelName": "Label Name",
  "variableConfig.localUpload": "Local Upload",
  "variableConfig.maxLength": "Max Length",
  "variableConfig.maxNumberOfUploads": "Max number of uploads",
  "variableConfig.maxNumberTip": "Document < {{docLimit}}, image < {{imgLimit}}, audio < {{audioLimit}}, video < {{videoLimit}}",
  "variableConfig.multi-files": "File List",
  "variableConfig.noDefaultSelected": "Don't select",
  "variableConfig.noDefaultValue": "No default value",
  "variableConfig.notSet": "Not set, try typing {{input}} in the prefix prompt",
  "variableConfig.number": "Number",
  "variableConfig.optional": "optional",
  "variableConfig.options": "Options",
  "variableConfig.paragraph": "Paragraph",
  "variableConfig.placeholder": "Placeholder",
  "variableConfig.placeholderPlaceholder": "Enter text to display when the field is empty",
  "variableConfig.required": "Required",
  "variableConfig.select": "Select",
  "variableConfig.selectDefaultValue": "Select default value",
  "variableConfig.showAllSettings": "Show All Settings",
  "variableConfig.single-file": "Single File",
  "variableConfig.startChecked": "Start checked",
  "variableConfig.startSelectedOption": "Start selected option",
  "variableConfig.string": "Short Text",
  "variableConfig.stringTitle": "Form text box options",
  "variableConfig.text-input": "Short Text",
  "variableConfig.tooltips": "Tooltips",
  "variableConfig.tooltipsPlaceholder": "Enter helpful text shown when hovering over the label",
  "variableConfig.unit": "Unit",
  "variableConfig.unitPlaceholder": "Display units after numbers, e.g. tokens",
  "variableConfig.uploadFileTypes": "Upload File Types",
  "variableConfig.uploadMethod": "Upload Method",
  "variableConfig.varName": "Variable Name",
  "variableTable.action": "Actions",
  "variableTable.key": "Variable Key",
  "variableTable.name": "User Input Field Name",
  "variableTable.type": "Input Type",
  "variableTable.typeSelect": "Select",
  "variableTable.typeString": "String",
  "variableTip": "Users fill variables in a form, automatically replacing variables in the prompt.",
  "variableTitle": "Variables",
  "vision.description": "Enable Vision will allows the model to take in images and answer questions about them.",
  "vision.name": "Vision",
  "vision.onlySupportVisionModelTip": "Only supports vision models",
  "vision.settings": "Settings",
  "vision.visionSettings.both": "Both",
  "vision.visionSettings.high": "High",
  "vision.visionSettings.localUpload": "Local Upload",
  "vision.visionSettings.low": "Low",
  "vision.visionSettings.resolution": "Resolution",
  "vision.visionSettings.resolutionTooltip": "low res will allow model receive a low-res 512 x 512 version of the image, and represent the image with a budget of 65 tokens. This allows the API to return faster responses and consume fewer input tokens for use cases that do not require high detail.\nhigh res will first allows the model to see the low res image and then creates detailed crops of input images as 512px squares based on the input image size. Each of the detailed crops uses twice the token budget for a total of 129 tokens.",
  "vision.visionSettings.title": "Vision Settings",
  "vision.visionSettings.uploadLimit": "Upload Limit",
  "vision.visionSettings.uploadMethod": "Upload Method",
  "vision.visionSettings.url": "URL",
  "voice.defaultDisplay": "Default Voice",
  "voice.description": "Text to speech voice Settings",
  "voice.name": "Voice",
  "voice.settings": "Settings",
  "voice.voiceSettings.autoPlay": "Auto Play",
  "voice.voiceSettings.autoPlayDisabled": "Off",
  "voice.voiceSettings.autoPlayEnabled": "On",
  "voice.voiceSettings.language": "Language",
  "voice.voiceSettings.resolutionTooltip": "Text-to-speech voice support language。",
  "voice.voiceSettings.title": "Voice Settings",
  "voice.voiceSettings.voice": "Voice",
  "warningMessage.timeoutExceeded": "Results are not displayed due to timeout. Please refer to the logs to gather complete results."
}

Some files were not shown because too many files have changed in this diff.