mirror of
https://github.com/langgenius/dify.git
synced 2026-02-12 22:35:46 +08:00
Compare commits
41 Commits
feat/notif
...
test/add-t
| Author | SHA1 | Date | |
|---|---|---|---|
| f3768516b7 | |||
| 62b2523326 | |||
| 046953a630 | |||
| 3fd1eea4d7 | |||
| b65678bd4c | |||
| bfdc39510b | |||
| 80e6312807 | |||
| d6b025e91e | |||
| 10f85074e8 | |||
| f953331f91 | |||
| 32350f7a04 | |||
| c730fec1e4 | |||
| b4fec9b7aa | |||
| 7e0bccbbf0 | |||
| 2f87ecc0ce | |||
| 5b4c7b2a40 | |||
| 378a1d7d08 | |||
| ce0192620d | |||
| e9feeedc01 | |||
| e32490f54e | |||
| e9db50f781 | |||
| 0310f631ee | |||
| abc5a61e98 | |||
| 5f1698add6 | |||
| 36e50f277f | |||
| 704ee40caa | |||
| 3119c99979 | |||
| 16b8733886 | |||
| 83f64104fd | |||
| 5077879886 | |||
| 697b57631a | |||
| 6015f23e79 | |||
| f355c8d595 | |||
| 0142001fc2 | |||
| 4058e9ae23 | |||
| 95310561ec | |||
| de33561a52 | |||
| 6d9665578b | |||
| 18f14c04dc | |||
| 14251b249d | |||
| 1819bd72ef |
@ -553,6 +553,8 @@ WORKFLOW_LOG_CLEANUP_ENABLED=false
|
||||
WORKFLOW_LOG_RETENTION_DAYS=30
|
||||
# Batch size for workflow log cleanup operations (default: 100)
|
||||
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
|
||||
# Comma-separated list of workflow IDs to clean logs for
|
||||
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS=
|
||||
|
||||
# App configuration
|
||||
APP_MAX_EXECUTION_TIME=1200
|
||||
@ -715,6 +717,7 @@ ANNOTATION_IMPORT_MAX_CONCURRENT=5
|
||||
# Sandbox expired records clean configuration
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
|
||||
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_TASK_LOCK_TTL=90000
|
||||
|
||||
|
||||
2
api/.vscode/launch.json.example
vendored
2
api/.vscode/launch.json.example
vendored
@ -54,7 +54,7 @@
|
||||
"--loglevel",
|
||||
"DEBUG",
|
||||
"-Q",
|
||||
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
|
||||
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,workflow_based_app_execution,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
@ -1314,6 +1314,9 @@ class WorkflowLogConfig(BaseSettings):
|
||||
WORKFLOW_LOG_CLEANUP_BATCH_SIZE: int = Field(
|
||||
default=100, description="Batch size for workflow run log cleanup operations"
|
||||
)
|
||||
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS: str = Field(
|
||||
default="", description="Comma-separated list of workflow IDs to clean logs for"
|
||||
)
|
||||
|
||||
|
||||
class SwaggerUIConfig(BaseSettings):
|
||||
@ -1344,6 +1347,10 @@ class SandboxExpiredRecordsCleanConfig(BaseSettings):
|
||||
description="Maximum number of records to process in each batch",
|
||||
default=1000,
|
||||
)
|
||||
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: PositiveInt = Field(
|
||||
description="Maximum interval in milliseconds between batches",
|
||||
default=200,
|
||||
)
|
||||
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: PositiveInt = Field(
|
||||
description="Retention days for sandbox expired workflow_run records and message records",
|
||||
default=30,
|
||||
|
||||
@ -259,11 +259,20 @@ class CeleryConfig(DatabaseConfig):
|
||||
description="Password of the Redis Sentinel master.",
|
||||
default=None,
|
||||
)
|
||||
|
||||
CELERY_SENTINEL_SOCKET_TIMEOUT: PositiveFloat | None = Field(
|
||||
description="Timeout for Redis Sentinel socket operations in seconds.",
|
||||
default=0.1,
|
||||
)
|
||||
|
||||
CELERY_TASK_ANNOTATIONS: dict[str, Any] | None = Field(
|
||||
description=(
|
||||
"Annotations for Celery tasks as a JSON mapping of task name -> options "
|
||||
"(for example, rate limits or other task-specific settings)."
|
||||
),
|
||||
default=None,
|
||||
)
|
||||
|
||||
@computed_field
|
||||
def CELERY_RESULT_BACKEND(self) -> str | None:
|
||||
if self.CELERY_BACKEND in ("database", "rabbitmq"):
|
||||
|
||||
@ -21,6 +21,7 @@ language_timezone_mapping = {
|
||||
"th-TH": "Asia/Bangkok",
|
||||
"id-ID": "Asia/Jakarta",
|
||||
"ar-TN": "Africa/Tunis",
|
||||
"nl-NL": "Europe/Amsterdam",
|
||||
}
|
||||
|
||||
languages = list(language_timezone_mapping.keys())
|
||||
|
||||
@ -39,7 +39,6 @@ from . import (
|
||||
feature,
|
||||
human_input_form,
|
||||
init_validate,
|
||||
notification,
|
||||
ping,
|
||||
setup,
|
||||
spec,
|
||||
@ -185,7 +184,6 @@ __all__ = [
|
||||
"model_config",
|
||||
"model_providers",
|
||||
"models",
|
||||
"notification",
|
||||
"oauth",
|
||||
"oauth_server",
|
||||
"ops_trace",
|
||||
|
||||
@ -1,5 +1,3 @@
|
||||
import csv
|
||||
import io
|
||||
from collections.abc import Callable
|
||||
from functools import wraps
|
||||
from typing import ParamSpec, TypeVar
|
||||
@ -8,7 +6,7 @@ from flask import request
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
from sqlalchemy import select
|
||||
from werkzeug.exceptions import BadRequest, NotFound, Unauthorized
|
||||
from werkzeug.exceptions import NotFound, Unauthorized
|
||||
|
||||
from configs import dify_config
|
||||
from constants.languages import supported_language
|
||||
@ -18,7 +16,6 @@ from core.db.session_factory import session_factory
|
||||
from extensions.ext_database import db
|
||||
from libs.token import extract_access_token
|
||||
from models.model import App, ExporleBanner, InstalledApp, RecommendedApp, TrialApp
|
||||
from services.billing_service import BillingService
|
||||
|
||||
P = ParamSpec("P")
|
||||
R = TypeVar("R")
|
||||
@ -280,115 +277,3 @@ class DeleteExploreBannerApi(Resource):
|
||||
db.session.commit()
|
||||
|
||||
return {"result": "success"}, 204
|
||||
|
||||
|
||||
class SaveNotificationContentPayload(BaseModel):
|
||||
content: str = Field(...)
|
||||
|
||||
|
||||
class SaveNotificationUserPayload(BaseModel):
|
||||
user_email: list[str] = Field(...)
|
||||
|
||||
|
||||
console_ns.schema_model(
|
||||
SaveNotificationContentPayload.__name__,
|
||||
SaveNotificationContentPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
||||
)
|
||||
|
||||
console_ns.schema_model(
|
||||
SaveNotificationUserPayload.__name__,
|
||||
SaveNotificationUserPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
|
||||
)
|
||||
|
||||
|
||||
@console_ns.route("/admin/save_notification_content")
|
||||
class SaveNotificationContentApi(Resource):
|
||||
@console_ns.doc("save_notification_content")
|
||||
@console_ns.doc(description="Save a notification content")
|
||||
@console_ns.expect(console_ns.models[SaveNotificationContentPayload.__name__])
|
||||
@console_ns.response(200, "Notification content saved successfully")
|
||||
@only_edition_cloud
|
||||
@admin_required
|
||||
def post(self):
|
||||
payload = SaveNotificationContentPayload.model_validate(console_ns.payload)
|
||||
BillingService.save_notification_content(payload.content)
|
||||
return {"result": "success"}, 200
|
||||
|
||||
|
||||
@console_ns.route("/admin/save_notification_user")
|
||||
class SaveNotificationUserApi(Resource):
|
||||
@console_ns.doc("save_notification_user")
|
||||
@console_ns.doc(
|
||||
description="Save notification users via JSON body or file upload. "
|
||||
'JSON: {"user_email": ["a@example.com", ...]}. '
|
||||
"File: multipart/form-data with a 'file' field (CSV or TXT, one email per line)."
|
||||
)
|
||||
@console_ns.response(200, "Notification users saved successfully")
|
||||
@only_edition_cloud
|
||||
@admin_required
|
||||
def post(self):
|
||||
# Determine input mode: file upload or JSON body
|
||||
if "file" in request.files:
|
||||
emails = self._parse_emails_from_file()
|
||||
else:
|
||||
payload = SaveNotificationUserPayload.model_validate(console_ns.payload)
|
||||
emails = payload.user_email
|
||||
|
||||
if not emails:
|
||||
raise BadRequest("No valid email addresses provided.")
|
||||
|
||||
# Use batch API for bulk insert (chunks of 1000 per request to billing service)
|
||||
result = BillingService.save_notification_users_batch(emails)
|
||||
|
||||
return {
|
||||
"result": "success",
|
||||
"total": len(emails),
|
||||
"succeeded": result["succeeded"],
|
||||
"failed_chunks": result["failed_chunks"],
|
||||
}, 200
|
||||
|
||||
@staticmethod
|
||||
def _parse_emails_from_file() -> list[str]:
|
||||
"""Parse email addresses from an uploaded CSV or TXT file."""
|
||||
file = request.files["file"]
|
||||
|
||||
if not file.filename:
|
||||
raise BadRequest("Uploaded file has no filename.")
|
||||
|
||||
filename_lower = file.filename.lower()
|
||||
if not filename_lower.endswith((".csv", ".txt")):
|
||||
raise BadRequest("Invalid file type. Only CSV (.csv) and TXT (.txt) files are allowed.")
|
||||
|
||||
# Read file content
|
||||
try:
|
||||
content = file.read().decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
try:
|
||||
file.seek(0)
|
||||
content = file.read().decode("gbk")
|
||||
except UnicodeDecodeError:
|
||||
raise BadRequest("Unable to decode the file. Please use UTF-8 or GBK encoding.")
|
||||
|
||||
emails: list[str] = []
|
||||
if filename_lower.endswith(".csv"):
|
||||
reader = csv.reader(io.StringIO(content))
|
||||
for row in reader:
|
||||
for cell in row:
|
||||
cell = cell.strip()
|
||||
emails.append(cell)
|
||||
else:
|
||||
# TXT file: one email per line
|
||||
for line in content.splitlines():
|
||||
line = line.strip()
|
||||
emails.append(line)
|
||||
|
||||
# Deduplicate while preserving order
|
||||
seen: set[str] = set()
|
||||
unique_emails: list[str] = []
|
||||
for email in emails:
|
||||
email_lower = email.lower()
|
||||
if email_lower not in seen:
|
||||
seen.add(email_lower)
|
||||
unique_emails.append(email)
|
||||
|
||||
return unique_emails
|
||||
|
||||
@ -599,7 +599,12 @@ def _get_conversation(app_model, conversation_id):
|
||||
db.session.execute(
|
||||
sa.update(Conversation)
|
||||
.where(Conversation.id == conversation_id, Conversation.read_at.is_(None))
|
||||
.values(read_at=naive_utc_now(), read_account_id=current_user.id)
|
||||
# Keep updated_at unchanged when only marking a conversation as read.
|
||||
.values(
|
||||
read_at=naive_utc_now(),
|
||||
read_account_id=current_user.id,
|
||||
updated_at=Conversation.updated_at,
|
||||
)
|
||||
)
|
||||
db.session.commit()
|
||||
db.session.refresh(conversation)
|
||||
|
||||
@ -1,26 +0,0 @@
|
||||
from flask_restx import Resource
|
||||
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required
|
||||
from libs.login import current_account_with_tenant, login_required
|
||||
from services.billing_service import BillingService
|
||||
|
||||
|
||||
@console_ns.route("/notification")
|
||||
class NotificationApi(Resource):
|
||||
@console_ns.doc("get_notification")
|
||||
@console_ns.doc(description="Get notification for the current user")
|
||||
@console_ns.doc(
|
||||
responses={
|
||||
200: "Success",
|
||||
401: "Unauthorized",
|
||||
}
|
||||
)
|
||||
@setup_required
|
||||
@login_required
|
||||
@account_initialization_required
|
||||
@only_edition_cloud
|
||||
def get(self):
|
||||
current_user, _ = current_account_with_tenant()
|
||||
notification = BillingService.read_notification(current_user.email)
|
||||
return notification
|
||||
@ -1,6 +1,7 @@
|
||||
import urllib.parse
|
||||
|
||||
import httpx
|
||||
from flask_restx import Resource
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
import services
|
||||
@ -10,12 +11,12 @@ from controllers.common.errors import (
|
||||
RemoteFileUploadError,
|
||||
UnsupportedFileTypeError,
|
||||
)
|
||||
from controllers.fastopenapi import console_router
|
||||
from controllers.console import console_ns
|
||||
from core.file import helpers as file_helpers
|
||||
from core.helper import ssrf_proxy
|
||||
from extensions.ext_database import db
|
||||
from fields.file_fields import FileWithSignedUrl, RemoteFileInfo
|
||||
from libs.login import current_account_with_tenant
|
||||
from libs.login import current_account_with_tenant, login_required
|
||||
from services.file_service import FileService
|
||||
|
||||
|
||||
@ -23,69 +24,73 @@ class RemoteFileUploadPayload(BaseModel):
|
||||
url: str = Field(..., description="URL to fetch")
|
||||
|
||||
|
||||
@console_router.get(
|
||||
"/remote-files/<path:url>",
|
||||
response_model=RemoteFileInfo,
|
||||
tags=["console"],
|
||||
)
|
||||
def get_remote_file_info(url: str) -> RemoteFileInfo:
|
||||
decoded_url = urllib.parse.unquote(url)
|
||||
resp = ssrf_proxy.head(decoded_url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
resp = ssrf_proxy.get(decoded_url, timeout=3)
|
||||
resp.raise_for_status()
|
||||
return RemoteFileInfo(
|
||||
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
|
||||
file_length=int(resp.headers.get("Content-Length", 0)),
|
||||
)
|
||||
|
||||
|
||||
@console_router.post(
|
||||
"/remote-files/upload",
|
||||
response_model=FileWithSignedUrl,
|
||||
tags=["console"],
|
||||
status_code=201,
|
||||
)
|
||||
def upload_remote_file(payload: RemoteFileUploadPayload) -> FileWithSignedUrl:
|
||||
url = payload.url
|
||||
|
||||
try:
|
||||
resp = ssrf_proxy.head(url=url)
|
||||
@console_ns.route("/remote-files/<path:url>")
|
||||
class GetRemoteFileInfo(Resource):
|
||||
@login_required
|
||||
def get(self, url: str):
|
||||
decoded_url = urllib.parse.unquote(url)
|
||||
resp = ssrf_proxy.head(decoded_url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
|
||||
except httpx.RequestError as e:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
|
||||
resp = ssrf_proxy.get(decoded_url, timeout=3)
|
||||
resp.raise_for_status()
|
||||
return RemoteFileInfo(
|
||||
file_type=resp.headers.get("Content-Type", "application/octet-stream"),
|
||||
file_length=int(resp.headers.get("Content-Length", 0)),
|
||||
).model_dump(mode="json")
|
||||
|
||||
file_info = helpers.guess_file_info_from_response(resp)
|
||||
|
||||
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
|
||||
raise FileTooLargeError
|
||||
@console_ns.route("/remote-files/upload")
|
||||
class RemoteFileUpload(Resource):
|
||||
@login_required
|
||||
def post(self):
|
||||
payload = RemoteFileUploadPayload.model_validate(console_ns.payload)
|
||||
url = payload.url
|
||||
|
||||
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
|
||||
# Try to fetch remote file metadata/content first
|
||||
try:
|
||||
resp = ssrf_proxy.head(url=url)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
resp = ssrf_proxy.get(url=url, timeout=3, follow_redirects=True)
|
||||
if resp.status_code != httpx.codes.OK:
|
||||
# Normalize into a user-friendly error message expected by tests
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {resp.text}")
|
||||
except httpx.RequestError as e:
|
||||
raise RemoteFileUploadError(f"Failed to fetch file from {url}: {str(e)}")
|
||||
|
||||
try:
|
||||
user, _ = current_account_with_tenant()
|
||||
upload_file = FileService(db.engine).upload_file(
|
||||
filename=file_info.filename,
|
||||
content=content,
|
||||
mimetype=file_info.mimetype,
|
||||
user=user,
|
||||
source_url=url,
|
||||
file_info = helpers.guess_file_info_from_response(resp)
|
||||
|
||||
# Enforce file size limit with 400 (Bad Request) per tests' expectation
|
||||
if not FileService.is_file_size_within_limit(extension=file_info.extension, file_size=file_info.size):
|
||||
raise FileTooLargeError()
|
||||
|
||||
# Load content if needed
|
||||
content = resp.content if resp.request.method == "GET" else ssrf_proxy.get(url).content
|
||||
|
||||
try:
|
||||
user, _ = current_account_with_tenant()
|
||||
upload_file = FileService(db.engine).upload_file(
|
||||
filename=file_info.filename,
|
||||
content=content,
|
||||
mimetype=file_info.mimetype,
|
||||
user=user,
|
||||
source_url=url,
|
||||
)
|
||||
except services.errors.file.FileTooLargeError as file_too_large_error:
|
||||
raise FileTooLargeError(file_too_large_error.description)
|
||||
except services.errors.file.UnsupportedFileTypeError:
|
||||
raise UnsupportedFileTypeError()
|
||||
|
||||
# Success: return created resource with 201 status
|
||||
return (
|
||||
FileWithSignedUrl(
|
||||
id=upload_file.id,
|
||||
name=upload_file.name,
|
||||
size=upload_file.size,
|
||||
extension=upload_file.extension,
|
||||
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
|
||||
mime_type=upload_file.mime_type,
|
||||
created_by=upload_file.created_by,
|
||||
created_at=int(upload_file.created_at.timestamp()),
|
||||
).model_dump(mode="json"),
|
||||
201,
|
||||
)
|
||||
except services.errors.file.FileTooLargeError as file_too_large_error:
|
||||
raise FileTooLargeError(file_too_large_error.description)
|
||||
except services.errors.file.UnsupportedFileTypeError:
|
||||
raise UnsupportedFileTypeError()
|
||||
|
||||
return FileWithSignedUrl(
|
||||
id=upload_file.id,
|
||||
name=upload_file.name,
|
||||
size=upload_file.size,
|
||||
extension=upload_file.extension,
|
||||
url=file_helpers.get_signed_file_url(upload_file_id=upload_file.id),
|
||||
mime_type=upload_file.mime_type,
|
||||
created_by=upload_file.created_by,
|
||||
created_at=int(upload_file.created_at.timestamp()),
|
||||
)
|
||||
|
||||
@ -42,7 +42,15 @@ class SetupResponse(BaseModel):
|
||||
tags=["console"],
|
||||
)
|
||||
def get_setup_status_api() -> SetupStatusResponse:
|
||||
"""Get system setup status."""
|
||||
"""Get system setup status.
|
||||
|
||||
NOTE: This endpoint is unauthenticated by design.
|
||||
|
||||
During first-time bootstrap there is no admin account yet, so frontend initialization must be
|
||||
able to query setup progress before any login flow exists.
|
||||
|
||||
Only bootstrap-safe status information should be returned by this endpoint.
|
||||
"""
|
||||
if dify_config.EDITION == "SELF_HOSTED":
|
||||
setup_status = get_setup_status()
|
||||
if setup_status and not isinstance(setup_status, bool):
|
||||
@ -61,7 +69,12 @@ def get_setup_status_api() -> SetupStatusResponse:
|
||||
)
|
||||
@only_edition_self_hosted
|
||||
def setup_system(payload: SetupRequestPayload) -> SetupResponse:
|
||||
"""Initialize system setup with admin account."""
|
||||
"""Initialize system setup with admin account.
|
||||
|
||||
NOTE: This endpoint is unauthenticated by design for first-time bootstrap.
|
||||
Access is restricted by deployment mode (`SELF_HOSTED`), one-time setup guards,
|
||||
and init-password validation rather than user session authentication.
|
||||
"""
|
||||
if get_setup_status():
|
||||
raise AlreadySetupError()
|
||||
|
||||
|
||||
@ -34,7 +34,7 @@ def stream_topic_events(
|
||||
on_subscribe()
|
||||
while True:
|
||||
try:
|
||||
msg = sub.receive(timeout=0.1)
|
||||
msg = sub.receive(timeout=1)
|
||||
except SubscriptionClosedError:
|
||||
return
|
||||
if msg is None:
|
||||
|
||||
@ -45,6 +45,8 @@ from core.app.entities.task_entities import (
|
||||
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
|
||||
from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
|
||||
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
|
||||
from core.file import helpers as file_helpers
|
||||
from core.file.enums import FileTransferMethod
|
||||
from core.model_manager import ModelInstance
|
||||
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
|
||||
from core.model_runtime.entities.message_entities import (
|
||||
@ -56,10 +58,11 @@ from core.ops.entities.trace_entity import TraceTaskName
|
||||
from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
|
||||
from core.prompt.utils.prompt_message_util import PromptMessageUtil
|
||||
from core.prompt.utils.prompt_template_parser import PromptTemplateParser
|
||||
from core.tools.signature import sign_tool_file
|
||||
from events.message_event import message_was_created
|
||||
from extensions.ext_database import db
|
||||
from libs.datetime_utils import naive_utc_now
|
||||
from models.model import AppMode, Conversation, Message, MessageAgentThought
|
||||
from models.model import AppMode, Conversation, Message, MessageAgentThought, MessageFile, UploadFile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -463,6 +466,85 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
|
||||
metadata=metadata_dict,
|
||||
)
|
||||
|
||||
def _record_files(self):
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
message_files = session.scalars(select(MessageFile).where(MessageFile.message_id == self._message_id)).all()
|
||||
if not message_files:
|
||||
return None
|
||||
|
||||
files_list = []
|
||||
upload_file_ids = [
|
||||
mf.upload_file_id
|
||||
for mf in message_files
|
||||
if mf.transfer_method == FileTransferMethod.LOCAL_FILE and mf.upload_file_id
|
||||
]
|
||||
upload_files_map = {}
|
||||
if upload_file_ids:
|
||||
upload_files = session.scalars(select(UploadFile).where(UploadFile.id.in_(upload_file_ids))).all()
|
||||
upload_files_map = {uf.id: uf for uf in upload_files}
|
||||
|
||||
for message_file in message_files:
|
||||
upload_file = None
|
||||
if message_file.transfer_method == FileTransferMethod.LOCAL_FILE and message_file.upload_file_id:
|
||||
upload_file = upload_files_map.get(message_file.upload_file_id)
|
||||
|
||||
url = None
|
||||
filename = "file"
|
||||
mime_type = "application/octet-stream"
|
||||
size = 0
|
||||
extension = ""
|
||||
|
||||
if message_file.transfer_method == FileTransferMethod.REMOTE_URL:
|
||||
url = message_file.url
|
||||
if message_file.url:
|
||||
filename = message_file.url.split("/")[-1].split("?")[0] # Remove query params
|
||||
elif message_file.transfer_method == FileTransferMethod.LOCAL_FILE:
|
||||
if upload_file:
|
||||
url = file_helpers.get_signed_file_url(upload_file_id=str(upload_file.id))
|
||||
filename = upload_file.name
|
||||
mime_type = upload_file.mime_type or "application/octet-stream"
|
||||
size = upload_file.size or 0
|
||||
extension = f".{upload_file.extension}" if upload_file.extension else ""
|
||||
elif message_file.upload_file_id:
|
||||
# Fallback: generate URL even if upload_file not found
|
||||
url = file_helpers.get_signed_file_url(upload_file_id=str(message_file.upload_file_id))
|
||||
elif message_file.transfer_method == FileTransferMethod.TOOL_FILE and message_file.url:
|
||||
# For tool files, use URL directly if it's HTTP, otherwise sign it
|
||||
if message_file.url.startswith("http"):
|
||||
url = message_file.url
|
||||
filename = message_file.url.split("/")[-1].split("?")[0]
|
||||
else:
|
||||
# Extract tool file id and extension from URL
|
||||
url_parts = message_file.url.split("/")
|
||||
if url_parts:
|
||||
file_part = url_parts[-1].split("?")[0] # Remove query params first
|
||||
# Use rsplit to correctly handle filenames with multiple dots
|
||||
if "." in file_part:
|
||||
tool_file_id, ext = file_part.rsplit(".", 1)
|
||||
extension = f".{ext}"
|
||||
else:
|
||||
tool_file_id = file_part
|
||||
extension = ".bin"
|
||||
url = sign_tool_file(tool_file_id=tool_file_id, extension=extension)
|
||||
filename = file_part
|
||||
|
||||
transfer_method_value = message_file.transfer_method
|
||||
remote_url = message_file.url if message_file.transfer_method == FileTransferMethod.REMOTE_URL else ""
|
||||
file_dict = {
|
||||
"related_id": message_file.id,
|
||||
"extension": extension,
|
||||
"filename": filename,
|
||||
"size": size,
|
||||
"mime_type": mime_type,
|
||||
"transfer_method": transfer_method_value,
|
||||
"type": message_file.type,
|
||||
"url": url or "",
|
||||
"upload_file_id": message_file.upload_file_id or message_file.id,
|
||||
"remote_url": remote_url,
|
||||
}
|
||||
files_list.append(file_dict)
|
||||
return files_list or None
|
||||
|
||||
def _agent_message_to_stream_response(self, answer: str, message_id: str) -> AgentMessageStreamResponse:
|
||||
"""
|
||||
Agent message to stream response.
|
||||
|
||||
@ -64,7 +64,13 @@ class MessageCycleManager:
|
||||
|
||||
# Use SQLAlchemy 2.x style session.scalar(select(...))
|
||||
with session_factory.create_session() as session:
|
||||
message_file = session.scalar(select(MessageFile).where(MessageFile.message_id == message_id))
|
||||
message_file = session.scalar(
|
||||
select(MessageFile)
|
||||
.where(
|
||||
MessageFile.message_id == message_id,
|
||||
)
|
||||
.where(MessageFile.belongs_to == "assistant")
|
||||
)
|
||||
|
||||
if message_file:
|
||||
self._message_has_file.add(message_id)
|
||||
|
||||
@ -5,7 +5,7 @@ from collections.abc import Generator
|
||||
from copy import deepcopy
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
if TYPE_CHECKING:
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from models.model import File
|
||||
|
||||
from core.tools.__base.tool_runtime import ToolRuntime
|
||||
@ -171,7 +171,7 @@ class Tool(ABC):
|
||||
def create_file_message(self, file: File) -> ToolInvokeMessage:
|
||||
return ToolInvokeMessage(
|
||||
type=ToolInvokeMessage.MessageType.FILE,
|
||||
message=ToolInvokeMessage.FileMessage(),
|
||||
message=ToolInvokeMessage.FileMessage(file_marker="file_marker"),
|
||||
meta={"file": file},
|
||||
)
|
||||
|
||||
|
||||
@ -80,8 +80,14 @@ def init_app(app: DifyApp) -> Celery:
|
||||
worker_hijack_root_logger=False,
|
||||
timezone=pytz.timezone(dify_config.LOG_TZ or "UTC"),
|
||||
task_ignore_result=True,
|
||||
task_annotations=dify_config.CELERY_TASK_ANNOTATIONS,
|
||||
)
|
||||
|
||||
if dify_config.CELERY_BACKEND == "redis":
|
||||
celery_app.conf.update(
|
||||
result_backend_transport_options=broker_transport_options,
|
||||
)
|
||||
|
||||
# Apply SSL configuration if enabled
|
||||
ssl_options = _get_celery_ssl_options()
|
||||
if ssl_options:
|
||||
|
||||
@ -119,7 +119,7 @@ class RedisClientWrapper:
|
||||
|
||||
|
||||
redis_client: RedisClientWrapper = RedisClientWrapper()
|
||||
pubsub_redis_client: RedisClientWrapper = RedisClientWrapper()
|
||||
_pubsub_redis_client: redis.Redis | RedisCluster | None = None
|
||||
|
||||
|
||||
def _get_ssl_configuration() -> tuple[type[Union[Connection, SSLConnection]], dict[str, Any]]:
|
||||
@ -232,7 +232,7 @@ def _create_standalone_client(redis_params: dict[str, Any]) -> Union[redis.Redis
|
||||
return client
|
||||
|
||||
|
||||
def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> Union[redis.Redis, RedisCluster]:
|
||||
def _create_pubsub_client(pubsub_url: str, use_clusters: bool) -> redis.Redis | RedisCluster:
|
||||
if use_clusters:
|
||||
return RedisCluster.from_url(pubsub_url)
|
||||
return redis.Redis.from_url(pubsub_url)
|
||||
@ -256,23 +256,19 @@ def init_app(app: DifyApp):
|
||||
redis_client.initialize(client)
|
||||
app.extensions["redis"] = redis_client
|
||||
|
||||
pubsub_client = client
|
||||
global _pubsub_redis_client
|
||||
_pubsub_redis_client = client
|
||||
if dify_config.normalized_pubsub_redis_url:
|
||||
pubsub_client = _create_pubsub_client(
|
||||
_pubsub_redis_client = _create_pubsub_client(
|
||||
dify_config.normalized_pubsub_redis_url, dify_config.PUBSUB_REDIS_USE_CLUSTERS
|
||||
)
|
||||
pubsub_redis_client.initialize(pubsub_client)
|
||||
|
||||
|
||||
def get_pubsub_redis_client() -> RedisClientWrapper:
|
||||
return pubsub_redis_client
|
||||
|
||||
|
||||
def get_pubsub_broadcast_channel() -> BroadcastChannelProtocol:
|
||||
redis_conn = get_pubsub_redis_client()
|
||||
assert _pubsub_redis_client is not None, "PubSub redis Client should be initialized here."
|
||||
if dify_config.PUBSUB_REDIS_CHANNEL_TYPE == "sharded":
|
||||
return ShardedRedisBroadcastChannel(redis_conn) # pyright: ignore[reportArgumentType]
|
||||
return RedisBroadcastChannel(redis_conn) # pyright: ignore[reportArgumentType]
|
||||
return ShardedRedisBroadcastChannel(_pubsub_redis_client)
|
||||
return RedisBroadcastChannel(_pubsub_redis_client)
|
||||
|
||||
|
||||
P = ParamSpec("P")
|
||||
|
||||
@ -152,7 +152,7 @@ class RedisSubscriptionBase(Subscription):
|
||||
"""Iterator for consuming messages from the subscription."""
|
||||
while not self._closed.is_set():
|
||||
try:
|
||||
item = self._queue.get(timeout=0.1)
|
||||
item = self._queue.get(timeout=1)
|
||||
except queue.Empty:
|
||||
continue
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from libs.broadcast_channel.channel import Producer, Subscriber, Subscription
|
||||
from redis import Redis
|
||||
from redis import Redis, RedisCluster
|
||||
|
||||
from ._subscription import RedisSubscriptionBase
|
||||
|
||||
@ -18,7 +18,7 @@ class BroadcastChannel:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
redis_client: Redis,
|
||||
redis_client: Redis | RedisCluster,
|
||||
):
|
||||
self._client = redis_client
|
||||
|
||||
@ -27,7 +27,7 @@ class BroadcastChannel:
|
||||
|
||||
|
||||
class Topic:
|
||||
def __init__(self, redis_client: Redis, topic: str):
|
||||
def __init__(self, redis_client: Redis | RedisCluster, topic: str):
|
||||
self._client = redis_client
|
||||
self._topic = topic
|
||||
|
||||
|
||||
@ -70,8 +70,9 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
|
||||
# Since we have already filtered at the caller's site, we can safely set
|
||||
# `ignore_subscribe_messages=False`.
|
||||
if isinstance(self._client, RedisCluster):
|
||||
# NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message`
|
||||
# would use busy-looping to wait for incoming message, consuming excessive CPU quota.
|
||||
# NOTE(QuantumGhost): due to an issue in upstream code, calling `get_sharded_message` without
|
||||
# specifying the `target_node` argument would use busy-looping to wait
|
||||
# for incoming message, consuming excessive CPU quota.
|
||||
#
|
||||
# Here we specify the `target_node` to mitigate this problem.
|
||||
node = self._client.get_node_from_key(self._topic)
|
||||
@ -80,8 +81,10 @@ class _RedisShardedSubscription(RedisSubscriptionBase):
|
||||
timeout=1,
|
||||
target_node=node,
|
||||
)
|
||||
else:
|
||||
elif isinstance(self._client, Redis):
|
||||
return self._pubsub.get_sharded_message(ignore_subscribe_messages=False, timeout=1) # type: ignore[attr-defined]
|
||||
else:
|
||||
raise AssertionError("client should be either Redis or RedisCluster.")
|
||||
|
||||
def _get_message_type(self) -> str:
|
||||
return "smessage"
|
||||
|
||||
@ -0,0 +1,59 @@
|
||||
"""add unique constraint to tenant_default_models
|
||||
|
||||
Revision ID: fix_tenant_default_model_unique
|
||||
Revises: 9d77545f524e
|
||||
Create Date: 2026-01-19 15:07:00.000000
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def _is_pg(conn):
|
||||
return conn.dialect.name == "postgresql"
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f55813ffe2c8'
|
||||
down_revision = 'c3df22613c99'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# First, remove duplicate records keeping only the most recent one per (tenant_id, model_type)
|
||||
# This is necessary before adding the unique constraint
|
||||
conn = op.get_bind()
|
||||
|
||||
# Delete duplicates: keep the record with the latest updated_at for each (tenant_id, model_type)
|
||||
# If updated_at is the same, keep the one with the largest id as tiebreaker
|
||||
if _is_pg(conn):
|
||||
# PostgreSQL: Use DISTINCT ON for efficient deduplication
|
||||
conn.execute(sa.text("""
|
||||
DELETE FROM tenant_default_models
|
||||
WHERE id NOT IN (
|
||||
SELECT DISTINCT ON (tenant_id, model_type) id
|
||||
FROM tenant_default_models
|
||||
ORDER BY tenant_id, model_type, updated_at DESC, id DESC
|
||||
)
|
||||
"""))
|
||||
else:
|
||||
# MySQL: Use self-join to find and delete duplicates
|
||||
# Keep the record with latest updated_at (or largest id if updated_at is equal)
|
||||
conn.execute(sa.text("""
|
||||
DELETE t1 FROM tenant_default_models t1
|
||||
INNER JOIN tenant_default_models t2
|
||||
ON t1.tenant_id = t2.tenant_id
|
||||
AND t1.model_type = t2.model_type
|
||||
AND (t1.updated_at < t2.updated_at
|
||||
OR (t1.updated_at = t2.updated_at AND t1.id < t2.id))
|
||||
"""))
|
||||
|
||||
# Now add the unique constraint
|
||||
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
|
||||
batch_op.create_unique_constraint('unique_tenant_default_model_type', ['tenant_id', 'model_type'])
|
||||
|
||||
|
||||
def downgrade():
|
||||
with op.batch_alter_table('tenant_default_models', schema=None) as batch_op:
|
||||
batch_op.drop_constraint('unique_tenant_default_model_type', type_='unique')
|
||||
@ -0,0 +1,39 @@
|
||||
"""fix index to optimize message clean job performance
|
||||
|
||||
Revision ID: fce013ca180e
|
||||
Revises: f55813ffe2c8
|
||||
Create Date: 2026-02-11 15:49:17.603638
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import models as models
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'fce013ca180e'
|
||||
down_revision = 'f55813ffe2c8'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('messages', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('message_created_at_idx'))
|
||||
|
||||
with op.batch_alter_table('saved_messages', schema=None) as batch_op:
|
||||
batch_op.create_index('saved_message_message_id_idx', ['message_id'], unique=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('saved_messages', schema=None) as batch_op:
|
||||
batch_op.drop_index('saved_message_message_id_idx')
|
||||
|
||||
with op.batch_alter_table('messages', schema=None) as batch_op:
|
||||
batch_op.create_index(batch_op.f('message_created_at_idx'), ['created_at'], unique=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
@ -227,7 +227,7 @@ class App(Base):
|
||||
with Session(db.engine) as session:
|
||||
if api_provider_ids:
|
||||
existing_api_providers = [
|
||||
api_provider.id
|
||||
str(api_provider.id)
|
||||
for api_provider in session.execute(
|
||||
text("SELECT id FROM tool_api_providers WHERE id IN :provider_ids"),
|
||||
{"provider_ids": tuple(api_provider_ids)},
|
||||
@ -1040,7 +1040,6 @@ class Message(Base):
|
||||
Index("message_end_user_idx", "app_id", "from_source", "from_end_user_id"),
|
||||
Index("message_account_idx", "app_id", "from_source", "from_account_id"),
|
||||
Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
|
||||
Index("message_created_at_idx", "created_at"),
|
||||
Index("message_app_mode_idx", "app_mode"),
|
||||
Index("message_created_at_id_idx", "created_at", "id"),
|
||||
)
|
||||
|
||||
@ -181,6 +181,7 @@ class TenantDefaultModel(TypeBase):
|
||||
__table_args__ = (
|
||||
sa.PrimaryKeyConstraint("id", name="tenant_default_model_pkey"),
|
||||
sa.Index("tenant_default_model_tenant_id_provider_type_idx", "tenant_id", "provider_name", "model_type"),
|
||||
sa.UniqueConstraint("tenant_id", "model_type", name="unique_tenant_default_model_type"),
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(
|
||||
|
||||
@ -16,6 +16,7 @@ class SavedMessage(TypeBase):
|
||||
__table_args__ = (
|
||||
sa.PrimaryKeyConstraint("id", name="saved_message_pkey"),
|
||||
sa.Index("saved_message_message_idx", "app_id", "message_id", "created_by_role", "created_by"),
|
||||
sa.Index("saved_message_message_id_idx", "message_id"),
|
||||
)
|
||||
|
||||
id: Mapped[str] = mapped_column(
|
||||
|
||||
@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "dify-api"
|
||||
version = "1.12.1"
|
||||
version = "1.13.0"
|
||||
requires-python = ">=3.11,<3.13"
|
||||
|
||||
dependencies = [
|
||||
@ -23,7 +23,7 @@ dependencies = [
|
||||
"gevent~=25.9.1",
|
||||
"gmpy2~=2.2.1",
|
||||
"google-api-core==2.18.0",
|
||||
"google-api-python-client==2.90.0",
|
||||
"google-api-python-client==2.189.0",
|
||||
"google-auth==2.29.0",
|
||||
"google-auth-httplib2==0.2.0",
|
||||
"google-cloud-aiplatform==1.49.0",
|
||||
|
||||
@ -264,9 +264,15 @@ class APIWorkflowRunRepository(WorkflowExecutionRepository, Protocol):
|
||||
batch_size: int,
|
||||
run_types: Sequence[WorkflowType] | None = None,
|
||||
tenant_ids: Sequence[str] | None = None,
|
||||
workflow_ids: Sequence[str] | None = None,
|
||||
) -> Sequence[WorkflowRun]:
|
||||
"""
|
||||
Fetch ended workflow runs in a time window for archival and clean batching.
|
||||
|
||||
Optional filters:
|
||||
- run_types
|
||||
- tenant_ids
|
||||
- workflow_ids
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
@ -386,6 +386,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
batch_size: int,
|
||||
run_types: Sequence[WorkflowType] | None = None,
|
||||
tenant_ids: Sequence[str] | None = None,
|
||||
workflow_ids: Sequence[str] | None = None,
|
||||
) -> Sequence[WorkflowRun]:
|
||||
"""
|
||||
Fetch ended workflow runs in a time window for archival and clean batching.
|
||||
@ -394,7 +395,7 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
- created_at in [start_from, end_before)
|
||||
- type in run_types (when provided)
|
||||
- status is an ended state
|
||||
- optional tenant_id filter and cursor (last_seen) for pagination
|
||||
- optional tenant_id, workflow_id filters and cursor (last_seen) for pagination
|
||||
"""
|
||||
with self._session_maker() as session:
|
||||
stmt = (
|
||||
@ -417,6 +418,9 @@ class DifyAPISQLAlchemyWorkflowRunRepository(APIWorkflowRunRepository):
|
||||
if tenant_ids:
|
||||
stmt = stmt.where(WorkflowRun.tenant_id.in_(tenant_ids))
|
||||
|
||||
if workflow_ids:
|
||||
stmt = stmt.where(WorkflowRun.workflow_id.in_(workflow_ids))
|
||||
|
||||
if last_seen:
|
||||
stmt = stmt.where(
|
||||
or_(
|
||||
|
||||
@ -4,7 +4,6 @@ import time
|
||||
from collections.abc import Sequence
|
||||
|
||||
import click
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session, sessionmaker
|
||||
|
||||
import app
|
||||
@ -13,6 +12,7 @@ from extensions.ext_database import db
|
||||
from models.model import (
|
||||
AppAnnotationHitHistory,
|
||||
Conversation,
|
||||
DatasetRetrieverResource,
|
||||
Message,
|
||||
MessageAgentThought,
|
||||
MessageAnnotation,
|
||||
@ -20,7 +20,10 @@ from models.model import (
|
||||
MessageFeedback,
|
||||
MessageFile,
|
||||
)
|
||||
from models.workflow import ConversationVariable, WorkflowAppLog, WorkflowNodeExecutionModel, WorkflowRun
|
||||
from models.web import SavedMessage
|
||||
from models.workflow import ConversationVariable, WorkflowRun
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
from repositories.sqlalchemy_workflow_trigger_log_repository import SQLAlchemyWorkflowTriggerLogRepository
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -29,8 +32,15 @@ MAX_RETRIES = 3
|
||||
BATCH_SIZE = dify_config.WORKFLOW_LOG_CLEANUP_BATCH_SIZE
|
||||
|
||||
|
||||
@app.celery.task(queue="dataset")
|
||||
def clean_workflow_runlogs_precise():
|
||||
def _get_specific_workflow_ids() -> list[str]:
|
||||
workflow_ids_str = dify_config.WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS.strip()
|
||||
if not workflow_ids_str:
|
||||
return []
|
||||
return [wid.strip() for wid in workflow_ids_str.split(",") if wid.strip()]
|
||||
|
||||
|
||||
@app.celery.task(queue="retention")
|
||||
def clean_workflow_runlogs_precise() -> None:
|
||||
"""Clean expired workflow run logs with retry mechanism and complete message cascade"""
|
||||
|
||||
click.echo(click.style("Start clean workflow run logs (precise mode with complete cascade).", fg="green"))
|
||||
@ -39,48 +49,48 @@ def clean_workflow_runlogs_precise():
|
||||
retention_days = dify_config.WORKFLOW_LOG_RETENTION_DAYS
|
||||
cutoff_date = datetime.datetime.now() - datetime.timedelta(days=retention_days)
|
||||
session_factory = sessionmaker(db.engine, expire_on_commit=False)
|
||||
workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_factory)
|
||||
workflow_ids = _get_specific_workflow_ids()
|
||||
workflow_ids_filter = workflow_ids or None
|
||||
|
||||
try:
|
||||
with session_factory.begin() as session:
|
||||
total_workflow_runs = session.query(WorkflowRun).where(WorkflowRun.created_at < cutoff_date).count()
|
||||
if total_workflow_runs == 0:
|
||||
logger.info("No expired workflow run logs found")
|
||||
return
|
||||
logger.info("Found %s expired workflow run logs to clean", total_workflow_runs)
|
||||
|
||||
total_deleted = 0
|
||||
failed_batches = 0
|
||||
batch_count = 0
|
||||
last_seen: tuple[datetime.datetime, str] | None = None
|
||||
while True:
|
||||
run_rows = workflow_run_repo.get_runs_batch_by_time_range(
|
||||
start_from=None,
|
||||
end_before=cutoff_date,
|
||||
last_seen=last_seen,
|
||||
batch_size=BATCH_SIZE,
|
||||
workflow_ids=workflow_ids_filter,
|
||||
)
|
||||
|
||||
if not run_rows:
|
||||
if batch_count == 0:
|
||||
logger.info("No expired workflow run logs found")
|
||||
break
|
||||
|
||||
last_seen = (run_rows[-1].created_at, run_rows[-1].id)
|
||||
batch_count += 1
|
||||
with session_factory.begin() as session:
|
||||
workflow_run_ids = session.scalars(
|
||||
select(WorkflowRun.id)
|
||||
.where(WorkflowRun.created_at < cutoff_date)
|
||||
.order_by(WorkflowRun.created_at, WorkflowRun.id)
|
||||
.limit(BATCH_SIZE)
|
||||
).all()
|
||||
success = _delete_batch(session, workflow_run_repo, run_rows, failed_batches)
|
||||
|
||||
if not workflow_run_ids:
|
||||
if success:
|
||||
total_deleted += len(run_rows)
|
||||
failed_batches = 0
|
||||
else:
|
||||
failed_batches += 1
|
||||
if failed_batches >= MAX_RETRIES:
|
||||
logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES)
|
||||
break
|
||||
|
||||
batch_count += 1
|
||||
|
||||
success = _delete_batch(session, workflow_run_ids, failed_batches)
|
||||
|
||||
if success:
|
||||
total_deleted += len(workflow_run_ids)
|
||||
failed_batches = 0
|
||||
else:
|
||||
failed_batches += 1
|
||||
if failed_batches >= MAX_RETRIES:
|
||||
logger.error("Failed to delete batch after %s retries, aborting cleanup for today", MAX_RETRIES)
|
||||
break
|
||||
else:
|
||||
# Calculate incremental delay times: 5, 10, 15 minutes
|
||||
retry_delay_minutes = failed_batches * 5
|
||||
logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes)
|
||||
time.sleep(retry_delay_minutes * 60)
|
||||
continue
|
||||
# Calculate incremental delay times: 5, 10, 15 minutes
|
||||
retry_delay_minutes = failed_batches * 5
|
||||
logger.warning("Batch deletion failed, retrying in %s minutes...", retry_delay_minutes)
|
||||
time.sleep(retry_delay_minutes * 60)
|
||||
continue
|
||||
|
||||
logger.info("Cleanup completed: %s expired workflow run logs deleted", total_deleted)
|
||||
|
||||
@ -93,10 +103,16 @@ def clean_workflow_runlogs_precise():
|
||||
click.echo(click.style(f"Cleaned workflow run logs from db success latency: {execution_time:.2f}s", fg="green"))
|
||||
|
||||
|
||||
def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_count: int) -> bool:
|
||||
def _delete_batch(
|
||||
session: Session,
|
||||
workflow_run_repo,
|
||||
workflow_runs: Sequence[WorkflowRun],
|
||||
attempt_count: int,
|
||||
) -> bool:
|
||||
"""Delete a single batch of workflow runs and all related data within a nested transaction."""
|
||||
try:
|
||||
with session.begin_nested():
|
||||
workflow_run_ids = [run.id for run in workflow_runs]
|
||||
message_data = (
|
||||
session.query(Message.id, Message.conversation_id)
|
||||
.where(Message.workflow_run_id.in_(workflow_run_ids))
|
||||
@ -107,11 +123,13 @@ def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_cou
|
||||
if message_id_list:
|
||||
message_related_models = [
|
||||
AppAnnotationHitHistory,
|
||||
DatasetRetrieverResource,
|
||||
MessageAgentThought,
|
||||
MessageChain,
|
||||
MessageFile,
|
||||
MessageAnnotation,
|
||||
MessageFeedback,
|
||||
SavedMessage,
|
||||
]
|
||||
for model in message_related_models:
|
||||
session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False) # type: ignore
|
||||
@ -122,14 +140,6 @@ def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_cou
|
||||
synchronize_session=False
|
||||
)
|
||||
|
||||
session.query(WorkflowAppLog).where(WorkflowAppLog.workflow_run_id.in_(workflow_run_ids)).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
|
||||
session.query(WorkflowNodeExecutionModel).where(
|
||||
WorkflowNodeExecutionModel.workflow_run_id.in_(workflow_run_ids)
|
||||
).delete(synchronize_session=False)
|
||||
|
||||
if conversation_id_list:
|
||||
session.query(ConversationVariable).where(
|
||||
ConversationVariable.conversation_id.in_(conversation_id_list)
|
||||
@ -139,7 +149,22 @@ def _delete_batch(session: Session, workflow_run_ids: Sequence[str], attempt_cou
|
||||
synchronize_session=False
|
||||
)
|
||||
|
||||
session.query(WorkflowRun).where(WorkflowRun.id.in_(workflow_run_ids)).delete(synchronize_session=False)
|
||||
def _delete_node_executions(active_session: Session, runs: Sequence[WorkflowRun]) -> tuple[int, int]:
|
||||
run_ids = [run.id for run in runs]
|
||||
repo = DifyAPIRepositoryFactory.create_api_workflow_node_execution_repository(
|
||||
session_maker=sessionmaker(bind=active_session.get_bind(), expire_on_commit=False)
|
||||
)
|
||||
return repo.delete_by_runs(active_session, run_ids)
|
||||
|
||||
def _delete_trigger_logs(active_session: Session, run_ids: Sequence[str]) -> int:
|
||||
trigger_repo = SQLAlchemyWorkflowTriggerLogRepository(active_session)
|
||||
return trigger_repo.delete_by_run_ids(run_ids)
|
||||
|
||||
workflow_run_repo.delete_runs_with_related(
|
||||
workflow_runs,
|
||||
delete_node_executions=_delete_node_executions,
|
||||
delete_trigger_logs=_delete_trigger_logs,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@ -393,35 +393,3 @@ class BillingService:
|
||||
for item in data:
|
||||
tenant_whitelist.append(item["tenant_id"])
|
||||
return tenant_whitelist
|
||||
|
||||
@classmethod
|
||||
def read_notification(cls, user_email: str):
|
||||
params = {"user_email": user_email}
|
||||
return cls._send_request("GET", "/notification/read", params=params)
|
||||
|
||||
@classmethod
|
||||
def save_notification_user(cls, user_email: str):
|
||||
json = {"user_email": user_email}
|
||||
return cls._send_request("POST", "/notification/new-notification-user", json=json)
|
||||
|
||||
@classmethod
|
||||
def save_notification_users_batch(cls, user_emails: list[str]) -> dict:
|
||||
"""Batch save notification users in chunks of 1000."""
|
||||
chunk_size = 1000
|
||||
total_succeeded = 0
|
||||
failed_chunks: list[dict] = []
|
||||
|
||||
for i in range(0, len(user_emails), chunk_size):
|
||||
chunk = user_emails[i : i + chunk_size]
|
||||
try:
|
||||
resp = cls._send_request("POST", "/notification/batch-notification-users", json={"user_emails": chunk})
|
||||
total_succeeded += resp.get("count", len(chunk))
|
||||
except Exception as e:
|
||||
failed_chunks.append({"offset": i, "count": len(chunk), "error": str(e)})
|
||||
|
||||
return {"succeeded": total_succeeded, "failed_chunks": failed_chunks}
|
||||
|
||||
@classmethod
|
||||
def save_notification_content(cls, content: str):
|
||||
json = {"content": content}
|
||||
return cls._send_request("POST", "/notification/new-notification", json=json)
|
||||
|
||||
@ -22,7 +22,7 @@ from libs.exception import BaseHTTPException
|
||||
from models.human_input import RecipientType
|
||||
from models.model import App, AppMode
|
||||
from repositories.factory import DifyAPIRepositoryFactory
|
||||
from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE, resume_app_execution
|
||||
from tasks.app_generate.workflow_execute_task import resume_app_execution
|
||||
|
||||
|
||||
class Form:
|
||||
@ -230,7 +230,6 @@ class HumanInputService:
|
||||
try:
|
||||
resume_app_execution.apply_async(
|
||||
kwargs={"payload": payload},
|
||||
queue=WORKFLOW_BASED_APP_EXECUTION_QUEUE,
|
||||
)
|
||||
except Exception: # pragma: no cover
|
||||
logger.exception("Failed to enqueue resume task for workflow run %s", workflow_run_id)
|
||||
|
||||
@ -1,10 +1,13 @@
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import time
|
||||
from collections.abc import Sequence
|
||||
from typing import cast
|
||||
|
||||
from sqlalchemy import delete, select
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import delete, select, tuple_
|
||||
from sqlalchemy.engine import CursorResult
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
@ -193,11 +196,15 @@ class MessagesCleanService:
|
||||
self._end_before,
|
||||
)
|
||||
|
||||
max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))
|
||||
|
||||
while True:
|
||||
stats["batches"] += 1
|
||||
batch_start = time.monotonic()
|
||||
|
||||
# Step 1: Fetch a batch of messages using cursor
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
fetch_messages_start = time.monotonic()
|
||||
msg_stmt = (
|
||||
select(Message.id, Message.app_id, Message.created_at)
|
||||
.where(Message.created_at < self._end_before)
|
||||
@ -209,13 +216,13 @@ class MessagesCleanService:
|
||||
msg_stmt = msg_stmt.where(Message.created_at >= self._start_from)
|
||||
|
||||
# Apply cursor condition: (created_at, id) > (last_created_at, last_message_id)
|
||||
# This translates to:
|
||||
# created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id)
|
||||
if _cursor:
|
||||
# Continuing from previous batch
|
||||
msg_stmt = msg_stmt.where(
|
||||
(Message.created_at > _cursor[0])
|
||||
| ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1]))
|
||||
tuple_(Message.created_at, Message.id)
|
||||
> tuple_(
|
||||
sa.literal(_cursor[0], type_=sa.DateTime()),
|
||||
sa.literal(_cursor[1], type_=Message.id.type),
|
||||
)
|
||||
)
|
||||
|
||||
raw_messages = list(session.execute(msg_stmt).all())
|
||||
@ -223,6 +230,12 @@ class MessagesCleanService:
|
||||
SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
|
||||
for msg_id, app_id, msg_created_at in raw_messages
|
||||
]
|
||||
logger.info(
|
||||
"clean_messages (batch %s): fetched %s messages in %sms",
|
||||
stats["batches"],
|
||||
len(messages),
|
||||
int((time.monotonic() - fetch_messages_start) * 1000),
|
||||
)
|
||||
|
||||
# Track total messages fetched across all batches
|
||||
stats["total_messages"] += len(messages)
|
||||
@ -241,8 +254,16 @@ class MessagesCleanService:
|
||||
logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
|
||||
continue
|
||||
|
||||
fetch_apps_start = time.monotonic()
|
||||
app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
|
||||
apps = list(session.execute(app_stmt).all())
|
||||
logger.info(
|
||||
"clean_messages (batch %s): fetched %s apps for %s app_ids in %sms",
|
||||
stats["batches"],
|
||||
len(apps),
|
||||
len(app_ids),
|
||||
int((time.monotonic() - fetch_apps_start) * 1000),
|
||||
)
|
||||
|
||||
if not apps:
|
||||
logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
|
||||
@ -252,7 +273,15 @@ class MessagesCleanService:
|
||||
app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}
|
||||
|
||||
# Step 3: Delegate to policy to determine which messages to delete
|
||||
policy_start = time.monotonic()
|
||||
message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
|
||||
logger.info(
|
||||
"clean_messages (batch %s): policy selected %s/%s messages in %sms",
|
||||
stats["batches"],
|
||||
len(message_ids_to_delete),
|
||||
len(messages),
|
||||
int((time.monotonic() - policy_start) * 1000),
|
||||
)
|
||||
|
||||
if not message_ids_to_delete:
|
||||
logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
|
||||
@ -263,14 +292,20 @@ class MessagesCleanService:
|
||||
# Step 4: Batch delete messages and their relations
|
||||
if not self._dry_run:
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
delete_relations_start = time.monotonic()
|
||||
# Delete related records first
|
||||
self._batch_delete_message_relations(session, message_ids_to_delete)
|
||||
delete_relations_ms = int((time.monotonic() - delete_relations_start) * 1000)
|
||||
|
||||
# Delete messages
|
||||
delete_messages_start = time.monotonic()
|
||||
delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
|
||||
delete_result = cast(CursorResult, session.execute(delete_stmt))
|
||||
messages_deleted = delete_result.rowcount
|
||||
delete_messages_ms = int((time.monotonic() - delete_messages_start) * 1000)
|
||||
commit_start = time.monotonic()
|
||||
session.commit()
|
||||
commit_ms = int((time.monotonic() - commit_start) * 1000)
|
||||
|
||||
stats["total_deleted"] += messages_deleted
|
||||
|
||||
@ -280,6 +315,19 @@ class MessagesCleanService:
|
||||
len(messages),
|
||||
messages_deleted,
|
||||
)
|
||||
logger.info(
|
||||
"clean_messages (batch %s): relations %sms, messages %sms, commit %sms, batch total %sms",
|
||||
stats["batches"],
|
||||
delete_relations_ms,
|
||||
delete_messages_ms,
|
||||
commit_ms,
|
||||
int((time.monotonic() - batch_start) * 1000),
|
||||
)
|
||||
|
||||
# Random sleep between batches to avoid overwhelming the database
|
||||
sleep_ms = random.uniform(0, max_batch_interval_ms) # noqa: S311
|
||||
logger.info("clean_messages (batch %s): sleeping for %.2fms", stats["batches"], sleep_ms)
|
||||
time.sleep(sleep_ms / 1000)
|
||||
else:
|
||||
# Log random sample of message IDs that would be deleted (up to 10)
|
||||
sample_size = min(10, len(message_ids_to_delete))
|
||||
|
||||
@@ -1,5 +1,8 @@
import datetime
import logging
import os
import random
import time
from collections.abc import Iterable, Sequence

import click
@@ -72,7 +75,12 @@ class WorkflowRunCleanup:
batch_index = 0
last_seen: tuple[datetime.datetime, str] | None = None

max_batch_interval_ms = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", 200))

while True:
batch_start = time.monotonic()

fetch_start = time.monotonic()
run_rows = self.workflow_run_repo.get_runs_batch_by_time_range(
start_from=self.window_start,
end_before=self.window_end,
@@ -80,12 +88,30 @@
batch_size=self.batch_size,
)
if not run_rows:
logger.info("workflow_run_cleanup (batch #%s): no more rows to process", batch_index + 1)
break

batch_index += 1
last_seen = (run_rows[-1].created_at, run_rows[-1].id)
logger.info(
"workflow_run_cleanup (batch #%s): fetched %s rows in %sms",
batch_index,
len(run_rows),
int((time.monotonic() - fetch_start) * 1000),
)

tenant_ids = {row.tenant_id for row in run_rows}

filter_start = time.monotonic()
free_tenants = self._filter_free_tenants(tenant_ids)
logger.info(
"workflow_run_cleanup (batch #%s): filtered %s free tenants from %s tenants in %sms",
batch_index,
len(free_tenants),
len(tenant_ids),
int((time.monotonic() - filter_start) * 1000),
)

free_runs = [row for row in run_rows if row.tenant_id in free_tenants]
paid_or_skipped = len(run_rows) - len(free_runs)

@@ -104,11 +130,17 @@ class WorkflowRunCleanup:
total_runs_targeted += len(free_runs)

if self.dry_run:
count_start = time.monotonic()
batch_counts = self.workflow_run_repo.count_runs_with_related(
free_runs,
count_node_executions=self._count_node_executions,
count_trigger_logs=self._count_trigger_logs,
)
logger.info(
"workflow_run_cleanup (batch #%s, dry_run): counted related records in %sms",
batch_index,
int((time.monotonic() - count_start) * 1000),
)
if related_totals is not None:
for key in related_totals:
related_totals[key] += batch_counts.get(key, 0)
@@ -120,14 +152,21 @@ class WorkflowRunCleanup:
fg="yellow",
)
)
logger.info(
"workflow_run_cleanup (batch #%s, dry_run): batch total %sms",
batch_index,
int((time.monotonic() - batch_start) * 1000),
)
continue

try:
delete_start = time.monotonic()
counts = self.workflow_run_repo.delete_runs_with_related(
free_runs,
delete_node_executions=self._delete_node_executions,
delete_trigger_logs=self._delete_trigger_logs,
)
delete_ms = int((time.monotonic() - delete_start) * 1000)
except Exception:
logger.exception("Failed to delete workflow runs batch ending at %s", last_seen[0])
raise
@@ -143,6 +182,17 @@ class WorkflowRunCleanup:
fg="green",
)
)
logger.info(
"workflow_run_cleanup (batch #%s): delete %sms, batch total %sms",
batch_index,
delete_ms,
int((time.monotonic() - batch_start) * 1000),
)

# Random sleep between batches to avoid overwhelming the database
sleep_ms = random.uniform(0, max_batch_interval_ms)  # noqa: S311
logger.info("workflow_run_cleanup (batch #%s): sleeping for %.2fms", batch_index, sleep_ms)
time.sleep(sleep_ms / 1000)

if self.dry_run:
if self.window_start:

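The cleanup loop above pages through workflow runs with a keyset cursor: each batch is ordered by (created_at, id), the last row becomes last_seen, and the next fetch resumes strictly after it, so no OFFSET scans are needed. A standalone sketch of such a query, assuming a WorkflowRun model with created_at/id columns and an open SQLAlchemy session (get_runs_batch_by_time_range is the repository method shown in the diff; this is only an illustration of the cursor logic):

from sqlalchemy import select

def fetch_runs_batch(session, WorkflowRun, window_start, window_end, last_seen, batch_size):
    """Keyset-paginated fetch: resume strictly after the (created_at, id) cursor of the previous batch."""
    stmt = (
        select(WorkflowRun)
        .where(WorkflowRun.created_at >= window_start)
        .where(WorkflowRun.created_at < window_end)
        .order_by(WorkflowRun.created_at, WorkflowRun.id)
        .limit(batch_size)
    )
    if last_seen is not None:
        created_cursor, id_cursor = last_seen
        # row-value comparison expanded into an OR so it works on any backend
        stmt = stmt.where(
            (WorkflowRun.created_at > created_cursor)
            | ((WorkflowRun.created_at == created_cursor) & (WorkflowRun.id > id_cursor))
        )
    return session.scalars(stmt).all()
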
@@ -129,15 +129,15 @@ def build_workflow_event_stream(
return

try:
event = buffer_state.queue.get(timeout=0.1)
event = buffer_state.queue.get(timeout=1)
except queue.Empty:
current_time = time.time()
if current_time - last_msg_time > idle_timeout:
logger.debug(
"No workflow events received for %s seconds, keeping stream open",
"Idle timeout of %s seconds reached, closing workflow event stream.",
idle_timeout,
)
last_msg_time = current_time
return
if current_time - last_ping_time >= ping_interval:
yield StreamEvent.PING.value
last_ping_time = current_time
@@ -405,7 +405,7 @@ def _start_buffering(subscription) -> BufferState:
dropped_count = 0
try:
while not buffer_state.stop_event.is_set():
msg = subscription.receive(timeout=0.1)
msg = subscription.receive(timeout=1)
if msg is None:
continue
event = _parse_event_message(msg)

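The change above lengthens the queue poll to one second and, instead of merely logging, closes the event stream once nothing has arrived within idle_timeout, while still emitting periodic pings as keepalives. A self-contained sketch of that generator shape, with illustrative names (event_stream, PING) rather than the module's actual ones:

import queue
import time

PING = "event: ping\n\n"

def event_stream(q: "queue.Queue[str]", idle_timeout: float = 60.0, ping_interval: float = 10.0):
    """Yield events from a queue; ping periodically and close after an idle timeout."""
    last_msg_time = time.time()
    last_ping_time = time.time()
    while True:
        try:
            event = q.get(timeout=1)
        except queue.Empty:
            now = time.time()
            if now - last_msg_time > idle_timeout:
                return  # close the stream instead of keeping it open forever
            if now - last_ping_time >= ping_interval:
                yield PING
                last_ping_time = now
            continue
        last_msg_time = time.time()
        yield event
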
@@ -51,7 +51,7 @@ def _patch_redis_clients_on_loaded_modules():
continue
if hasattr(module, "redis_client"):
module.redis_client = redis_mock
if hasattr(module, "pubsub_redis_client"):
if hasattr(module, "_pubsub_redis_client"):
module.pubsub_redis_client = redis_mock


@@ -72,7 +72,7 @@ def _patch_redis_clients():

with (
patch.object(ext_redis, "redis_client", redis_mock),
patch.object(ext_redis, "pubsub_redis_client", redis_mock),
patch.object(ext_redis, "_pubsub_redis_client", redis_mock),
):
_patch_redis_clients_on_loaded_modules()
yield

@@ -0,0 +1,34 @@
from datetime import datetime
from types import SimpleNamespace
from unittest.mock import MagicMock, patch

from controllers.console.app.conversation import _get_conversation


def test_get_conversation_mark_read_keeps_updated_at_unchanged():
app_model = SimpleNamespace(id="app-id")
account = SimpleNamespace(id="account-id")
conversation = MagicMock()
conversation.id = "conversation-id"

with (
patch("controllers.console.app.conversation.current_account_with_tenant", return_value=(account, None)),
patch("controllers.console.app.conversation.naive_utc_now", return_value=datetime(2026, 2, 9, 0, 0, 0)),
patch("controllers.console.app.conversation.db.session") as mock_session,
):
mock_session.query.return_value.where.return_value.first.return_value = conversation

_get_conversation(app_model, "conversation-id")

statement = mock_session.execute.call_args[0][0]
compiled = statement.compile()
sql_text = str(compiled).lower()
compact_sql_text = sql_text.replace(" ", "")
params = compiled.params

assert "updated_at=current_timestamp" not in compact_sql_text
assert "updated_at=conversations.updated_at" in compact_sql_text
assert "read_at=:read_at" in compact_sql_text
assert "read_account_id=:read_account_id" in compact_sql_text
assert params["read_at"] == datetime(2026, 2, 9, 0, 0, 0)
assert params["read_account_id"] == "account-id"

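The test above pins down the intended SQL: marking a conversation as read must set read_at and read_account_id while leaving updated_at untouched, which is achieved by explicitly assigning the column to its own current value so any onupdate default is bypassed. A hedged sketch of an UPDATE built that way, with the Conversation model and session passed in as assumptions (the actual helper in the diff is _get_conversation):

from sqlalchemy import update

def mark_conversation_read(session, Conversation, conversation_id: str, account_id: str, now) -> None:
    """Set read_at/read_account_id without bumping updated_at (pins it to its current value)."""
    stmt = (
        update(Conversation)
        .where(Conversation.id == conversation_id)
        .values(
            read_at=now,
            read_account_id=account_id,
            # explicitly keep the existing value so an onupdate=... default is not applied
            updated_at=Conversation.updated_at,
        )
    )
    session.execute(stmt)

The column-to-itself assignment is what renders as "updated_at=conversations.updated_at" in the compiled SQL that the test asserts on.
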
@ -1,92 +1,286 @@
|
||||
import builtins
|
||||
"""Tests for remote file upload API endpoints using Flask-RESTX."""
|
||||
|
||||
import contextlib
|
||||
from datetime import datetime
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import patch
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
from flask import Flask
|
||||
from flask.views import MethodView
|
||||
|
||||
from extensions import ext_fastopenapi
|
||||
|
||||
if not hasattr(builtins, "MethodView"):
|
||||
builtins.MethodView = MethodView # type: ignore[attr-defined]
|
||||
from flask import Flask, g
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def app() -> Flask:
|
||||
"""Create Flask app for testing."""
|
||||
app = Flask(__name__)
|
||||
app.config["TESTING"] = True
|
||||
app.config["SECRET_KEY"] = "test-secret-key"
|
||||
return app
|
||||
|
||||
|
||||
def test_console_remote_files_fastopenapi_get_info(app: Flask):
|
||||
ext_fastopenapi.init_app(app)
|
||||
@pytest.fixture
|
||||
def client(app):
|
||||
"""Create test client with console blueprint registered."""
|
||||
from controllers.console import bp
|
||||
|
||||
response = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("HEAD", "http://example.com/file.txt"),
|
||||
headers={"Content-Type": "text/plain", "Content-Length": "10"},
|
||||
)
|
||||
|
||||
with patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response):
|
||||
client = app.test_client()
|
||||
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
|
||||
resp = client.get(f"/console/api/remote-files/{encoded_url}")
|
||||
|
||||
assert resp.status_code == 200
|
||||
assert resp.get_json() == {"file_type": "text/plain", "file_length": 10}
|
||||
app.register_blueprint(bp)
|
||||
return app.test_client()
|
||||
|
||||
|
||||
def test_console_remote_files_fastopenapi_upload(app: Flask):
|
||||
ext_fastopenapi.init_app(app)
|
||||
@pytest.fixture
|
||||
def mock_account():
|
||||
"""Create a mock account for testing."""
|
||||
from models import Account
|
||||
|
||||
head_response = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("GET", "http://example.com/file.txt"),
|
||||
content=b"hello",
|
||||
)
|
||||
file_info = SimpleNamespace(
|
||||
extension="txt",
|
||||
size=5,
|
||||
filename="file.txt",
|
||||
mimetype="text/plain",
|
||||
)
|
||||
uploaded = SimpleNamespace(
|
||||
id="file-id",
|
||||
name="file.txt",
|
||||
size=5,
|
||||
extension="txt",
|
||||
mime_type="text/plain",
|
||||
created_by="user-id",
|
||||
created_at=datetime(2024, 1, 1),
|
||||
)
|
||||
account = Mock(spec=Account)
|
||||
account.id = "test-account-id"
|
||||
account.current_tenant_id = "test-tenant-id"
|
||||
return account
|
||||
|
||||
with (
|
||||
patch("controllers.console.remote_files.db", new=SimpleNamespace(engine=object())),
|
||||
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
|
||||
patch("controllers.console.remote_files.helpers.guess_file_info_from_response", return_value=file_info),
|
||||
patch("controllers.console.remote_files.FileService.is_file_size_within_limit", return_value=True),
|
||||
patch("controllers.console.remote_files.FileService.__init__", return_value=None),
|
||||
patch("controllers.console.remote_files.current_account_with_tenant", return_value=(object(), "tenant-id")),
|
||||
patch("controllers.console.remote_files.FileService.upload_file", return_value=uploaded),
|
||||
patch("controllers.console.remote_files.file_helpers.get_signed_file_url", return_value="signed-url"),
|
||||
):
|
||||
client = app.test_client()
|
||||
resp = client.post(
|
||||
"/console/api/remote-files/upload",
|
||||
json={"url": "http://example.com/file.txt"},
|
||||
|
||||
@pytest.fixture
|
||||
def auth_ctx(app, mock_account):
|
||||
"""Context manager to set auth/tenant context in flask.g for a request."""
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _ctx():
|
||||
with app.test_request_context():
|
||||
g._login_user = mock_account
|
||||
g._current_tenant = mock_account.current_tenant_id
|
||||
yield
|
||||
|
||||
return _ctx
|
||||
|
||||
|
||||
class TestGetRemoteFileInfo:
|
||||
"""Test GET /console/api/remote-files/<path:url> endpoint."""
|
||||
|
||||
def test_get_remote_file_info_success(self, app, client, mock_account):
|
||||
"""Test successful retrieval of remote file info."""
|
||||
response = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("HEAD", "http://example.com/file.txt"),
|
||||
headers={"Content-Type": "text/plain", "Content-Length": "1024"},
|
||||
)
|
||||
|
||||
assert resp.status_code == 201
|
||||
assert resp.get_json() == {
|
||||
"id": "file-id",
|
||||
"name": "file.txt",
|
||||
"size": 5,
|
||||
"extension": "txt",
|
||||
"url": "signed-url",
|
||||
"mime_type": "text/plain",
|
||||
"created_by": "user-id",
|
||||
"created_at": int(uploaded.created_at.timestamp()),
|
||||
}
|
||||
with (
|
||||
patch(
|
||||
"controllers.console.remote_files.current_account_with_tenant",
|
||||
return_value=(mock_account, "test-tenant-id"),
|
||||
),
|
||||
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=response),
|
||||
patch("libs.login.check_csrf_token", return_value=None),
|
||||
):
|
||||
with app.test_request_context():
|
||||
g._login_user = mock_account
|
||||
g._current_tenant = mock_account.current_tenant_id
|
||||
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.txt"
|
||||
resp = client.get(f"/console/api/remote-files/{encoded_url}")
|
||||
|
||||
assert resp.status_code == 200
|
||||
data = resp.get_json()
|
||||
assert data["file_type"] == "text/plain"
|
||||
assert data["file_length"] == 1024
|
||||
|
||||
def test_get_remote_file_info_fallback_to_get_on_head_failure(self, app, client, mock_account):
|
||||
"""Test fallback to GET when HEAD returns non-200 status."""
|
||||
head_response = httpx.Response(
|
||||
404,
|
||||
request=httpx.Request("HEAD", "http://example.com/file.pdf"),
|
||||
)
|
||||
get_response = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("GET", "http://example.com/file.pdf"),
|
||||
headers={"Content-Type": "application/pdf", "Content-Length": "2048"},
|
||||
)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"controllers.console.remote_files.current_account_with_tenant",
|
||||
return_value=(mock_account, "test-tenant-id"),
|
||||
),
|
||||
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_response),
|
||||
patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_response),
|
||||
patch("libs.login.check_csrf_token", return_value=None),
|
||||
):
|
||||
with app.test_request_context():
|
||||
g._login_user = mock_account
|
||||
g._current_tenant = mock_account.current_tenant_id
|
||||
encoded_url = "http%3A%2F%2Fexample.com%2Ffile.pdf"
|
||||
resp = client.get(f"/console/api/remote-files/{encoded_url}")
|
||||
|
||||
assert resp.status_code == 200
|
||||
data = resp.get_json()
|
||||
assert data["file_type"] == "application/pdf"
|
||||
assert data["file_length"] == 2048
|
||||
|
||||
|
||||
class TestRemoteFileUpload:
|
||||
"""Test POST /console/api/remote-files/upload endpoint."""
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("head_status", "use_get"),
|
||||
[
|
||||
(200, False), # HEAD succeeds
|
||||
(405, True), # HEAD fails -> fallback GET
|
||||
],
|
||||
)
|
||||
def test_upload_remote_file_success_paths(self, client, mock_account, auth_ctx, head_status, use_get):
|
||||
url = "http://example.com/file.pdf"
|
||||
head_resp = httpx.Response(
|
||||
head_status,
|
||||
request=httpx.Request("HEAD", url),
|
||||
headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
|
||||
)
|
||||
get_resp = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("GET", url),
|
||||
headers={"Content-Type": "application/pdf", "Content-Length": "1024"},
|
||||
content=b"file content",
|
||||
)
|
||||
|
||||
file_info = SimpleNamespace(
|
||||
extension="pdf",
|
||||
size=1024,
|
||||
filename="file.pdf",
|
||||
mimetype="application/pdf",
|
||||
)
|
||||
uploaded_file = SimpleNamespace(
|
||||
id="uploaded-file-id",
|
||||
name="file.pdf",
|
||||
size=1024,
|
||||
extension="pdf",
|
||||
mime_type="application/pdf",
|
||||
created_by="test-account-id",
|
||||
created_at=datetime(2024, 1, 1, 12, 0, 0),
|
||||
)
|
||||
|
||||
with (
|
||||
patch(
|
||||
"controllers.console.remote_files.current_account_with_tenant",
|
||||
return_value=(mock_account, "test-tenant-id"),
|
||||
),
|
||||
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp) as p_head,
|
||||
patch("controllers.console.remote_files.ssrf_proxy.get", return_value=get_resp) as p_get,
|
||||
patch(
|
||||
"controllers.console.remote_files.helpers.guess_file_info_from_response",
|
||||
return_value=file_info,
|
||||
),
|
||||
patch(
|
||||
"controllers.console.remote_files.FileService.is_file_size_within_limit",
|
||||
return_value=True,
|
||||
),
|
||||
patch("controllers.console.remote_files.db", spec=["engine"]),
|
||||
patch("controllers.console.remote_files.FileService") as mock_file_service,
|
||||
patch(
|
||||
"controllers.console.remote_files.file_helpers.get_signed_file_url",
|
||||
return_value="http://example.com/signed-url",
|
||||
),
|
||||
patch("libs.login.check_csrf_token", return_value=None),
|
||||
):
|
||||
mock_file_service.return_value.upload_file.return_value = uploaded_file
|
||||
|
||||
with auth_ctx():
|
||||
resp = client.post(
|
||||
"/console/api/remote-files/upload",
|
||||
json={"url": url},
|
||||
)
|
||||
|
||||
assert resp.status_code == 201
|
||||
p_head.assert_called_once()
|
||||
# GET is used either for fallback (HEAD fails) or to fetch content after HEAD succeeds
|
||||
p_get.assert_called_once()
|
||||
mock_file_service.return_value.upload_file.assert_called_once()
|
||||
|
||||
data = resp.get_json()
|
||||
assert data["id"] == "uploaded-file-id"
|
||||
assert data["name"] == "file.pdf"
|
||||
assert data["size"] == 1024
|
||||
assert data["extension"] == "pdf"
|
||||
assert data["url"] == "http://example.com/signed-url"
|
||||
assert data["mime_type"] == "application/pdf"
|
||||
assert data["created_by"] == "test-account-id"
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("size_ok", "raises", "expected_status", "expected_msg"),
|
||||
[
|
||||
# When size check fails in controller, API returns 413 with message "File size exceeded..."
|
||||
(False, None, 413, "file size exceeded"),
|
||||
# When service raises unsupported type, controller maps to 415 with message "File type not allowed."
|
||||
(True, "unsupported", 415, "file type not allowed"),
|
||||
],
|
||||
)
|
||||
def test_upload_remote_file_errors(
|
||||
self, client, mock_account, auth_ctx, size_ok, raises, expected_status, expected_msg
|
||||
):
|
||||
url = "http://example.com/x.pdf"
|
||||
head_resp = httpx.Response(
|
||||
200,
|
||||
request=httpx.Request("HEAD", url),
|
||||
headers={"Content-Type": "application/pdf", "Content-Length": "9"},
|
||||
)
|
||||
file_info = SimpleNamespace(extension="pdf", size=9, filename="x.pdf", mimetype="application/pdf")
|
||||
|
||||
with (
|
||||
patch(
|
||||
"controllers.console.remote_files.current_account_with_tenant",
|
||||
return_value=(mock_account, "test-tenant-id"),
|
||||
),
|
||||
patch("controllers.console.remote_files.ssrf_proxy.head", return_value=head_resp),
|
||||
patch(
|
||||
"controllers.console.remote_files.helpers.guess_file_info_from_response",
|
||||
return_value=file_info,
|
||||
),
|
||||
patch(
|
||||
"controllers.console.remote_files.FileService.is_file_size_within_limit",
|
||||
return_value=size_ok,
|
||||
),
|
||||
patch("controllers.console.remote_files.db", spec=["engine"]),
|
||||
patch("libs.login.check_csrf_token", return_value=None),
|
||||
):
|
||||
if raises == "unsupported":
|
||||
from services.errors.file import UnsupportedFileTypeError
|
||||
|
||||
with patch("controllers.console.remote_files.FileService") as mock_file_service:
|
||||
mock_file_service.return_value.upload_file.side_effect = UnsupportedFileTypeError("bad")
|
||||
with auth_ctx():
|
||||
resp = client.post(
|
||||
"/console/api/remote-files/upload",
|
||||
json={"url": url},
|
||||
)
|
||||
else:
|
||||
with auth_ctx():
|
||||
resp = client.post(
|
||||
"/console/api/remote-files/upload",
|
||||
json={"url": url},
|
||||
)
|
||||
|
||||
assert resp.status_code == expected_status
|
||||
data = resp.get_json()
|
||||
msg = (data.get("error") or {}).get("message") or data.get("message", "")
|
||||
assert expected_msg in msg.lower()
|
||||
|
||||
def test_upload_remote_file_fetch_failure(self, client, mock_account, auth_ctx):
|
||||
"""Test upload when fetching of remote file fails."""
|
||||
with (
|
||||
patch(
|
||||
"controllers.console.remote_files.current_account_with_tenant",
|
||||
return_value=(mock_account, "test-tenant-id"),
|
||||
),
|
||||
patch(
|
||||
"controllers.console.remote_files.ssrf_proxy.head",
|
||||
side_effect=httpx.RequestError("Connection failed"),
|
||||
),
|
||||
patch("libs.login.check_csrf_token", return_value=None),
|
||||
):
|
||||
with auth_ctx():
|
||||
resp = client.post(
|
||||
"/console/api/remote-files/upload",
|
||||
json={"url": "http://unreachable.com/file.pdf"},
|
||||
)
|
||||
|
||||
assert resp.status_code == 400
|
||||
data = resp.get_json()
|
||||
msg = (data.get("error") or {}).get("message") or data.get("message", "")
|
||||
assert "failed to fetch" in msg.lower()
|
||||
|
||||
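These tests exercise a HEAD-then-GET probe: file metadata is read from a HEAD request, and a GET is issued when HEAD is rejected or the body is actually needed. A rough sketch of the probing logic using plain httpx (the controller goes through ssrf_proxy; the function name and return shape here are illustrative only):

import httpx

def get_remote_file_info(url: str, timeout: float = 10.0) -> dict[str, int | str]:
    """Probe a remote file with HEAD; fall back to GET when HEAD is rejected (404/405/...)."""
    resp = httpx.head(url, follow_redirects=True, timeout=timeout)
    if resp.status_code != 200:
        resp = httpx.get(url, follow_redirects=True, timeout=timeout)
        resp.raise_for_status()
    return {
        "file_type": resp.headers.get("Content-Type", "application/octet-stream"),
        "file_length": int(resp.headers.get("Content-Length", -1)),
    }
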
@ -25,15 +25,19 @@ class TestMessageCycleManagerOptimization:
|
||||
task_state = Mock()
|
||||
return MessageCycleManager(application_generate_entity=mock_application_generate_entity, task_state=task_state)
|
||||
|
||||
def test_get_message_event_type_with_message_file(self, message_cycle_manager):
|
||||
"""Test get_message_event_type returns MESSAGE_FILE when message has files."""
|
||||
def test_get_message_event_type_with_assistant_file(self, message_cycle_manager):
|
||||
"""Test get_message_event_type returns MESSAGE_FILE when message has assistant-generated files.
|
||||
|
||||
This ensures that AI-generated images (belongs_to='assistant') trigger the MESSAGE_FILE event,
|
||||
allowing the frontend to properly display generated image files with url field.
|
||||
"""
|
||||
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
|
||||
# Setup mock session and message file
|
||||
mock_session = Mock()
|
||||
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
|
||||
|
||||
mock_message_file = Mock()
|
||||
# Current implementation uses session.scalar(select(...))
|
||||
mock_message_file.belongs_to = "assistant"
|
||||
mock_session.scalar.return_value = mock_message_file
|
||||
|
||||
# Execute
|
||||
@ -44,6 +48,31 @@ class TestMessageCycleManagerOptimization:
|
||||
assert result == StreamEvent.MESSAGE_FILE
|
||||
mock_session.scalar.assert_called_once()
|
||||
|
||||
def test_get_message_event_type_with_user_file(self, message_cycle_manager):
|
||||
"""Test get_message_event_type returns MESSAGE when message only has user-uploaded files.
|
||||
|
||||
This is a regression test for the issue where user-uploaded images (belongs_to='user')
|
||||
caused the LLM text response to be incorrectly tagged with MESSAGE_FILE event,
|
||||
resulting in broken images in the chat UI. The query filters for belongs_to='assistant',
|
||||
so when only user files exist, the database query returns None, resulting in MESSAGE event type.
|
||||
"""
|
||||
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
|
||||
# Setup mock session and message file
|
||||
mock_session = Mock()
|
||||
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
|
||||
|
||||
# When querying for assistant files with only user files present, return None
|
||||
# (simulates database query with belongs_to='assistant' filter returning no results)
|
||||
mock_session.scalar.return_value = None
|
||||
|
||||
# Execute
|
||||
with current_app.app_context():
|
||||
result = message_cycle_manager.get_message_event_type("test-message-id")
|
||||
|
||||
# Assert
|
||||
assert result == StreamEvent.MESSAGE
|
||||
mock_session.scalar.assert_called_once()
|
||||
|
||||
def test_get_message_event_type_without_message_file(self, message_cycle_manager):
|
||||
"""Test get_message_event_type returns MESSAGE when message has no files."""
|
||||
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
|
||||
@ -69,7 +98,7 @@ class TestMessageCycleManagerOptimization:
|
||||
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
|
||||
|
||||
mock_message_file = Mock()
|
||||
# Current implementation uses session.scalar(select(...))
|
||||
mock_message_file.belongs_to = "assistant"
|
||||
mock_session.scalar.return_value = mock_message_file
|
||||
|
||||
# Execute: compute event type once, then pass to message_to_stream_response
|
||||
|
||||
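The test docstrings above describe the fix: only files with belongs_to='assistant' should switch the stream event to MESSAGE_FILE; user uploads must leave it as MESSAGE. A minimal sketch of that lookup, assuming a MessageFile model and a StreamEvent enum passed in (the real method lives on MessageCycleManager and uses session_factory):

from sqlalchemy import select

def get_message_event_type(session, MessageFile, StreamEvent, message_id: str):
    """Return MESSAGE_FILE only when an assistant-generated file exists for the message."""
    stmt = (
        select(MessageFile)
        .where(MessageFile.message_id == message_id)
        .where(MessageFile.belongs_to == "assistant")  # user uploads must not switch the event type
        .limit(1)
    )
    message_file = session.scalar(stmt)
    return StreamEvent.MESSAGE_FILE if message_file is not None else StreamEvent.MESSAGE
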
@@ -496,6 +496,9 @@ class TestSchemaResolverClass:
avg_time_no_cache = sum(results1) / len(results1)

# Second run (with cache) - run multiple times
# Warm up cache first
resolve_dify_schema_refs(schema)

results2 = []
for _ in range(3):
start = time.perf_counter()

api/tests/unit_tests/core/tools/test_base_tool.py (new file, 211 lines)
@ -0,0 +1,211 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Generator
|
||||
from dataclasses import dataclass
|
||||
from typing import Any, cast
|
||||
|
||||
from core.app.entities.app_invoke_entities import InvokeFrom
|
||||
from core.tools.__base.tool import Tool
|
||||
from core.tools.__base.tool_runtime import ToolRuntime
|
||||
from core.tools.entities.common_entities import I18nObject
|
||||
from core.tools.entities.tool_entities import ToolEntity, ToolIdentity, ToolInvokeMessage, ToolProviderType
|
||||
|
||||
|
||||
class DummyCastType:
|
||||
def cast_value(self, value: Any) -> str:
|
||||
return f"cast:{value}"
|
||||
|
||||
|
||||
@dataclass
|
||||
class DummyParameter:
|
||||
name: str
|
||||
type: DummyCastType
|
||||
form: str = "llm"
|
||||
required: bool = False
|
||||
default: Any = None
|
||||
options: list[Any] | None = None
|
||||
llm_description: str | None = None
|
||||
|
||||
|
||||
class DummyTool(Tool):
|
||||
def __init__(self, entity: ToolEntity, runtime: ToolRuntime):
|
||||
super().__init__(entity=entity, runtime=runtime)
|
||||
self.result: ToolInvokeMessage | list[ToolInvokeMessage] | Generator[ToolInvokeMessage, None, None] = (
|
||||
self.create_text_message("default")
|
||||
)
|
||||
self.runtime_parameter_overrides: list[Any] | None = None
|
||||
self.last_invocation: dict[str, Any] | None = None
|
||||
|
||||
def tool_provider_type(self) -> ToolProviderType:
|
||||
return ToolProviderType.BUILT_IN
|
||||
|
||||
def _invoke(
|
||||
self,
|
||||
user_id: str,
|
||||
tool_parameters: dict[str, Any],
|
||||
conversation_id: str | None = None,
|
||||
app_id: str | None = None,
|
||||
message_id: str | None = None,
|
||||
) -> ToolInvokeMessage | list[ToolInvokeMessage] | Generator[ToolInvokeMessage, None, None]:
|
||||
self.last_invocation = {
|
||||
"user_id": user_id,
|
||||
"tool_parameters": tool_parameters,
|
||||
"conversation_id": conversation_id,
|
||||
"app_id": app_id,
|
||||
"message_id": message_id,
|
||||
}
|
||||
return self.result
|
||||
|
||||
def get_runtime_parameters(
|
||||
self,
|
||||
conversation_id: str | None = None,
|
||||
app_id: str | None = None,
|
||||
message_id: str | None = None,
|
||||
):
|
||||
if self.runtime_parameter_overrides is not None:
|
||||
return self.runtime_parameter_overrides
|
||||
return super().get_runtime_parameters(
|
||||
conversation_id=conversation_id,
|
||||
app_id=app_id,
|
||||
message_id=message_id,
|
||||
)
|
||||
|
||||
|
||||
def _build_tool(runtime: ToolRuntime | None = None) -> DummyTool:
|
||||
entity = ToolEntity(
|
||||
identity=ToolIdentity(author="test", name="dummy", label=I18nObject(en_US="dummy"), provider="test"),
|
||||
parameters=[],
|
||||
description=None,
|
||||
has_runtime_parameters=False,
|
||||
)
|
||||
runtime = runtime or ToolRuntime(tenant_id="tenant-1", invoke_from=InvokeFrom.DEBUGGER, runtime_parameters={})
|
||||
return DummyTool(entity=entity, runtime=runtime)
|
||||
|
||||
|
||||
def test_invoke_supports_single_message_and_parameter_casting():
|
||||
runtime = ToolRuntime(
|
||||
tenant_id="tenant-1",
|
||||
invoke_from=InvokeFrom.DEBUGGER,
|
||||
runtime_parameters={"from_runtime": "runtime-value"},
|
||||
)
|
||||
tool = _build_tool(runtime)
|
||||
tool.entity.parameters = cast(
|
||||
Any,
|
||||
[
|
||||
DummyParameter(name="unused", type=DummyCastType()),
|
||||
DummyParameter(name="age", type=DummyCastType()),
|
||||
],
|
||||
)
|
||||
tool.result = tool.create_text_message("ok")
|
||||
|
||||
messages = list(
|
||||
tool.invoke(
|
||||
user_id="user-1",
|
||||
tool_parameters={"age": "18", "raw": "keep"},
|
||||
conversation_id="conv-1",
|
||||
app_id="app-1",
|
||||
message_id="msg-1",
|
||||
)
|
||||
)
|
||||
|
||||
assert len(messages) == 1
|
||||
assert messages[0].message.text == "ok"
|
||||
assert tool.last_invocation == {
|
||||
"user_id": "user-1",
|
||||
"tool_parameters": {"age": "cast:18", "raw": "keep", "from_runtime": "runtime-value"},
|
||||
"conversation_id": "conv-1",
|
||||
"app_id": "app-1",
|
||||
"message_id": "msg-1",
|
||||
}
|
||||
|
||||
|
||||
def test_invoke_supports_list_and_generator_results():
|
||||
tool = _build_tool()
|
||||
tool.result = [tool.create_text_message("a"), tool.create_text_message("b")]
|
||||
list_messages = list(tool.invoke(user_id="user-1", tool_parameters={}))
|
||||
assert [msg.message.text for msg in list_messages] == ["a", "b"]
|
||||
|
||||
def _message_generator() -> Generator[ToolInvokeMessage, None, None]:
|
||||
yield tool.create_text_message("g1")
|
||||
yield tool.create_text_message("g2")
|
||||
|
||||
tool.result = _message_generator()
|
||||
generated_messages = list(tool.invoke(user_id="user-2", tool_parameters={}))
|
||||
assert [msg.message.text for msg in generated_messages] == ["g1", "g2"]
|
||||
|
||||
|
||||
def test_fork_tool_runtime_returns_new_tool_with_copied_entity():
|
||||
tool = _build_tool()
|
||||
new_runtime = ToolRuntime(tenant_id="tenant-2", invoke_from=InvokeFrom.EXPLORE, runtime_parameters={})
|
||||
|
||||
forked = tool.fork_tool_runtime(new_runtime)
|
||||
|
||||
assert isinstance(forked, DummyTool)
|
||||
assert forked is not tool
|
||||
assert forked.runtime == new_runtime
|
||||
assert forked.entity == tool.entity
|
||||
assert forked.entity is not tool.entity
|
||||
|
||||
|
||||
def test_get_runtime_parameters_and_merge_runtime_parameters():
|
||||
tool = _build_tool()
|
||||
original = DummyParameter(name="temperature", type=DummyCastType(), form="schema", required=True, default="0.7")
|
||||
tool.entity.parameters = cast(Any, [original])
|
||||
|
||||
default_runtime_parameters = tool.get_runtime_parameters()
|
||||
assert default_runtime_parameters == [original]
|
||||
|
||||
override = DummyParameter(name="temperature", type=DummyCastType(), form="llm", required=False, default="0.5")
|
||||
appended = DummyParameter(name="new_param", type=DummyCastType(), form="form", required=False, default="x")
|
||||
tool.runtime_parameter_overrides = [override, appended]
|
||||
|
||||
merged = tool.get_merged_runtime_parameters()
|
||||
assert len(merged) == 2
|
||||
assert merged[0].name == "temperature"
|
||||
assert merged[0].form == "llm"
|
||||
assert merged[0].required is False
|
||||
assert merged[0].default == "0.5"
|
||||
assert merged[1].name == "new_param"
|
||||
|
||||
|
||||
def test_message_factory_helpers():
|
||||
tool = _build_tool()
|
||||
|
||||
image_message = tool.create_image_message("https://example.com/image.png")
|
||||
assert image_message.type == ToolInvokeMessage.MessageType.IMAGE
|
||||
assert image_message.message.text == "https://example.com/image.png"
|
||||
|
||||
file_obj = object()
|
||||
file_message = tool.create_file_message(file_obj) # type: ignore[arg-type]
|
||||
assert file_message.type == ToolInvokeMessage.MessageType.FILE
|
||||
assert file_message.message.file_marker == "file_marker"
|
||||
assert file_message.meta == {"file": file_obj}
|
||||
|
||||
link_message = tool.create_link_message("https://example.com")
|
||||
assert link_message.type == ToolInvokeMessage.MessageType.LINK
|
||||
assert link_message.message.text == "https://example.com"
|
||||
|
||||
text_message = tool.create_text_message("hello")
|
||||
assert text_message.type == ToolInvokeMessage.MessageType.TEXT
|
||||
assert text_message.message.text == "hello"
|
||||
|
||||
blob_message = tool.create_blob_message(b"blob", meta={"source": "unit-test"})
|
||||
assert blob_message.type == ToolInvokeMessage.MessageType.BLOB
|
||||
assert blob_message.message.blob == b"blob"
|
||||
assert blob_message.meta == {"source": "unit-test"}
|
||||
|
||||
json_message = tool.create_json_message({"k": "v"}, suppress_output=True)
|
||||
assert json_message.type == ToolInvokeMessage.MessageType.JSON
|
||||
assert json_message.message.json_object == {"k": "v"}
|
||||
assert json_message.message.suppress_output is True
|
||||
|
||||
variable_message = tool.create_variable_message("answer", 42, stream=False)
|
||||
assert variable_message.type == ToolInvokeMessage.MessageType.VARIABLE
|
||||
assert variable_message.message.variable_name == "answer"
|
||||
assert variable_message.message.variable_value == 42
|
||||
assert variable_message.message.stream is False
|
||||
|
||||
|
||||
def test_base_abstract_invoke_placeholder_returns_none():
|
||||
tool = _build_tool()
|
||||
assert Tool._invoke(tool, user_id="u", tool_parameters={}) is None
|
||||
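test_get_runtime_parameters_and_merge_runtime_parameters above checks that runtime overrides replace declared parameters by name and that genuinely new parameters are appended after them. A small sketch of that merge, independent of the Tool classes (parameter objects only need a name attribute):

def merge_runtime_parameters(defaults: list, overrides: list) -> list:
    """Override declared parameters by name, then append any brand-new runtime parameters."""
    by_name = {param.name: param for param in overrides}
    merged = [by_name.get(param.name, param) for param in defaults]
    known = {param.name for param in defaults}
    merged.extend(param for param in overrides if param.name not in known)
    return merged
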
@ -255,6 +255,32 @@ def test_create_variable_message():
|
||||
assert message.message.stream is False
|
||||
|
||||
|
||||
def test_create_file_message_should_include_file_marker():
|
||||
entity = ToolEntity(
|
||||
identity=ToolIdentity(author="test", name="test tool", label=I18nObject(en_US="test tool"), provider="test"),
|
||||
parameters=[],
|
||||
description=None,
|
||||
has_runtime_parameters=False,
|
||||
)
|
||||
runtime = ToolRuntime(tenant_id="test_tool", invoke_from=InvokeFrom.EXPLORE)
|
||||
tool = WorkflowTool(
|
||||
workflow_app_id="",
|
||||
workflow_as_tool_id="",
|
||||
version="1",
|
||||
workflow_entities={},
|
||||
workflow_call_depth=1,
|
||||
entity=entity,
|
||||
runtime=runtime,
|
||||
)
|
||||
|
||||
file_obj = object()
|
||||
message = tool.create_file_message(file_obj) # type: ignore[arg-type]
|
||||
|
||||
assert message.type == ToolInvokeMessage.MessageType.FILE
|
||||
assert message.message.file_marker == "file_marker"
|
||||
assert message.meta == {"file": file_obj}
|
||||
|
||||
|
||||
def test_resolve_user_from_database_falls_back_to_end_user(monkeypatch: pytest.MonkeyPatch):
|
||||
"""Ensure worker context can resolve EndUser when Account is missing."""
|
||||
|
||||
|
||||
@ -198,6 +198,15 @@ class SubscriptionTestCase:
|
||||
description: str = ""
|
||||
|
||||
|
||||
class FakeRedisClient:
|
||||
"""Minimal fake Redis client for unit tests."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.publish = MagicMock()
|
||||
self.spublish = MagicMock()
|
||||
self.pubsub = MagicMock(return_value=MagicMock())
|
||||
|
||||
|
||||
class TestRedisSubscription:
|
||||
"""Test cases for the _RedisSubscription class."""
|
||||
|
||||
@ -619,10 +628,13 @@ class TestRedisSubscription:
|
||||
class TestRedisShardedSubscription:
|
||||
"""Test cases for the _RedisShardedSubscription class."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def patch_sharded_redis_type(self, monkeypatch):
|
||||
monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)
|
||||
|
||||
@pytest.fixture
|
||||
def mock_redis_client(self) -> MagicMock:
|
||||
client = MagicMock()
|
||||
return client
|
||||
def mock_redis_client(self) -> FakeRedisClient:
|
||||
return FakeRedisClient()
|
||||
|
||||
@pytest.fixture
|
||||
def mock_pubsub(self) -> MagicMock:
|
||||
@ -636,7 +648,7 @@ class TestRedisShardedSubscription:
|
||||
|
||||
@pytest.fixture
|
||||
def sharded_subscription(
|
||||
self, mock_pubsub: MagicMock, mock_redis_client: MagicMock
|
||||
self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
|
||||
) -> Generator[_RedisShardedSubscription, None, None]:
|
||||
"""Create a _RedisShardedSubscription instance for testing."""
|
||||
subscription = _RedisShardedSubscription(
|
||||
@ -657,7 +669,7 @@ class TestRedisShardedSubscription:
|
||||
|
||||
# ==================== Lifecycle Tests ====================
|
||||
|
||||
def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
|
||||
def test_sharded_subscription_initialization(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
|
||||
"""Test that sharded subscription is properly initialized."""
|
||||
subscription = _RedisShardedSubscription(
|
||||
client=mock_redis_client,
|
||||
@ -970,7 +982,7 @@ class TestRedisShardedSubscription:
|
||||
],
|
||||
)
|
||||
def test_sharded_subscription_scenarios(
|
||||
self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: MagicMock
|
||||
self, test_case: SubscriptionTestCase, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient
|
||||
):
|
||||
"""Test various sharded subscription scenarios using table-driven approach."""
|
||||
subscription = _RedisShardedSubscription(
|
||||
@ -1058,7 +1070,7 @@ class TestRedisShardedSubscription:
|
||||
# Close should still work
|
||||
sharded_subscription.close() # Should not raise
|
||||
|
||||
def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
|
||||
def test_channel_name_variations(self, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
|
||||
"""Test various sharded channel name formats."""
|
||||
channel_names = [
|
||||
"simple",
|
||||
@ -1120,10 +1132,13 @@ class TestRedisSubscriptionCommon:
|
||||
"""Parameterized fixture providing subscription type and class."""
|
||||
return request.param
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def patch_sharded_redis_type(self, monkeypatch):
|
||||
monkeypatch.setattr("libs.broadcast_channel.redis.sharded_channel.Redis", FakeRedisClient)
|
||||
|
||||
@pytest.fixture
|
||||
def mock_redis_client(self) -> MagicMock:
|
||||
client = MagicMock()
|
||||
return client
|
||||
def mock_redis_client(self) -> FakeRedisClient:
|
||||
return FakeRedisClient()
|
||||
|
||||
@pytest.fixture
|
||||
def mock_pubsub(self) -> MagicMock:
|
||||
@ -1140,7 +1155,7 @@ class TestRedisSubscriptionCommon:
|
||||
return pubsub
|
||||
|
||||
@pytest.fixture
|
||||
def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: MagicMock):
|
||||
def subscription(self, subscription_params, mock_pubsub: MagicMock, mock_redis_client: FakeRedisClient):
|
||||
"""Create a subscription instance based on parameterized type."""
|
||||
subscription_type, subscription_class = subscription_params
|
||||
topic_name = f"test-{subscription_type}-topic"
|
||||
|
||||
@ -62,6 +62,9 @@ class FakeRepo:
|
||||
end_before: datetime.datetime,
|
||||
last_seen: tuple[datetime.datetime, str] | None,
|
||||
batch_size: int,
|
||||
run_types=None,
|
||||
tenant_ids=None,
|
||||
workflow_ids=None,
|
||||
) -> list[FakeRun]:
|
||||
if self.call_idx >= len(self.batches):
|
||||
return []
|
||||
|
||||
@ -17,7 +17,6 @@ from core.workflow.nodes.human_input.entities import (
|
||||
from core.workflow.nodes.human_input.enums import FormInputType, HumanInputFormKind, HumanInputFormStatus
|
||||
from models.human_input import RecipientType
|
||||
from services.human_input_service import Form, FormExpiredError, HumanInputService, InvalidFormDataError
|
||||
from tasks.app_generate.workflow_execute_task import WORKFLOW_BASED_APP_EXECUTION_QUEUE
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -88,7 +87,6 @@ def test_enqueue_resume_dispatches_task_for_workflow(mocker, mock_session_factor
|
||||
|
||||
resume_task.apply_async.assert_called_once()
|
||||
call_kwargs = resume_task.apply_async.call_args.kwargs
|
||||
assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
|
||||
assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"
|
||||
|
||||
|
||||
@ -130,7 +128,6 @@ def test_enqueue_resume_dispatches_task_for_advanced_chat(mocker, mock_session_f
|
||||
|
||||
resume_task.apply_async.assert_called_once()
|
||||
call_kwargs = resume_task.apply_async.call_args.kwargs
|
||||
assert call_kwargs["queue"] == WORKFLOW_BASED_APP_EXECUTION_QUEUE
|
||||
assert call_kwargs["kwargs"]["payload"]["workflow_run_id"] == "workflow-run-id"
|
||||
|
||||
|
||||
|
||||
api/uv.lock (generated, 84 lines changed)
@ -1237,49 +1237,47 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "cryptography"
|
||||
version = "46.0.3"
|
||||
version = "46.0.5"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" },
|
||||
]

[[package]]
@ -1368,7 +1366,7 @@ wheels = [

[[package]]
name = "dify-api"
version = "1.12.1"
version = "1.13.0"
source = { virtual = "." }
dependencies = [
{ name = "aliyun-log-python-sdk" },
@ -1594,7 +1592,7 @@ requires-dist = [
{ name = "gevent", specifier = "~=25.9.1" },
{ name = "gmpy2", specifier = "~=2.2.1" },
{ name = "google-api-core", specifier = "==2.18.0" },
{ name = "google-api-python-client", specifier = "==2.90.0" },
{ name = "google-api-python-client", specifier = "==2.189.0" },
{ name = "google-auth", specifier = "==2.29.0" },
{ name = "google-auth-httplib2", specifier = "==0.2.0" },
{ name = "google-cloud-aiplatform", specifier = "==1.49.0" },
@ -2306,7 +2304,7 @@ grpc = [

[[package]]
name = "google-api-python-client"
version = "2.90.0"
version = "2.189.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-api-core" },
@ -2315,9 +2313,9 @@ dependencies = [
{ name = "httplib2" },
{ name = "uritemplate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/35/8b/d990f947c261304a5c1599d45717d02c27d46af5f23e1fee5dc19c8fa79d/google-api-python-client-2.90.0.tar.gz", hash = "sha256:cbcb3ba8be37c6806676a49df16ac412077e5e5dc7fa967941eff977b31fba03", size = 10891311, upload-time = "2023-06-20T16:29:25.008Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/f8/0783aeca3410ee053d4dd1fccafd85197847b8f84dd038e036634605d083/google_api_python_client-2.189.0.tar.gz", hash = "sha256:45f2d8559b5c895dde6ad3fb33de025f5cb2c197fa5862f18df7f5295a172741", size = 13979470, upload-time = "2026-02-03T19:24:55.432Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/39/03/209b5c36a621ae644dc7d4743746cd3b38b18e133f8779ecaf6b95cc01ce/google_api_python_client-2.90.0-py2.py3-none-any.whl", hash = "sha256:4a41ffb7797d4f28e44635fb1e7076240b741c6493e7c3233c0e4421cec7c913", size = 11379891, upload-time = "2023-06-20T16:29:19.532Z" },
{ url = "https://files.pythonhosted.org/packages/04/44/3677ff27998214f2fa7957359da48da378a0ffff1bd0bdaba42e752bc13e/google_api_python_client-2.189.0-py3-none-any.whl", hash = "sha256:a258c09660a49c6159173f8bbece171278e917e104a11f0640b34751b79c8a1a", size = 14547633, upload-time = "2026-02-03T19:24:52.845Z" },
]

[[package]]

@ -106,10 +106,10 @@ if [[ -z "${QUEUES}" ]]; then
# Configure queues based on edition
if [[ "${EDITION}" == "CLOUD" ]]; then
# Cloud edition: separate queues for dataset and trigger tasks
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow_professional,workflow_team,workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
else
# Community edition (SELF_HOSTED): dataset and workflow have separate queues
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention"
QUEUES="dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor,retention,workflow_based_app_execution"
fi

echo "No queues specified, using edition-based defaults: ${QUEUES}"

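For readers skimming this hunk: both editions gain a new `workflow_based_app_execution` queue in the fallback list. Below is a minimal Python sketch of the same edition-based default, purely illustrative; the real logic lives in the shell entrypoint above, and the function name here is hypothetical.

```python
import os

# Illustrative only: mirror the entrypoint's edition-based Celery queue defaults.
# The queue names are copied from the shell script in this diff.
CLOUD_QUEUES = (
    "dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,"
    "plugin,workflow_storage,conversation,workflow_professional,workflow_team,"
    "workflow_sandbox,schedule_poller,schedule_executor,triggered_workflow_dispatcher,"
    "trigger_refresh_executor,retention,workflow_based_app_execution"
)
COMMUNITY_QUEUES = (
    "dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,"
    "plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,"
    "triggered_workflow_dispatcher,trigger_refresh_executor,retention,"
    "workflow_based_app_execution"
)

def default_queues(edition: str) -> str:
    """Return the comma-separated queue list used when QUEUES is not set."""
    return CLOUD_QUEUES if edition == "CLOUD" else COMMUNITY_QUEUES

queues = os.environ.get("QUEUES") or default_queues(os.environ.get("EDITION", "SELF_HOSTED"))
print(f"No queues specified, using edition-based defaults: {queues}")
```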
@ -62,6 +62,9 @@ LANG=C.UTF-8
LC_ALL=C.UTF-8
PYTHONIOENCODING=utf-8

# Set UV cache directory to avoid permission issues with non-existent home directory
UV_CACHE_DIR=/tmp/.uv-cache

# ------------------------------
# Server Configuration
# ------------------------------
@ -384,6 +387,8 @@ CELERY_USE_SENTINEL=false
CELERY_SENTINEL_MASTER_NAME=
CELERY_SENTINEL_PASSWORD=
CELERY_SENTINEL_SOCKET_TIMEOUT=0.1
# e.g. {"tasks.add": {"rate_limit": "10/s"}}
CELERY_TASK_ANNOTATIONS=null

# ------------------------------
# CORS Configuration
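The Sentinel variables above are plain string and float values. The sketch below shows one way they could be mapped onto Celery's documented Redis Sentinel transport options; the wiring (including the `CELERY_BROKER_URL` fallback) is an illustrative assumption, not the project's actual configuration code.

```python
import os
from celery import Celery

# Hedged example: feed the CELERY_SENTINEL_* values into Celery's
# broker_transport_options for the sentinel:// transport.
app = Celery("dify", broker=os.environ.get("CELERY_BROKER_URL", "sentinel://localhost:26379/1"))

if os.environ.get("CELERY_USE_SENTINEL", "false").lower() == "true":
    app.conf.broker_transport_options = {
        "master_name": os.environ.get("CELERY_SENTINEL_MASTER_NAME", ""),
        "sentinel_kwargs": {
            "password": os.environ.get("CELERY_SENTINEL_PASSWORD") or None,
            "socket_timeout": float(os.environ.get("CELERY_SENTINEL_SOCKET_TIMEOUT", "0.1")),
        },
    }
```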
@ -1068,6 +1073,8 @@ WORKFLOW_LOG_CLEANUP_ENABLED=false
WORKFLOW_LOG_RETENTION_DAYS=30
# Batch size for workflow log cleanup operations (default: 100)
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
# Comma-separated list of workflow IDs to clean logs for
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS=

# Aliyun SLS Logstore Configuration
# Aliyun Access Key ID
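The new `WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS` value is a plain comma-separated string. A small sketch of how such a value might be split into a list of IDs follows; the helper is hypothetical and not code from this diff.

```python
import os

def parse_specific_workflow_ids(raw: str | None) -> list[str]:
    """Split a comma-separated env value into workflow IDs, ignoring blanks."""
    if not raw:
        return []
    return [item.strip() for item in raw.split(",") if item.strip()]

# Example: WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS="id-1, id-2"
ids = parse_specific_workflow_ids(os.environ.get("WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS"))
# An empty value (the default) means no per-workflow filter.
```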
@ -1518,6 +1525,7 @@ AMPLITUDE_API_KEY=
# Sandbox expired records clean configuration
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD=21
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE=1000
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL=200
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS=30

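A rough sketch of how a batched cleanup job might combine the batch size, max interval, and retention settings introduced above. The callable and its parameters are hypothetical, and reading `BATCH_MAX_INTERVAL` as a sleep between batches is an assumption, not something this diff states.

```python
import os
import time

BATCH_SIZE = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE", "1000"))
MAX_INTERVAL_MS = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL", "200"))
RETENTION_DAYS = int(os.environ.get("SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS", "30"))

def clean_expired_records(delete_batch) -> int:
    """Delete expired records batch by batch, pausing up to MAX_INTERVAL_MS between batches."""
    total = 0
    while True:
        # delete_batch is a hypothetical callable that removes at most `limit`
        # records older than the retention window and returns the count deleted.
        deleted = delete_batch(limit=BATCH_SIZE, older_than_days=RETENTION_DAYS)
        total += deleted
        if deleted < BATCH_SIZE:
            return total
        time.sleep(MAX_INTERVAL_MS / 1000.0)  # throttle so cleanup does not hog the database
```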
@ -21,7 +21,7 @@ services:

# API service
api:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -63,7 +63,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -102,7 +102,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -132,7 +132,7 @@ services:

# Frontend web application.
web:
image: langgenius/dify-web:1.12.1
image: langgenius/dify-web:1.13.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

@ -16,6 +16,7 @@ x-shared-env: &shared-api-worker-env
LANG: ${LANG:-C.UTF-8}
LC_ALL: ${LC_ALL:-C.UTF-8}
PYTHONIOENCODING: ${PYTHONIOENCODING:-utf-8}
UV_CACHE_DIR: ${UV_CACHE_DIR:-/tmp/.uv-cache}
LOG_LEVEL: ${LOG_LEVEL:-INFO}
LOG_OUTPUT_FORMAT: ${LOG_OUTPUT_FORMAT:-text}
LOG_FILE: ${LOG_FILE:-/app/logs/server.log}
@ -105,6 +106,7 @@ x-shared-env: &shared-api-worker-env
CELERY_SENTINEL_MASTER_NAME: ${CELERY_SENTINEL_MASTER_NAME:-}
CELERY_SENTINEL_PASSWORD: ${CELERY_SENTINEL_PASSWORD:-}
CELERY_SENTINEL_SOCKET_TIMEOUT: ${CELERY_SENTINEL_SOCKET_TIMEOUT:-0.1}
CELERY_TASK_ANNOTATIONS: ${CELERY_TASK_ANNOTATIONS:-null}
WEB_API_CORS_ALLOW_ORIGINS: ${WEB_API_CORS_ALLOW_ORIGINS:-*}
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
@ -468,6 +470,7 @@ x-shared-env: &shared-api-worker-env
WORKFLOW_LOG_CLEANUP_ENABLED: ${WORKFLOW_LOG_CLEANUP_ENABLED:-false}
WORKFLOW_LOG_RETENTION_DAYS: ${WORKFLOW_LOG_RETENTION_DAYS:-30}
WORKFLOW_LOG_CLEANUP_BATCH_SIZE: ${WORKFLOW_LOG_CLEANUP_BATCH_SIZE:-100}
WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS: ${WORKFLOW_LOG_CLEANUP_SPECIFIC_WORKFLOW_IDS:-}
ALIYUN_SLS_ACCESS_KEY_ID: ${ALIYUN_SLS_ACCESS_KEY_ID:-}
ALIYUN_SLS_ACCESS_KEY_SECRET: ${ALIYUN_SLS_ACCESS_KEY_SECRET:-}
ALIYUN_SLS_ENDPOINT: ${ALIYUN_SLS_ENDPOINT:-}
@ -682,6 +685,7 @@ x-shared-env: &shared-api-worker-env
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD: ${SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD:-21}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE:-1000}
SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL: ${SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_MAX_INTERVAL:-200}
SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS: ${SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS:-30}
PUBSUB_REDIS_URL: ${PUBSUB_REDIS_URL:-}
PUBSUB_REDIS_CHANNEL_TYPE: ${PUBSUB_REDIS_CHANNEL_TYPE:-pubsub}
@ -712,7 +716,7 @@ services:

# API service
api:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -754,7 +758,7 @@ services:
# worker service
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
worker:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -793,7 +797,7 @@ services:
# worker_beat service
# Celery beat for scheduling periodic tasks.
worker_beat:
image: langgenius/dify-api:1.12.1
image: langgenius/dify-api:1.13.0
restart: always
environment:
# Use the shared environment variables.
@ -823,7 +827,7 @@ services:

# Frontend web application.
web:
image: langgenius/dify-web:1.12.1
image: langgenius/dify-web:1.13.0
restart: always
environment:
CONSOLE_API_URL: ${CONSOLE_API_URL:-}

sdks/nodejs-client/pnpm-lock.yaml (generated, 8 changed lines)

@ -10,7 +10,7 @@ importers:
dependencies:
axios:
specifier: ^1.13.2
version: 1.13.2
version: 1.13.5
devDependencies:
'@eslint/js':
specifier: ^9.39.2
@ -544,8 +544,8 @@ packages:
asynckit@0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}

axios@1.13.2:
resolution: {integrity: sha512-VPk9ebNqPcy5lRGuSlKx752IlDatOjT9paPlm8A7yOuW2Fbvp4X3JznJtT4f0GzGLLiWE9W8onz51SqLYwzGaA==}
axios@1.13.5:
resolution: {integrity: sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==}

balanced-match@1.0.2:
resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==}
@ -1677,7 +1677,7 @@ snapshots:

asynckit@0.4.0: {}

axios@1.13.2:
axios@1.13.5:
dependencies:
follow-redirects: 1.15.11
form-data: 4.0.5

web/__tests__/billing/billing-integration.test.tsx (new normal file, 991 lines)
@ -0,0 +1,991 @@
|
||||
import type { UsagePlanInfo, UsageResetInfo } from '@/app/components/billing/type'
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import * as React from 'react'
|
||||
import AnnotationFull from '@/app/components/billing/annotation-full'
|
||||
import AnnotationFullModal from '@/app/components/billing/annotation-full/modal'
|
||||
import AppsFull from '@/app/components/billing/apps-full-in-dialog'
|
||||
import Billing from '@/app/components/billing/billing-page'
|
||||
import { defaultPlan, NUM_INFINITE } from '@/app/components/billing/config'
|
||||
import HeaderBillingBtn from '@/app/components/billing/header-billing-btn'
|
||||
import PlanComp from '@/app/components/billing/plan'
|
||||
import PlanUpgradeModal from '@/app/components/billing/plan-upgrade-modal'
|
||||
import PriorityLabel from '@/app/components/billing/priority-label'
|
||||
import TriggerEventsLimitModal from '@/app/components/billing/trigger-events-limit-modal'
|
||||
import { Plan } from '@/app/components/billing/type'
|
||||
import UpgradeBtn from '@/app/components/billing/upgrade-btn'
|
||||
import VectorSpaceFull from '@/app/components/billing/vector-space-full'
|
||||
|
||||
let mockProviderCtx: Record<string, unknown> = {}
|
||||
let mockAppCtx: Record<string, unknown> = {}
|
||||
const mockSetShowPricingModal = vi.fn()
|
||||
const mockSetShowAccountSettingModal = vi.fn()
|
||||
|
||||
vi.mock('@/context/provider-context', () => ({
|
||||
useProviderContext: () => mockProviderCtx,
|
||||
}))
|
||||
|
||||
vi.mock('@/context/app-context', () => ({
|
||||
useAppContext: () => mockAppCtx,
|
||||
}))
|
||||
|
||||
vi.mock('@/context/modal-context', () => ({
|
||||
useModalContext: () => ({
|
||||
setShowPricingModal: mockSetShowPricingModal,
|
||||
}),
|
||||
useModalContextSelector: (selector: (s: Record<string, unknown>) => unknown) =>
|
||||
selector({
|
||||
setShowAccountSettingModal: mockSetShowAccountSettingModal,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/i18n', () => ({
|
||||
useGetLanguage: () => 'en-US',
|
||||
useGetPricingPageLanguage: () => 'en',
|
||||
}))
|
||||
|
||||
// ─── Service mocks ──────────────────────────────────────────────────────────
|
||||
const mockRefetch = vi.fn().mockResolvedValue({ data: 'https://billing.example.com' })
|
||||
vi.mock('@/service/use-billing', () => ({
|
||||
useBillingUrl: () => ({
|
||||
data: 'https://billing.example.com',
|
||||
isFetching: false,
|
||||
refetch: mockRefetch,
|
||||
}),
|
||||
useBindPartnerStackInfo: () => ({ mutateAsync: vi.fn() }),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-education', () => ({
|
||||
useEducationVerify: () => ({
|
||||
mutateAsync: vi.fn().mockResolvedValue({ token: 'test-token' }),
|
||||
isPending: false,
|
||||
}),
|
||||
}))
|
||||
|
||||
// ─── Navigation mocks ───────────────────────────────────────────────────────
|
||||
const mockRouterPush = vi.fn()
|
||||
vi.mock('next/navigation', () => ({
|
||||
useRouter: () => ({ push: mockRouterPush }),
|
||||
usePathname: () => '/billing',
|
||||
useSearchParams: () => new URLSearchParams(),
|
||||
}))
|
||||
|
||||
vi.mock('@/hooks/use-async-window-open', () => ({
|
||||
useAsyncWindowOpen: () => vi.fn(),
|
||||
}))
|
||||
|
||||
// ─── External component mocks ───────────────────────────────────────────────
|
||||
vi.mock('@/app/education-apply/verify-state-modal', () => ({
|
||||
default: ({ isShow }: { isShow: boolean }) =>
|
||||
isShow ? <div data-testid="verify-state-modal" /> : null,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/header/utils/util', () => ({
|
||||
mailToSupport: () => 'mailto:support@test.com',
|
||||
}))
|
||||
|
||||
// ─── Test data factories ────────────────────────────────────────────────────
|
||||
type PlanOverrides = {
|
||||
type?: string
|
||||
usage?: Partial<UsagePlanInfo>
|
||||
total?: Partial<UsagePlanInfo>
|
||||
reset?: Partial<UsageResetInfo>
|
||||
}
|
||||
|
||||
const createPlanData = (overrides: PlanOverrides = {}) => ({
|
||||
...defaultPlan,
|
||||
...overrides,
|
||||
type: overrides.type ?? defaultPlan.type,
|
||||
usage: { ...defaultPlan.usage, ...overrides.usage },
|
||||
total: { ...defaultPlan.total, ...overrides.total },
|
||||
reset: { ...defaultPlan.reset, ...overrides.reset },
|
||||
})
|
||||
|
||||
const setupProviderContext = (planOverrides: PlanOverrides = {}, extra: Record<string, unknown> = {}) => {
|
||||
mockProviderCtx = {
|
||||
plan: createPlanData(planOverrides),
|
||||
enableBilling: true,
|
||||
isFetchedPlan: true,
|
||||
enableEducationPlan: false,
|
||||
isEducationAccount: false,
|
||||
allowRefreshEducationVerify: false,
|
||||
...extra,
|
||||
}
|
||||
}
|
||||
|
||||
const setupAppContext = (overrides: Record<string, unknown> = {}) => {
|
||||
mockAppCtx = {
|
||||
isCurrentWorkspaceManager: true,
|
||||
userProfile: { email: 'test@example.com' },
|
||||
langGeniusVersionInfo: { current_version: '1.0.0' },
|
||||
...overrides,
|
||||
}
|
||||
}
|
||||
|
||||
// Vitest hoists vi.mock() calls, so imports above will use mocked modules
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 1. Billing Page + Plan Component Integration
|
||||
// Tests the full data flow: BillingPage → PlanComp → UsageInfo → ProgressBar
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
describe('Billing Page + Plan Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setupAppContext()
|
||||
})
|
||||
|
||||
// Verify that the billing page renders PlanComp with all 7 usage items
|
||||
describe('Rendering complete plan information', () => {
|
||||
it('should display all 7 usage metrics for sandbox plan', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: {
|
||||
buildApps: 3,
|
||||
teamMembers: 1,
|
||||
documentsUploadQuota: 10,
|
||||
vectorSpace: 20,
|
||||
annotatedResponse: 5,
|
||||
triggerEvents: 1000,
|
||||
apiRateLimit: 2000,
|
||||
},
|
||||
total: {
|
||||
buildApps: 5,
|
||||
teamMembers: 1,
|
||||
documentsUploadQuota: 50,
|
||||
vectorSpace: 50,
|
||||
annotatedResponse: 10,
|
||||
triggerEvents: 3000,
|
||||
apiRateLimit: 5000,
|
||||
},
|
||||
})
|
||||
|
||||
render(<Billing />)
|
||||
|
||||
// Plan name
|
||||
expect(screen.getByText(/plans\.sandbox\.name/i)).toBeInTheDocument()
|
||||
|
||||
// All 7 usage items should be visible
|
||||
expect(screen.getByText(/usagePage\.buildApps/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/usagePage\.teamMembers/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/usagePage\.documentsUploadQuota/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/usagePage\.vectorSpace/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/usagePage\.annotationQuota/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/usagePage\.triggerEvents/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/plansCommon\.apiRateLimit/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display usage values as "usage / total" format', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { buildApps: 3, teamMembers: 1 },
|
||||
total: { buildApps: 5, teamMembers: 1 },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
// Check that the buildApps usage fraction "3 / 5" is rendered
|
||||
const usageContainers = screen.getAllByText('3')
|
||||
expect(usageContainers.length).toBeGreaterThan(0)
|
||||
const totalContainers = screen.getAllByText('5')
|
||||
expect(totalContainers.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it('should show "unlimited" for infinite quotas (professional API rate limit)', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.professional,
|
||||
total: { apiRateLimit: NUM_INFINITE },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
expect(screen.getByText(/plansCommon\.unlimited/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display reset days for trigger events when applicable', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.professional,
|
||||
total: { triggerEvents: 20000 },
|
||||
reset: { triggerEvents: 7 },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
// Reset text should be visible
|
||||
expect(screen.getByText(/usagePage\.resetsIn/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Verify billing URL button visibility and behavior
|
||||
describe('Billing URL button', () => {
|
||||
it('should show billing button when enableBilling and isCurrentWorkspaceManager', () => {
|
||||
setupProviderContext({ type: Plan.sandbox })
|
||||
setupAppContext({ isCurrentWorkspaceManager: true })
|
||||
|
||||
render(<Billing />)
|
||||
|
||||
expect(screen.getByText(/viewBillingTitle/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/viewBillingAction/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should hide billing button when user is not workspace manager', () => {
|
||||
setupProviderContext({ type: Plan.sandbox })
|
||||
setupAppContext({ isCurrentWorkspaceManager: false })
|
||||
|
||||
render(<Billing />)
|
||||
|
||||
expect(screen.queryByText(/viewBillingTitle/i)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should hide billing button when billing is disabled', () => {
|
||||
setupProviderContext({ type: Plan.sandbox }, { enableBilling: false })
|
||||
|
||||
render(<Billing />)
|
||||
|
||||
expect(screen.queryByText(/viewBillingTitle/i)).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 2. Plan Type Display Integration
|
||||
// Tests that different plan types render correct visual elements
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
describe('Plan Type Display Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setupAppContext()
|
||||
})
|
||||
|
||||
it('should render sandbox plan with upgrade button (premium badge)', () => {
|
||||
setupProviderContext({ type: Plan.sandbox })
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
expect(screen.getByText(/plans\.sandbox\.name/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/plans\.sandbox\.for/i)).toBeInTheDocument()
|
||||
// Sandbox shows premium badge upgrade button (not plain)
|
||||
expect(screen.getByText(/upgradeBtn\.encourageShort/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render professional plan with plain upgrade button', () => {
|
||||
setupProviderContext({ type: Plan.professional })
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
expect(screen.getByText(/plans\.professional\.name/i)).toBeInTheDocument()
|
||||
// Professional shows plain button because it's not team
|
||||
expect(screen.getByText(/upgradeBtn\.encourageShort/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render team plan with plain-style upgrade button', () => {
|
||||
setupProviderContext({ type: Plan.team })
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
expect(screen.getByText(/plans\.team\.name/i)).toBeInTheDocument()
|
||||
// Team plan has isPlain=true, so shows "upgradeBtn.plain" text
|
||||
expect(screen.getByText(/upgradeBtn\.plain/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render upgrade button for enterprise plan', () => {
|
||||
setupProviderContext({ type: Plan.enterprise })
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
expect(screen.queryByText(/upgradeBtn\.encourageShort/i)).not.toBeInTheDocument()
|
||||
expect(screen.queryByText(/upgradeBtn\.plain/i)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show education verify button when enableEducationPlan is true and not yet verified', () => {
|
||||
setupProviderContext({ type: Plan.sandbox }, {
|
||||
enableEducationPlan: true,
|
||||
isEducationAccount: false,
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
expect(screen.getByText(/toVerified/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 3. Upgrade Flow Integration
|
||||
// Tests the flow: UpgradeBtn click → setShowPricingModal
|
||||
// and PlanUpgradeModal → close + trigger pricing
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
describe('Upgrade Flow Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setupAppContext()
|
||||
setupProviderContext({ type: Plan.sandbox })
|
||||
})
|
||||
|
||||
// UpgradeBtn triggers pricing modal
|
||||
describe('UpgradeBtn triggers pricing modal', () => {
|
||||
it('should call setShowPricingModal when clicking premium badge upgrade button', async () => {
|
||||
const user = userEvent.setup()
|
||||
|
||||
render(<UpgradeBtn />)
|
||||
|
||||
const badgeText = screen.getByText(/upgradeBtn\.encourage/i)
|
||||
await user.click(badgeText)
|
||||
|
||||
expect(mockSetShowPricingModal).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should call setShowPricingModal when clicking plain upgrade button', async () => {
|
||||
const user = userEvent.setup()
|
||||
|
||||
render(<UpgradeBtn isPlain />)
|
||||
|
||||
const button = screen.getByRole('button')
|
||||
await user.click(button)
|
||||
|
||||
expect(mockSetShowPricingModal).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should use custom onClick when provided instead of setShowPricingModal', async () => {
|
||||
const customOnClick = vi.fn()
|
||||
const user = userEvent.setup()
|
||||
|
||||
render(<UpgradeBtn onClick={customOnClick} />)
|
||||
|
||||
const badgeText = screen.getByText(/upgradeBtn\.encourage/i)
|
||||
await user.click(badgeText)
|
||||
|
||||
expect(customOnClick).toHaveBeenCalledTimes(1)
|
||||
expect(mockSetShowPricingModal).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should fire gtag event with loc parameter when clicked', async () => {
|
||||
const mockGtag = vi.fn()
|
||||
;(window as unknown as Record<string, unknown>).gtag = mockGtag
|
||||
const user = userEvent.setup()
|
||||
|
||||
render(<UpgradeBtn loc="billing-page" />)
|
||||
|
||||
const badgeText = screen.getByText(/upgradeBtn\.encourage/i)
|
||||
await user.click(badgeText)
|
||||
|
||||
expect(mockGtag).toHaveBeenCalledWith('event', 'click_upgrade_btn', { loc: 'billing-page' })
|
||||
delete (window as unknown as Record<string, unknown>).gtag
|
||||
})
|
||||
})
|
||||
|
||||
// PlanUpgradeModal integration: close modal and trigger pricing
|
||||
describe('PlanUpgradeModal upgrade flow', () => {
|
||||
it('should call onClose and setShowPricingModal when clicking upgrade button in modal', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClose = vi.fn()
|
||||
|
||||
render(
|
||||
<PlanUpgradeModal
|
||||
show={true}
|
||||
onClose={onClose}
|
||||
title="Upgrade Required"
|
||||
description="You need a better plan"
|
||||
/>,
|
||||
)
|
||||
|
||||
// The modal should show title and description
|
||||
expect(screen.getByText('Upgrade Required')).toBeInTheDocument()
|
||||
expect(screen.getByText('You need a better plan')).toBeInTheDocument()
|
||||
|
||||
// Click the upgrade button inside the modal
|
||||
const upgradeText = screen.getByText(/triggerLimitModal\.upgrade/i)
|
||||
await user.click(upgradeText)
|
||||
|
||||
// Should close the current modal first
|
||||
expect(onClose).toHaveBeenCalledTimes(1)
|
||||
// Then open pricing modal
|
||||
expect(mockSetShowPricingModal).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should call onClose and custom onUpgrade when provided', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClose = vi.fn()
|
||||
const onUpgrade = vi.fn()
|
||||
|
||||
render(
|
||||
<PlanUpgradeModal
|
||||
show={true}
|
||||
onClose={onClose}
|
||||
onUpgrade={onUpgrade}
|
||||
title="Test"
|
||||
description="Test"
|
||||
/>,
|
||||
)
|
||||
|
||||
const upgradeText = screen.getByText(/triggerLimitModal\.upgrade/i)
|
||||
await user.click(upgradeText)
|
||||
|
||||
expect(onClose).toHaveBeenCalledTimes(1)
|
||||
expect(onUpgrade).toHaveBeenCalledTimes(1)
|
||||
// Custom onUpgrade replaces default setShowPricingModal
|
||||
expect(mockSetShowPricingModal).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should call onClose when clicking dismiss button', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClose = vi.fn()
|
||||
|
||||
render(
|
||||
<PlanUpgradeModal
|
||||
show={true}
|
||||
onClose={onClose}
|
||||
title="Test"
|
||||
description="Test"
|
||||
/>,
|
||||
)
|
||||
|
||||
const dismissBtn = screen.getByText(/triggerLimitModal\.dismiss/i)
|
||||
await user.click(dismissBtn)
|
||||
|
||||
expect(onClose).toHaveBeenCalledTimes(1)
|
||||
expect(mockSetShowPricingModal).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
// Upgrade from PlanComp: clicking upgrade button in plan component triggers pricing
|
||||
describe('PlanComp upgrade button triggers pricing', () => {
|
||||
it('should open pricing modal when clicking upgrade in sandbox plan', async () => {
|
||||
const user = userEvent.setup()
|
||||
setupProviderContext({ type: Plan.sandbox })
|
||||
|
||||
render(<PlanComp loc="test-loc" />)
|
||||
|
||||
const upgradeText = screen.getByText(/upgradeBtn\.encourageShort/i)
|
||||
await user.click(upgradeText)
|
||||
|
||||
expect(mockSetShowPricingModal).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 4. Capacity Full Components Integration
|
||||
// Tests AppsFull, VectorSpaceFull, AnnotationFull, TriggerEventsLimitModal
|
||||
// with real child components (UsageInfo, ProgressBar, UpgradeBtn)
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
describe('Capacity Full Components Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setupAppContext()
|
||||
})
|
||||
|
||||
// AppsFull renders with correct messaging and components
|
||||
describe('AppsFull integration', () => {
|
||||
it('should display upgrade tip and upgrade button for sandbox plan at capacity', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { buildApps: 5 },
|
||||
total: { buildApps: 5 },
|
||||
})
|
||||
|
||||
render(<AppsFull loc="test" />)
|
||||
|
||||
// Should show "full" tip
|
||||
expect(screen.getByText(/apps\.fullTip1$/i)).toBeInTheDocument()
|
||||
// Should show upgrade button
|
||||
expect(screen.getByText(/upgradeBtn\.encourageShort/i)).toBeInTheDocument()
|
||||
// Should show usage/total fraction "5/5"
|
||||
expect(screen.getByText(/5\/5/)).toBeInTheDocument()
|
||||
// Should have a progress bar rendered
|
||||
expect(screen.getByTestId('billing-progress-bar')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display upgrade tip and upgrade button for professional plan', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.professional,
|
||||
usage: { buildApps: 48 },
|
||||
total: { buildApps: 50 },
|
||||
})
|
||||
|
||||
render(<AppsFull loc="test" />)
|
||||
|
||||
expect(screen.getByText(/apps\.fullTip1$/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/upgradeBtn\.encourageShort/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display contact tip and contact button for team plan', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.team,
|
||||
usage: { buildApps: 200 },
|
||||
total: { buildApps: 200 },
|
||||
})
|
||||
|
||||
render(<AppsFull loc="test" />)
|
||||
|
||||
// Team plan shows different tip
|
||||
expect(screen.getByText(/apps\.fullTip2$/i)).toBeInTheDocument()
|
||||
// Team plan shows "Contact Us" instead of upgrade
|
||||
expect(screen.getByText(/apps\.contactUs/i)).toBeInTheDocument()
|
||||
expect(screen.queryByText(/upgradeBtn\.encourageShort/i)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render progress bar with correct color based on usage percentage', () => {
|
||||
// 100% usage should show error color
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { buildApps: 5 },
|
||||
total: { buildApps: 5 },
|
||||
})
|
||||
|
||||
render(<AppsFull loc="test" />)
|
||||
|
||||
const progressBar = screen.getByTestId('billing-progress-bar')
|
||||
expect(progressBar).toHaveClass('bg-components-progress-error-progress')
|
||||
})
|
||||
})
|
||||
|
||||
// VectorSpaceFull renders with VectorSpaceInfo and UpgradeBtn
|
||||
describe('VectorSpaceFull integration', () => {
|
||||
it('should display full tip, upgrade button, and vector space usage info', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { vectorSpace: 50 },
|
||||
total: { vectorSpace: 50 },
|
||||
})
|
||||
|
||||
render(<VectorSpaceFull />)
|
||||
|
||||
// Should show full tip
|
||||
expect(screen.getByText(/vectorSpace\.fullTip/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/vectorSpace\.fullSolution/i)).toBeInTheDocument()
|
||||
// Should show upgrade button
|
||||
expect(screen.getByText(/upgradeBtn\.encourage$/i)).toBeInTheDocument()
|
||||
// Should show vector space usage info
|
||||
expect(screen.getByText(/usagePage\.vectorSpace/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// AnnotationFull renders with Usage component and UpgradeBtn
|
||||
describe('AnnotationFull integration', () => {
|
||||
it('should display annotation full tip, upgrade button, and usage info', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { annotatedResponse: 10 },
|
||||
total: { annotatedResponse: 10 },
|
||||
})
|
||||
|
||||
render(<AnnotationFull />)
|
||||
|
||||
expect(screen.getByText(/annotatedResponse\.fullTipLine1/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/annotatedResponse\.fullTipLine2/i)).toBeInTheDocument()
|
||||
// UpgradeBtn rendered
|
||||
expect(screen.getByText(/upgradeBtn\.encourage$/i)).toBeInTheDocument()
|
||||
// Usage component should show annotation quota
|
||||
expect(screen.getByText(/annotatedResponse\.quotaTitle/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// AnnotationFullModal shows modal with usage and upgrade button
|
||||
describe('AnnotationFullModal integration', () => {
|
||||
it('should render modal with annotation info and upgrade button when show is true', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { annotatedResponse: 10 },
|
||||
total: { annotatedResponse: 10 },
|
||||
})
|
||||
|
||||
render(<AnnotationFullModal show={true} onHide={vi.fn()} />)
|
||||
|
||||
expect(screen.getByText(/annotatedResponse\.fullTipLine1/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/annotatedResponse\.quotaTitle/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/upgradeBtn\.encourage$/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render content when show is false', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { annotatedResponse: 10 },
|
||||
total: { annotatedResponse: 10 },
|
||||
})
|
||||
|
||||
render(<AnnotationFullModal show={false} onHide={vi.fn()} />)
|
||||
|
||||
expect(screen.queryByText(/annotatedResponse\.fullTipLine1/i)).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// TriggerEventsLimitModal renders PlanUpgradeModal with embedded UsageInfo
|
||||
describe('TriggerEventsLimitModal integration', () => {
|
||||
it('should display trigger limit title, usage info, and upgrade button', () => {
|
||||
setupProviderContext({ type: Plan.professional })
|
||||
|
||||
render(
|
||||
<TriggerEventsLimitModal
|
||||
show={true}
|
||||
onClose={vi.fn()}
|
||||
onUpgrade={vi.fn()}
|
||||
usage={18000}
|
||||
total={20000}
|
||||
resetInDays={5}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Modal title and description
|
||||
expect(screen.getByText(/triggerLimitModal\.title/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/triggerLimitModal\.description/i)).toBeInTheDocument()
|
||||
// Embedded UsageInfo with trigger events data
|
||||
expect(screen.getByText(/triggerLimitModal\.usageTitle/i)).toBeInTheDocument()
|
||||
expect(screen.getByText('18000')).toBeInTheDocument()
|
||||
expect(screen.getByText('20000')).toBeInTheDocument()
|
||||
// Reset info
|
||||
expect(screen.getByText(/usagePage\.resetsIn/i)).toBeInTheDocument()
|
||||
// Upgrade and dismiss buttons
|
||||
expect(screen.getByText(/triggerLimitModal\.upgrade/i)).toBeInTheDocument()
|
||||
expect(screen.getByText(/triggerLimitModal\.dismiss/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should call onClose and onUpgrade when clicking upgrade', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClose = vi.fn()
|
||||
const onUpgrade = vi.fn()
|
||||
setupProviderContext({ type: Plan.professional })
|
||||
|
||||
render(
|
||||
<TriggerEventsLimitModal
|
||||
show={true}
|
||||
onClose={onClose}
|
||||
onUpgrade={onUpgrade}
|
||||
usage={20000}
|
||||
total={20000}
|
||||
/>,
|
||||
)
|
||||
|
||||
const upgradeBtn = screen.getByText(/triggerLimitModal\.upgrade/i)
|
||||
await user.click(upgradeBtn)
|
||||
|
||||
expect(onClose).toHaveBeenCalledTimes(1)
|
||||
expect(onUpgrade).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 5. Header Billing Button Integration
|
||||
// Tests HeaderBillingBtn behavior for different plan states
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
describe('Header Billing Button Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setupAppContext()
|
||||
})
|
||||
|
||||
it('should render UpgradeBtn (premium badge) for sandbox plan', () => {
|
||||
setupProviderContext({ type: Plan.sandbox })
|
||||
|
||||
render(<HeaderBillingBtn />)
|
||||
|
||||
expect(screen.getByText(/upgradeBtn\.encourageShort/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render "pro" badge for professional plan', () => {
|
||||
setupProviderContext({ type: Plan.professional })
|
||||
|
||||
render(<HeaderBillingBtn />)
|
||||
|
||||
expect(screen.getByText('pro')).toBeInTheDocument()
|
||||
expect(screen.queryByText(/upgradeBtn/i)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render "team" badge for team plan', () => {
|
||||
setupProviderContext({ type: Plan.team })
|
||||
|
||||
render(<HeaderBillingBtn />)
|
||||
|
||||
expect(screen.getByText('team')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should return null when billing is disabled', () => {
|
||||
setupProviderContext({ type: Plan.sandbox }, { enableBilling: false })
|
||||
|
||||
const { container } = render(<HeaderBillingBtn />)
|
||||
|
||||
expect(container.innerHTML).toBe('')
|
||||
})
|
||||
|
||||
it('should return null when plan is not fetched yet', () => {
|
||||
setupProviderContext({ type: Plan.sandbox }, { isFetchedPlan: false })
|
||||
|
||||
const { container } = render(<HeaderBillingBtn />)
|
||||
|
||||
expect(container.innerHTML).toBe('')
|
||||
})
|
||||
|
||||
it('should call onClick when clicking pro/team badge in non-display-only mode', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClick = vi.fn()
|
||||
setupProviderContext({ type: Plan.professional })
|
||||
|
||||
render(<HeaderBillingBtn onClick={onClick} />)
|
||||
|
||||
await user.click(screen.getByText('pro'))
|
||||
|
||||
expect(onClick).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should not call onClick when isDisplayOnly is true', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClick = vi.fn()
|
||||
setupProviderContext({ type: Plan.professional })
|
||||
|
||||
render(<HeaderBillingBtn onClick={onClick} isDisplayOnly />)
|
||||
|
||||
await user.click(screen.getByText('pro'))
|
||||
|
||||
expect(onClick).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 6. PriorityLabel Integration
|
||||
// Tests priority badge display for different plan types
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
describe('PriorityLabel Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setupAppContext()
|
||||
})
|
||||
|
||||
it('should display "standard" priority for sandbox plan', () => {
|
||||
setupProviderContext({ type: Plan.sandbox })
|
||||
|
||||
render(<PriorityLabel />)
|
||||
|
||||
expect(screen.getByText(/plansCommon\.priority\.standard/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display "priority" for professional plan with icon', () => {
|
||||
setupProviderContext({ type: Plan.professional })
|
||||
|
||||
const { container } = render(<PriorityLabel />)
|
||||
|
||||
expect(screen.getByText(/plansCommon\.priority\.priority/i)).toBeInTheDocument()
|
||||
// Professional plan should show the priority icon
|
||||
expect(container.querySelector('svg')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display "top-priority" for team plan with icon', () => {
|
||||
setupProviderContext({ type: Plan.team })
|
||||
|
||||
const { container } = render(<PriorityLabel />)
|
||||
|
||||
expect(screen.getByText(/plansCommon\.priority\.top-priority/i)).toBeInTheDocument()
|
||||
expect(container.querySelector('svg')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display "top-priority" for enterprise plan', () => {
|
||||
setupProviderContext({ type: Plan.enterprise })
|
||||
|
||||
render(<PriorityLabel />)
|
||||
|
||||
expect(screen.getByText(/plansCommon\.priority\.top-priority/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 7. Usage Display Edge Cases
|
||||
// Tests storage mode, threshold logic, and progress bar color integration
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
describe('Usage Display Edge Cases', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setupAppContext()
|
||||
})
|
||||
|
||||
// Vector space storage mode behavior
|
||||
describe('VectorSpace storage mode in PlanComp', () => {
|
||||
it('should show "< 50" for sandbox plan with low vector space usage', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { vectorSpace: 10 },
|
||||
total: { vectorSpace: 50 },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
// Storage mode: usage below threshold shows "< 50"
|
||||
expect(screen.getByText(/</)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show indeterminate progress bar for usage below threshold', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { vectorSpace: 10 },
|
||||
total: { vectorSpace: 50 },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
// Should have an indeterminate progress bar
|
||||
expect(screen.getByTestId('billing-progress-bar-indeterminate')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show actual usage for pro plan above threshold', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.professional,
|
||||
usage: { vectorSpace: 1024 },
|
||||
total: { vectorSpace: 5120 },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
// Pro plan above threshold shows actual value
|
||||
expect(screen.getByText('1024')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Progress bar color logic through real components
|
||||
describe('Progress bar color reflects usage severity', () => {
|
||||
it('should show normal color for low usage percentage', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { buildApps: 1 },
|
||||
total: { buildApps: 5 },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
// 20% usage - normal color
|
||||
const progressBars = screen.getAllByTestId('billing-progress-bar')
|
||||
// At least one should have the normal progress color
|
||||
const hasNormalColor = progressBars.some(bar =>
|
||||
bar.classList.contains('bg-components-progress-bar-progress-solid'),
|
||||
)
|
||||
expect(hasNormalColor).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
// Reset days calculation in PlanComp
|
||||
describe('Reset days integration', () => {
|
||||
it('should not show reset for sandbox trigger events (no reset_date)', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
total: { triggerEvents: 3000 },
|
||||
reset: { triggerEvents: null },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
// Find the trigger events section - should not have reset text
|
||||
const triggerSection = screen.getByText(/usagePage\.triggerEvents/i)
|
||||
const parent = triggerSection.closest('[class*="flex flex-col"]')
|
||||
// No reset text should appear (sandbox doesn't show reset for triggerEvents)
|
||||
expect(parent?.textContent).not.toContain('usagePage.resetsIn')
|
||||
})
|
||||
|
||||
it('should show reset for professional trigger events with reset date', () => {
|
||||
setupProviderContext({
|
||||
type: Plan.professional,
|
||||
total: { triggerEvents: 20000 },
|
||||
reset: { triggerEvents: 14 },
|
||||
})
|
||||
|
||||
render(<PlanComp loc="test" />)
|
||||
|
||||
// Professional plan with finite triggerEvents should show reset
|
||||
const resetTexts = screen.getAllByText(/usagePage\.resetsIn/i)
|
||||
expect(resetTexts.length).toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
// 8. Cross-Component Upgrade Flow (End-to-End)
|
||||
// Tests the complete chain: capacity alert → upgrade button → pricing
|
||||
// ═══════════════════════════════════════════════════════════════════════════
|
||||
describe('Cross-Component Upgrade Flow', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setupAppContext()
|
||||
})
|
||||
|
||||
it('should trigger pricing from AppsFull upgrade button', async () => {
|
||||
const user = userEvent.setup()
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { buildApps: 5 },
|
||||
total: { buildApps: 5 },
|
||||
})
|
||||
|
||||
render(<AppsFull loc="app-create" />)
|
||||
|
||||
const upgradeText = screen.getByText(/upgradeBtn\.encourageShort/i)
|
||||
await user.click(upgradeText)
|
||||
|
||||
expect(mockSetShowPricingModal).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should trigger pricing from VectorSpaceFull upgrade button', async () => {
|
||||
const user = userEvent.setup()
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { vectorSpace: 50 },
|
||||
total: { vectorSpace: 50 },
|
||||
})
|
||||
|
||||
render(<VectorSpaceFull />)
|
||||
|
||||
const upgradeText = screen.getByText(/upgradeBtn\.encourage$/i)
|
||||
await user.click(upgradeText)
|
||||
|
||||
expect(mockSetShowPricingModal).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should trigger pricing from AnnotationFull upgrade button', async () => {
|
||||
const user = userEvent.setup()
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { annotatedResponse: 10 },
|
||||
total: { annotatedResponse: 10 },
|
||||
})
|
||||
|
||||
render(<AnnotationFull />)
|
||||
|
||||
const upgradeText = screen.getByText(/upgradeBtn\.encourage$/i)
|
||||
await user.click(upgradeText)
|
||||
|
||||
expect(mockSetShowPricingModal).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should trigger pricing from TriggerEventsLimitModal through PlanUpgradeModal', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClose = vi.fn()
|
||||
setupProviderContext({ type: Plan.professional })
|
||||
|
||||
render(
|
||||
<TriggerEventsLimitModal
|
||||
show={true}
|
||||
onClose={onClose}
|
||||
onUpgrade={vi.fn()}
|
||||
usage={20000}
|
||||
total={20000}
|
||||
/>,
|
||||
)
|
||||
|
||||
// TriggerEventsLimitModal passes onUpgrade to PlanUpgradeModal
|
||||
// PlanUpgradeModal's upgrade button calls onClose then onUpgrade
|
||||
const upgradeBtn = screen.getByText(/triggerLimitModal\.upgrade/i)
|
||||
await user.click(upgradeBtn)
|
||||
|
||||
expect(onClose).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should trigger pricing from AnnotationFullModal upgrade button', async () => {
|
||||
const user = userEvent.setup()
|
||||
setupProviderContext({
|
||||
type: Plan.sandbox,
|
||||
usage: { annotatedResponse: 10 },
|
||||
total: { annotatedResponse: 10 },
|
||||
})
|
||||
|
||||
render(<AnnotationFullModal show={true} onHide={vi.fn()} />)
|
||||
|
||||
const upgradeText = screen.getByText(/upgradeBtn\.encourage$/i)
|
||||
await user.click(upgradeText)
|
||||
|
||||
expect(mockSetShowPricingModal).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
web/__tests__/billing/cloud-plan-payment-flow.test.tsx (new normal file, 296 lines)
@ -0,0 +1,296 @@
|
||||
/**
|
||||
 * Integration test: Cloud Plan Payment Flow
 *
 * Tests the payment flow for cloud plan items:
 * CloudPlanItem → Button click → permission check → fetch URL → redirect
 *
 * Covers plan comparison, downgrade prevention, monthly/yearly pricing,
 * and workspace manager permission enforcement.
 */
import type { BasicPlan } from '@/app/components/billing/type'
import { cleanup, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import * as React from 'react'
import { ALL_PLANS } from '@/app/components/billing/config'
import { PlanRange } from '@/app/components/billing/pricing/plan-switcher/plan-range-switcher'
import CloudPlanItem from '@/app/components/billing/pricing/plans/cloud-plan-item'
import { Plan } from '@/app/components/billing/type'

// ─── Mock state ──────────────────────────────────────────────────────────────
let mockAppCtx: Record<string, unknown> = {}
const mockFetchSubscriptionUrls = vi.fn()
const mockInvoices = vi.fn()
const mockOpenAsyncWindow = vi.fn()
const mockToastNotify = vi.fn()

// ─── Context mocks ───────────────────────────────────────────────────────────
vi.mock('@/context/app-context', () => ({
  useAppContext: () => mockAppCtx,
}))

vi.mock('@/context/i18n', () => ({
  useGetLanguage: () => 'en-US',
}))

// ─── Service mocks ───────────────────────────────────────────────────────────
vi.mock('@/service/billing', () => ({
  fetchSubscriptionUrls: (...args: unknown[]) => mockFetchSubscriptionUrls(...args),
}))

vi.mock('@/service/client', () => ({
  consoleClient: {
    billing: {
      invoices: () => mockInvoices(),
    },
  },
}))

vi.mock('@/hooks/use-async-window-open', () => ({
  useAsyncWindowOpen: () => mockOpenAsyncWindow,
}))

vi.mock('@/app/components/base/toast', () => ({
  default: { notify: (args: unknown) => mockToastNotify(args) },
}))

// ─── Navigation mocks ────────────────────────────────────────────────────────
vi.mock('next/navigation', () => ({
  useRouter: () => ({ push: vi.fn() }),
  usePathname: () => '/billing',
  useSearchParams: () => new URLSearchParams(),
}))

// ─── Helpers ─────────────────────────────────────────────────────────────────
const setupAppContext = (overrides: Record<string, unknown> = {}) => {
  mockAppCtx = {
    isCurrentWorkspaceManager: true,
    ...overrides,
  }
}

type RenderCloudPlanItemOptions = {
  currentPlan?: BasicPlan
  plan?: BasicPlan
  planRange?: PlanRange
  canPay?: boolean
}

const renderCloudPlanItem = ({
  currentPlan = Plan.sandbox,
  plan = Plan.professional,
  planRange = PlanRange.monthly,
  canPay = true,
}: RenderCloudPlanItemOptions = {}) => {
  return render(
    <CloudPlanItem
      currentPlan={currentPlan}
      plan={plan}
      planRange={planRange}
      canPay={canPay}
    />,
  )
}

// ═══════════════════════════════════════════════════════════════════════════════
describe('Cloud Plan Payment Flow', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    cleanup()
    setupAppContext()
    mockFetchSubscriptionUrls.mockResolvedValue({ url: 'https://pay.example.com/checkout' })
    mockInvoices.mockResolvedValue({ url: 'https://billing.example.com/invoices' })
  })

  // ─── 1. Plan Display ────────────────────────────────────────────────────
  describe('Plan display', () => {
    it('should render plan name and description', () => {
      renderCloudPlanItem({ plan: Plan.professional })

      expect(screen.getByText(/plans\.professional\.name/i)).toBeInTheDocument()
      expect(screen.getByText(/plans\.professional\.description/i)).toBeInTheDocument()
    })

    it('should show "Free" price for sandbox plan', () => {
      renderCloudPlanItem({ plan: Plan.sandbox })

      expect(screen.getByText(/plansCommon\.free/i)).toBeInTheDocument()
    })

    it('should show monthly price for paid plans', () => {
      renderCloudPlanItem({ plan: Plan.professional, planRange: PlanRange.monthly })

      expect(screen.getByText(`$${ALL_PLANS.professional.price}`)).toBeInTheDocument()
    })

    it('should show yearly discounted price (10 months) and strikethrough original (12 months)', () => {
      renderCloudPlanItem({ plan: Plan.professional, planRange: PlanRange.yearly })

      const yearlyPrice = ALL_PLANS.professional.price * 10
      const originalPrice = ALL_PLANS.professional.price * 12

      expect(screen.getByText(`$${yearlyPrice}`)).toBeInTheDocument()
      expect(screen.getByText(`$${originalPrice}`)).toBeInTheDocument()
    })

    it('should show "most popular" badge for professional plan', () => {
      renderCloudPlanItem({ plan: Plan.professional })

      expect(screen.getByText(/plansCommon\.mostPopular/i)).toBeInTheDocument()
    })

    it('should not show "most popular" badge for sandbox or team plans', () => {
      const { unmount } = renderCloudPlanItem({ plan: Plan.sandbox })
      expect(screen.queryByText(/plansCommon\.mostPopular/i)).not.toBeInTheDocument()
      unmount()

      renderCloudPlanItem({ plan: Plan.team })
      expect(screen.queryByText(/plansCommon\.mostPopular/i)).not.toBeInTheDocument()
    })
  })

  // ─── 2. Button Text Logic ───────────────────────────────────────────────
  describe('Button text logic', () => {
    it('should show "Current Plan" when plan matches current plan', () => {
      renderCloudPlanItem({ currentPlan: Plan.professional, plan: Plan.professional })

      expect(screen.getByText(/plansCommon\.currentPlan/i)).toBeInTheDocument()
    })

    it('should show "Start for Free" for sandbox plan when not current', () => {
      renderCloudPlanItem({ currentPlan: Plan.professional, plan: Plan.sandbox })

      expect(screen.getByText(/plansCommon\.startForFree/i)).toBeInTheDocument()
    })

    it('should show "Start Building" for professional plan when not current', () => {
      renderCloudPlanItem({ currentPlan: Plan.sandbox, plan: Plan.professional })

      expect(screen.getByText(/plansCommon\.startBuilding/i)).toBeInTheDocument()
    })

    it('should show "Get Started" for team plan when not current', () => {
      renderCloudPlanItem({ currentPlan: Plan.sandbox, plan: Plan.team })

      expect(screen.getByText(/plansCommon\.getStarted/i)).toBeInTheDocument()
    })
  })

  // ─── 3. Downgrade Prevention ────────────────────────────────────────────
  describe('Downgrade prevention', () => {
    it('should disable sandbox button when user is on professional plan (downgrade)', () => {
      renderCloudPlanItem({ currentPlan: Plan.professional, plan: Plan.sandbox })

      const button = screen.getByRole('button')
      expect(button).toBeDisabled()
    })

    it('should disable sandbox and professional buttons when user is on team plan', () => {
      const { unmount } = renderCloudPlanItem({ currentPlan: Plan.team, plan: Plan.sandbox })
      expect(screen.getByRole('button')).toBeDisabled()
      unmount()

      renderCloudPlanItem({ currentPlan: Plan.team, plan: Plan.professional })
      expect(screen.getByRole('button')).toBeDisabled()
    })

    it('should not disable current paid plan button (for invoice management)', () => {
      renderCloudPlanItem({ currentPlan: Plan.professional, plan: Plan.professional })

      const button = screen.getByRole('button')
      expect(button).not.toBeDisabled()
    })

    it('should enable higher-tier plan buttons for upgrade', () => {
      renderCloudPlanItem({ currentPlan: Plan.sandbox, plan: Plan.team })

      const button = screen.getByRole('button')
      expect(button).not.toBeDisabled()
    })
  })

  // ─── 4. Payment URL Flow ────────────────────────────────────────────────
  describe('Payment URL flow', () => {
    it('should call fetchSubscriptionUrls with plan and "month" for monthly range', async () => {
      const user = userEvent.setup()
      // Simulate clicking on a professional plan button (user is on sandbox)
      renderCloudPlanItem({
        currentPlan: Plan.sandbox,
        plan: Plan.professional,
        planRange: PlanRange.monthly,
      })

      const button = screen.getByRole('button')
      await user.click(button)

      await waitFor(() => {
        expect(mockFetchSubscriptionUrls).toHaveBeenCalledWith(Plan.professional, 'month')
      })
    })

    it('should call fetchSubscriptionUrls with plan and "year" for yearly range', async () => {
      const user = userEvent.setup()
      renderCloudPlanItem({
        currentPlan: Plan.sandbox,
        plan: Plan.team,
        planRange: PlanRange.yearly,
      })

      const button = screen.getByRole('button')
      await user.click(button)

      await waitFor(() => {
        expect(mockFetchSubscriptionUrls).toHaveBeenCalledWith(Plan.team, 'year')
      })
    })

    it('should open invoice management for current paid plan', async () => {
      const user = userEvent.setup()
      renderCloudPlanItem({ currentPlan: Plan.professional, plan: Plan.professional })

      const button = screen.getByRole('button')
      await user.click(button)

      await waitFor(() => {
        expect(mockOpenAsyncWindow).toHaveBeenCalled()
      })
      // Should NOT call fetchSubscriptionUrls (invoice, not subscription)
      expect(mockFetchSubscriptionUrls).not.toHaveBeenCalled()
    })

    it('should not do anything when clicking on sandbox free plan button', async () => {
      const user = userEvent.setup()
      renderCloudPlanItem({ currentPlan: Plan.sandbox, plan: Plan.sandbox })

      const button = screen.getByRole('button')
      await user.click(button)

      // Wait a tick and verify no actions were taken
      await waitFor(() => {
        expect(mockFetchSubscriptionUrls).not.toHaveBeenCalled()
        expect(mockOpenAsyncWindow).not.toHaveBeenCalled()
      })
    })
  })

  // ─── 5. Permission Check ────────────────────────────────────────────────
  describe('Permission check', () => {
    it('should show error toast when non-manager clicks upgrade button', async () => {
      setupAppContext({ isCurrentWorkspaceManager: false })
      const user = userEvent.setup()
      renderCloudPlanItem({ currentPlan: Plan.sandbox, plan: Plan.professional })

      const button = screen.getByRole('button')
      await user.click(button)

      await waitFor(() => {
        expect(mockToastNotify).toHaveBeenCalledWith(
          expect.objectContaining({
            type: 'error',
          }),
        )
      })
      // Should not proceed with payment
      expect(mockFetchSubscriptionUrls).not.toHaveBeenCalled()
    })
  })
})
318
web/__tests__/billing/education-verification-flow.test.tsx
Normal file
@ -0,0 +1,318 @@
/**
 * Integration test: Education Verification Flow
 *
 * Tests the education plan verification flow in PlanComp:
 * PlanComp → handleVerify → useEducationVerify → router.push → education-apply
 * PlanComp → handleVerify → error → show VerifyStateModal
 *
 * Also covers education button visibility based on context flags.
 */
import type { UsagePlanInfo, UsageResetInfo } from '@/app/components/billing/type'
import { cleanup, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import * as React from 'react'
import { defaultPlan } from '@/app/components/billing/config'
import PlanComp from '@/app/components/billing/plan'
import { Plan } from '@/app/components/billing/type'

// ─── Mock state ──────────────────────────────────────────────────────────────
let mockProviderCtx: Record<string, unknown> = {}
let mockAppCtx: Record<string, unknown> = {}
const mockSetShowPricingModal = vi.fn()
const mockSetShowAccountSettingModal = vi.fn()
const mockRouterPush = vi.fn()
const mockMutateAsync = vi.fn()

// ─── Context mocks ───────────────────────────────────────────────────────────
vi.mock('@/context/provider-context', () => ({
  useProviderContext: () => mockProviderCtx,
}))

vi.mock('@/context/app-context', () => ({
  useAppContext: () => mockAppCtx,
}))

vi.mock('@/context/modal-context', () => ({
  useModalContext: () => ({
    setShowPricingModal: mockSetShowPricingModal,
  }),
  useModalContextSelector: (selector: (s: Record<string, unknown>) => unknown) =>
    selector({
      setShowAccountSettingModal: mockSetShowAccountSettingModal,
    }),
}))

vi.mock('@/context/i18n', () => ({
  useGetLanguage: () => 'en-US',
}))

// ─── Service mocks ───────────────────────────────────────────────────────────
vi.mock('@/service/use-education', () => ({
  useEducationVerify: () => ({
    mutateAsync: mockMutateAsync,
    isPending: false,
  }),
}))

vi.mock('@/service/use-billing', () => ({
  useBillingUrl: () => ({
    data: 'https://billing.example.com',
    isFetching: false,
    refetch: vi.fn(),
  }),
}))

// ─── Navigation mocks ────────────────────────────────────────────────────────
vi.mock('next/navigation', () => ({
  useRouter: () => ({ push: mockRouterPush }),
  usePathname: () => '/billing',
  useSearchParams: () => new URLSearchParams(),
}))

vi.mock('@/hooks/use-async-window-open', () => ({
  useAsyncWindowOpen: () => vi.fn(),
}))

// ─── External component mocks ────────────────────────────────────────────────
vi.mock('@/app/education-apply/verify-state-modal', () => ({
  default: ({ isShow, title, content, email, showLink }: {
    isShow: boolean
    title?: string
    content?: string
    email?: string
    showLink?: boolean
  }) =>
    isShow
      ? (
        <div data-testid="verify-state-modal">
          {title && <span data-testid="modal-title">{title}</span>}
          {content && <span data-testid="modal-content">{content}</span>}
          {email && <span data-testid="modal-email">{email}</span>}
          {showLink && <span data-testid="modal-show-link">link</span>}
        </div>
      )
      : null,
}))

// ─── Test data factories ─────────────────────────────────────────────────────
type PlanOverrides = {
  type?: string
  usage?: Partial<UsagePlanInfo>
  total?: Partial<UsagePlanInfo>
  reset?: Partial<UsageResetInfo>
}

const createPlanData = (overrides: PlanOverrides = {}) => ({
  ...defaultPlan,
  ...overrides,
  type: overrides.type ?? defaultPlan.type,
  usage: { ...defaultPlan.usage, ...overrides.usage },
  total: { ...defaultPlan.total, ...overrides.total },
  reset: { ...defaultPlan.reset, ...overrides.reset },
})

const setupContexts = (
  planOverrides: PlanOverrides = {},
  providerOverrides: Record<string, unknown> = {},
  appOverrides: Record<string, unknown> = {},
) => {
  mockProviderCtx = {
    plan: createPlanData(planOverrides),
    enableBilling: true,
    isFetchedPlan: true,
    enableEducationPlan: false,
    isEducationAccount: false,
    allowRefreshEducationVerify: false,
    ...providerOverrides,
  }
  mockAppCtx = {
    isCurrentWorkspaceManager: true,
    userProfile: { email: 'student@university.edu' },
    langGeniusVersionInfo: { current_version: '1.0.0' },
    ...appOverrides,
  }
}

// ═══════════════════════════════════════════════════════════════════════════════
describe('Education Verification Flow', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    cleanup()
    setupContexts()
  })

  // ─── 1. Education Button Visibility ─────────────────────────────────────
  describe('Education button visibility', () => {
    it('should not show verify button when enableEducationPlan is false', () => {
      setupContexts({}, { enableEducationPlan: false })

      render(<PlanComp loc="test" />)

      expect(screen.queryByText(/toVerified/i)).not.toBeInTheDocument()
    })

    it('should show verify button when enableEducationPlan is true and not yet verified', () => {
      setupContexts({}, { enableEducationPlan: true, isEducationAccount: false })

      render(<PlanComp loc="test" />)

      expect(screen.getByText(/toVerified/i)).toBeInTheDocument()
    })

    it('should not show verify button when already verified and not about to expire', () => {
      setupContexts({}, {
        enableEducationPlan: true,
        isEducationAccount: true,
        allowRefreshEducationVerify: false,
      })

      render(<PlanComp loc="test" />)

      expect(screen.queryByText(/toVerified/i)).not.toBeInTheDocument()
    })

    it('should show verify button when about to expire (allowRefreshEducationVerify is true)', () => {
      setupContexts({}, {
        enableEducationPlan: true,
        isEducationAccount: true,
        allowRefreshEducationVerify: true,
      })

      render(<PlanComp loc="test" />)

      // Shown because isAboutToExpire = allowRefreshEducationVerify = true
      expect(screen.getByText(/toVerified/i)).toBeInTheDocument()
    })
  })

  // ─── 2. Successful Verification Flow ────────────────────────────────────
  describe('Successful verification flow', () => {
    it('should navigate to education-apply with token on successful verification', async () => {
      mockMutateAsync.mockResolvedValue({ token: 'edu-token-123' })
      setupContexts({}, { enableEducationPlan: true, isEducationAccount: false })
      const user = userEvent.setup()

      render(<PlanComp loc="test" />)

      const verifyButton = screen.getByText(/toVerified/i)
      await user.click(verifyButton)

      await waitFor(() => {
        expect(mockMutateAsync).toHaveBeenCalledTimes(1)
        expect(mockRouterPush).toHaveBeenCalledWith('/education-apply?token=edu-token-123')
      })
    })

    it('should remove education verifying flag from localStorage on success', async () => {
      mockMutateAsync.mockResolvedValue({ token: 'token-xyz' })
      setupContexts({}, { enableEducationPlan: true, isEducationAccount: false })
      const user = userEvent.setup()

      render(<PlanComp loc="test" />)

      await user.click(screen.getByText(/toVerified/i))

      await waitFor(() => {
        expect(localStorage.removeItem).toHaveBeenCalledWith('educationVerifying')
      })
    })
  })

  // ─── 3. Failed Verification Flow ────────────────────────────────────────
  describe('Failed verification flow', () => {
    it('should show VerifyStateModal with rejection info on error', async () => {
      mockMutateAsync.mockRejectedValue(new Error('Verification failed'))
      setupContexts({}, { enableEducationPlan: true, isEducationAccount: false })
      const user = userEvent.setup()

      render(<PlanComp loc="test" />)

      // Modal should not be visible initially
      expect(screen.queryByTestId('verify-state-modal')).not.toBeInTheDocument()

      const verifyButton = screen.getByText(/toVerified/i)
      await user.click(verifyButton)

      // Modal should appear after verification failure
      await waitFor(() => {
        expect(screen.getByTestId('verify-state-modal')).toBeInTheDocument()
      })

      // Modal should display rejection title and content
      expect(screen.getByTestId('modal-title')).toHaveTextContent(/rejectTitle/i)
      expect(screen.getByTestId('modal-content')).toHaveTextContent(/rejectContent/i)
    })

    it('should show email and link in VerifyStateModal', async () => {
      mockMutateAsync.mockRejectedValue(new Error('fail'))
      setupContexts({}, { enableEducationPlan: true, isEducationAccount: false })
      const user = userEvent.setup()

      render(<PlanComp loc="test" />)

      await user.click(screen.getByText(/toVerified/i))

      await waitFor(() => {
        expect(screen.getByTestId('modal-email')).toHaveTextContent('student@university.edu')
        expect(screen.getByTestId('modal-show-link')).toBeInTheDocument()
      })
    })

    it('should not redirect on verification failure', async () => {
      mockMutateAsync.mockRejectedValue(new Error('fail'))
      setupContexts({}, { enableEducationPlan: true, isEducationAccount: false })
      const user = userEvent.setup()

      render(<PlanComp loc="test" />)

      await user.click(screen.getByText(/toVerified/i))

      await waitFor(() => {
        expect(screen.getByTestId('verify-state-modal')).toBeInTheDocument()
      })

      // Should NOT navigate
      expect(mockRouterPush).not.toHaveBeenCalled()
    })
  })

  // ─── 4. Education + Upgrade Coexistence ─────────────────────────────────
  describe('Education and upgrade button coexistence', () => {
    it('should show both education verify and upgrade buttons for sandbox user', () => {
      setupContexts(
        { type: Plan.sandbox },
        { enableEducationPlan: true, isEducationAccount: false },
      )

      render(<PlanComp loc="test" />)

      expect(screen.getByText(/toVerified/i)).toBeInTheDocument()
      expect(screen.getByText(/upgradeBtn\.encourageShort/i)).toBeInTheDocument()
    })

    it('should not show upgrade button for enterprise plan', () => {
      setupContexts(
        { type: Plan.enterprise },
        { enableEducationPlan: true, isEducationAccount: false },
      )

      render(<PlanComp loc="test" />)

      expect(screen.getByText(/toVerified/i)).toBeInTheDocument()
      expect(screen.queryByText(/upgradeBtn\.encourageShort/i)).not.toBeInTheDocument()
      expect(screen.queryByText(/upgradeBtn\.plain/i)).not.toBeInTheDocument()
    })

    it('should show team plan with plain upgrade button and education button', () => {
      setupContexts(
        { type: Plan.team },
        { enableEducationPlan: true, isEducationAccount: false },
      )

      render(<PlanComp loc="test" />)

      expect(screen.getByText(/toVerified/i)).toBeInTheDocument()
      expect(screen.getByText(/upgradeBtn\.plain/i)).toBeInTheDocument()
    })
  })
})
326
web/__tests__/billing/partner-stack-flow.test.tsx
Normal file
@ -0,0 +1,326 @@
/**
 * Integration test: Partner Stack Flow
 *
 * Tests the PartnerStack integration:
 * PartnerStack component → usePSInfo hook → cookie management → bind API call
 *
 * Covers URL param reading, cookie persistence, API bind on mount,
 * cookie cleanup after successful bind, and error handling for 400 status.
 */
import { act, cleanup, render, renderHook, waitFor } from '@testing-library/react'
import Cookies from 'js-cookie'
import * as React from 'react'
import usePSInfo from '@/app/components/billing/partner-stack/use-ps-info'
import { PARTNER_STACK_CONFIG } from '@/config'

// ─── Mock state ──────────────────────────────────────────────────────────────
let mockSearchParams = new URLSearchParams()
const mockMutateAsync = vi.fn()

// ─── Module mocks ────────────────────────────────────────────────────────────
vi.mock('next/navigation', () => ({
  useSearchParams: () => mockSearchParams,
  useRouter: () => ({ push: vi.fn() }),
  usePathname: () => '/',
}))

vi.mock('@/service/use-billing', () => ({
  useBindPartnerStackInfo: () => ({
    mutateAsync: mockMutateAsync,
  }),
  useBillingUrl: () => ({
    data: '',
    isFetching: false,
    refetch: vi.fn(),
  }),
}))

vi.mock('@/config', async (importOriginal) => {
  const actual = await importOriginal<Record<string, unknown>>()
  return {
    ...actual,
    IS_CLOUD_EDITION: true,
    PARTNER_STACK_CONFIG: {
      cookieName: 'partner_stack_info',
      saveCookieDays: 90,
    },
  }
})

// ─── Cookie helpers ──────────────────────────────────────────────────────────
const getCookieData = () => {
  const raw = Cookies.get(PARTNER_STACK_CONFIG.cookieName)
  if (!raw)
    return null
  try {
    return JSON.parse(raw)
  }
  catch {
    return null
  }
}

const setCookieData = (data: Record<string, string>) => {
  Cookies.set(PARTNER_STACK_CONFIG.cookieName, JSON.stringify(data))
}

const clearCookie = () => {
  Cookies.remove(PARTNER_STACK_CONFIG.cookieName)
}

// ═══════════════════════════════════════════════════════════════════════════════
describe('Partner Stack Flow', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    cleanup()
    clearCookie()
    mockSearchParams = new URLSearchParams()
    mockMutateAsync.mockResolvedValue({})
  })

  // ─── 1. URL Param Reading ───────────────────────────────────────────────
  describe('URL param reading', () => {
    it('should read ps_partner_key and ps_xid from URL search params', () => {
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'partner-123',
        ps_xid: 'click-456',
      })

      const { result } = renderHook(() => usePSInfo())

      expect(result.current.psPartnerKey).toBe('partner-123')
      expect(result.current.psClickId).toBe('click-456')
    })

    it('should fall back to cookie when URL params are not present', () => {
      setCookieData({ partnerKey: 'cookie-partner', clickId: 'cookie-click' })

      const { result } = renderHook(() => usePSInfo())

      expect(result.current.psPartnerKey).toBe('cookie-partner')
      expect(result.current.psClickId).toBe('cookie-click')
    })

    it('should prefer URL params over cookie values', () => {
      setCookieData({ partnerKey: 'cookie-partner', clickId: 'cookie-click' })
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'url-partner',
        ps_xid: 'url-click',
      })

      const { result } = renderHook(() => usePSInfo())

      expect(result.current.psPartnerKey).toBe('url-partner')
      expect(result.current.psClickId).toBe('url-click')
    })

    it('should return null for both values when no params and no cookie', () => {
      const { result } = renderHook(() => usePSInfo())

      expect(result.current.psPartnerKey).toBeUndefined()
      expect(result.current.psClickId).toBeUndefined()
    })
  })

  // ─── 2. Cookie Persistence (saveOrUpdate) ───────────────────────────────
  describe('Cookie persistence via saveOrUpdate', () => {
    it('should save PS info to cookie when URL params provide new values', () => {
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'new-partner',
        ps_xid: 'new-click',
      })

      const { result } = renderHook(() => usePSInfo())
      act(() => result.current.saveOrUpdate())

      const cookieData = getCookieData()
      expect(cookieData).toEqual({
        partnerKey: 'new-partner',
        clickId: 'new-click',
      })
    })

    it('should not update cookie when values have not changed', () => {
      setCookieData({ partnerKey: 'same-partner', clickId: 'same-click' })
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'same-partner',
        ps_xid: 'same-click',
      })

      const cookieSetSpy = vi.spyOn(Cookies, 'set')
      const { result } = renderHook(() => usePSInfo())
      act(() => result.current.saveOrUpdate())

      // Should not call set because values haven't changed
      expect(cookieSetSpy).not.toHaveBeenCalled()
      cookieSetSpy.mockRestore()
    })

    it('should not save to cookie when partner key is missing', () => {
      mockSearchParams = new URLSearchParams({
        ps_xid: 'click-only',
      })

      const cookieSetSpy = vi.spyOn(Cookies, 'set')
      const { result } = renderHook(() => usePSInfo())
      act(() => result.current.saveOrUpdate())

      expect(cookieSetSpy).not.toHaveBeenCalled()
      cookieSetSpy.mockRestore()
    })

    it('should not save to cookie when click ID is missing', () => {
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'partner-only',
      })

      const cookieSetSpy = vi.spyOn(Cookies, 'set')
      const { result } = renderHook(() => usePSInfo())
      act(() => result.current.saveOrUpdate())

      expect(cookieSetSpy).not.toHaveBeenCalled()
      cookieSetSpy.mockRestore()
    })
  })

  // ─── 3. Bind API Flow ───────────────────────────────────────────────────
  describe('Bind API flow', () => {
    it('should call mutateAsync with partnerKey and clickId on bind', async () => {
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'bind-partner',
        ps_xid: 'bind-click',
      })

      const { result } = renderHook(() => usePSInfo())
      await act(async () => {
        await result.current.bind()
      })

      expect(mockMutateAsync).toHaveBeenCalledWith({
        partnerKey: 'bind-partner',
        clickId: 'bind-click',
      })
    })

    it('should remove cookie after successful bind', async () => {
      setCookieData({ partnerKey: 'rm-partner', clickId: 'rm-click' })
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'rm-partner',
        ps_xid: 'rm-click',
      })

      const { result } = renderHook(() => usePSInfo())
      await act(async () => {
        await result.current.bind()
      })

      // Cookie should be removed after successful bind
      expect(Cookies.get(PARTNER_STACK_CONFIG.cookieName)).toBeUndefined()
    })

    it('should remove cookie on 400 error (already bound)', async () => {
      mockMutateAsync.mockRejectedValue({ status: 400 })
      setCookieData({ partnerKey: 'err-partner', clickId: 'err-click' })
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'err-partner',
        ps_xid: 'err-click',
      })

      const { result } = renderHook(() => usePSInfo())
      await act(async () => {
        await result.current.bind()
      })

      // Cookie should be removed even on 400
      expect(Cookies.get(PARTNER_STACK_CONFIG.cookieName)).toBeUndefined()
    })

    it('should not remove cookie on non-400 errors', async () => {
      mockMutateAsync.mockRejectedValue({ status: 500 })
      setCookieData({ partnerKey: 'keep-partner', clickId: 'keep-click' })
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'keep-partner',
        ps_xid: 'keep-click',
      })

      const { result } = renderHook(() => usePSInfo())
      await act(async () => {
        await result.current.bind()
      })

      // Cookie should still exist for non-400 errors
      const cookieData = getCookieData()
      expect(cookieData).toBeTruthy()
    })

    it('should not call bind when partner key is missing', async () => {
      mockSearchParams = new URLSearchParams({
        ps_xid: 'click-only',
      })

      const { result } = renderHook(() => usePSInfo())
      await act(async () => {
        await result.current.bind()
      })

      expect(mockMutateAsync).not.toHaveBeenCalled()
    })

    it('should not call bind a second time (idempotency)', async () => {
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'partner-once',
        ps_xid: 'click-once',
      })

      const { result } = renderHook(() => usePSInfo())

      // First bind
      await act(async () => {
        await result.current.bind()
      })
      expect(mockMutateAsync).toHaveBeenCalledTimes(1)

      // Second bind should be skipped (hasBind = true)
      await act(async () => {
        await result.current.bind()
      })
      expect(mockMutateAsync).toHaveBeenCalledTimes(1)
    })
  })

  // ─── 4. PartnerStack Component Mount ────────────────────────────────────
  describe('PartnerStack component mount behavior', () => {
    it('should call saveOrUpdate and bind on mount when IS_CLOUD_EDITION is true', async () => {
      mockSearchParams = new URLSearchParams({
        ps_partner_key: 'mount-partner',
        ps_xid: 'mount-click',
      })

      // Use lazy import so the mocks are applied
      const { default: PartnerStack } = await import('@/app/components/billing/partner-stack')

      render(<PartnerStack />)

      // The component calls saveOrUpdate and bind in useEffect
      await waitFor(() => {
        // Bind should have been called
        expect(mockMutateAsync).toHaveBeenCalledWith({
          partnerKey: 'mount-partner',
          clickId: 'mount-click',
        })
      })

      // Cookie should have been saved (saveOrUpdate was called before bind)
      // After bind succeeds, cookie is removed
      expect(Cookies.get(PARTNER_STACK_CONFIG.cookieName)).toBeUndefined()
    })

    it('should render nothing (return null)', async () => {
      const { default: PartnerStack } = await import('@/app/components/billing/partner-stack')

      const { container } = render(<PartnerStack />)

      expect(container.innerHTML).toBe('')
    })
  })
})
327
web/__tests__/billing/pricing-modal-flow.test.tsx
Normal file
@ -0,0 +1,327 @@
/**
 * Integration test: Pricing Modal Flow
 *
 * Tests the full Pricing modal lifecycle:
 * Pricing → PlanSwitcher (category + range toggle) → Plans (cloud / self-hosted)
 * → CloudPlanItem / SelfHostedPlanItem → Footer
 *
 * Validates cross-component state propagation when the user switches between
 * cloud / self-hosted categories and monthly / yearly plan ranges.
 */
import { cleanup, render, screen } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import * as React from 'react'
import { ALL_PLANS } from '@/app/components/billing/config'
import Pricing from '@/app/components/billing/pricing'
import { Plan } from '@/app/components/billing/type'

// ─── Mock state ──────────────────────────────────────────────────────────────
let mockProviderCtx: Record<string, unknown> = {}
let mockAppCtx: Record<string, unknown> = {}

// ─── Context mocks ───────────────────────────────────────────────────────────
vi.mock('@/context/provider-context', () => ({
  useProviderContext: () => mockProviderCtx,
}))

vi.mock('@/context/app-context', () => ({
  useAppContext: () => mockAppCtx,
}))

vi.mock('@/context/i18n', () => ({
  useGetLanguage: () => 'en-US',
  useGetPricingPageLanguage: () => 'en',
}))

// ─── Service mocks ───────────────────────────────────────────────────────────
vi.mock('@/service/billing', () => ({
  fetchSubscriptionUrls: vi.fn().mockResolvedValue({ url: 'https://pay.example.com' }),
}))

vi.mock('@/service/client', () => ({
  consoleClient: {
    billing: {
      invoices: vi.fn().mockResolvedValue({ url: 'https://invoice.example.com' }),
    },
  },
}))

vi.mock('@/hooks/use-async-window-open', () => ({
  useAsyncWindowOpen: () => vi.fn(),
}))

// ─── Navigation mocks ────────────────────────────────────────────────────────
vi.mock('next/navigation', () => ({
  useRouter: () => ({ push: vi.fn() }),
  usePathname: () => '/billing',
  useSearchParams: () => new URLSearchParams(),
}))

// ─── External component mocks (lightweight) ──────────────────────────────────
vi.mock('@/app/components/base/icons/src/public/billing', () => ({
  Azure: () => <span data-testid="icon-azure" />,
  GoogleCloud: () => <span data-testid="icon-gcloud" />,
  AwsMarketplaceLight: () => <span data-testid="icon-aws-light" />,
  AwsMarketplaceDark: () => <span data-testid="icon-aws-dark" />,
}))

vi.mock('@/hooks/use-theme', () => ({
  default: () => ({ theme: 'light' }),
  useTheme: () => ({ theme: 'light' }),
}))

// Self-hosted List uses t() with returnObjects which returns string in mock;
// mock it to avoid deep i18n dependency (unit tests cover this component)
vi.mock('@/app/components/billing/pricing/plans/self-hosted-plan-item/list', () => ({
  default: ({ plan }: { plan: string }) => (
    <div data-testid={`self-hosted-list-${plan}`}>Features</div>
  ),
}))

// ─── Helpers ─────────────────────────────────────────────────────────────────
const defaultPlanData = {
  type: Plan.sandbox,
  usage: {
    buildApps: 1,
    teamMembers: 1,
    documentsUploadQuota: 0,
    vectorSpace: 10,
    annotatedResponse: 1,
    triggerEvents: 0,
    apiRateLimit: 0,
  },
  total: {
    buildApps: 5,
    teamMembers: 1,
    documentsUploadQuota: 50,
    vectorSpace: 50,
    annotatedResponse: 10,
    triggerEvents: 3000,
    apiRateLimit: 5000,
  },
}

const setupContexts = (planOverrides: Record<string, unknown> = {}, appOverrides: Record<string, unknown> = {}) => {
  mockProviderCtx = {
    plan: { ...defaultPlanData, ...planOverrides },
    enableBilling: true,
    isFetchedPlan: true,
    enableEducationPlan: false,
    isEducationAccount: false,
    allowRefreshEducationVerify: false,
  }
  mockAppCtx = {
    isCurrentWorkspaceManager: true,
    userProfile: { email: 'test@example.com' },
    langGeniusVersionInfo: { current_version: '1.0.0' },
    ...appOverrides,
  }
}

// ═══════════════════════════════════════════════════════════════════════════════
describe('Pricing Modal Flow', () => {
  const onCancel = vi.fn()

  beforeEach(() => {
    vi.clearAllMocks()
    cleanup()
    setupContexts()
  })

  // ─── 1. Initial Rendering ────────────────────────────────────────────────
  describe('Initial rendering', () => {
    it('should render header with close button and footer with pricing link', () => {
      render(<Pricing onCancel={onCancel} />)

      // Header close button exists (multiple plan buttons also exist)
      const buttons = screen.getAllByRole('button')
      expect(buttons.length).toBeGreaterThanOrEqual(1)
      // Footer pricing link
      expect(screen.getByText(/plansCommon\.comparePlanAndFeatures/i)).toBeInTheDocument()
    })

    it('should default to cloud category with three cloud plans', () => {
      render(<Pricing onCancel={onCancel} />)

      // Three cloud plans: sandbox, professional, team
      expect(screen.getByText(/plans\.sandbox\.name/i)).toBeInTheDocument()
      expect(screen.getByText(/plans\.professional\.name/i)).toBeInTheDocument()
      expect(screen.getByText(/plans\.team\.name/i)).toBeInTheDocument()
    })

    it('should show plan range switcher (annual billing toggle) by default for cloud', () => {
      render(<Pricing onCancel={onCancel} />)

      expect(screen.getByText(/plansCommon\.annualBilling/i)).toBeInTheDocument()
    })

    it('should show tax tip in footer for cloud category', () => {
      render(<Pricing onCancel={onCancel} />)

      // Use exact match to avoid matching taxTipSecond
      expect(screen.getByText('billing.plansCommon.taxTip')).toBeInTheDocument()
      expect(screen.getByText('billing.plansCommon.taxTipSecond')).toBeInTheDocument()
    })
  })

  // ─── 2. Category Switching ───────────────────────────────────────────────
  describe('Category switching', () => {
    it('should switch to self-hosted plans when clicking self-hosted tab', async () => {
      const user = userEvent.setup()
      render(<Pricing onCancel={onCancel} />)

      // Click the self-hosted tab
      const selfTab = screen.getByText(/plansCommon\.self/i)
      await user.click(selfTab)

      // Self-hosted plans should appear
      expect(screen.getByText(/plans\.community\.name/i)).toBeInTheDocument()
      expect(screen.getByText(/plans\.premium\.name/i)).toBeInTheDocument()
      expect(screen.getByText(/plans\.enterprise\.name/i)).toBeInTheDocument()

      // Cloud plans should disappear
      expect(screen.queryByText(/plans\.sandbox\.name/i)).not.toBeInTheDocument()
    })

    it('should hide plan range switcher for self-hosted category', async () => {
      const user = userEvent.setup()
      render(<Pricing onCancel={onCancel} />)

      await user.click(screen.getByText(/plansCommon\.self/i))

      // Annual billing toggle should not be visible
      expect(screen.queryByText(/plansCommon\.annualBilling/i)).not.toBeInTheDocument()
    })

    it('should hide tax tip in footer for self-hosted category', async () => {
      const user = userEvent.setup()
      render(<Pricing onCancel={onCancel} />)

      await user.click(screen.getByText(/plansCommon\.self/i))

      expect(screen.queryByText('billing.plansCommon.taxTip')).not.toBeInTheDocument()
    })

    it('should switch back to cloud plans when clicking cloud tab', async () => {
      const user = userEvent.setup()
      render(<Pricing onCancel={onCancel} />)

      // Switch to self-hosted
      await user.click(screen.getByText(/plansCommon\.self/i))
      expect(screen.queryByText(/plans\.sandbox\.name/i)).not.toBeInTheDocument()

      // Switch back to cloud
      await user.click(screen.getByText(/plansCommon\.cloud/i))
      expect(screen.getByText(/plans\.sandbox\.name/i)).toBeInTheDocument()
      expect(screen.getByText(/plansCommon\.annualBilling/i)).toBeInTheDocument()
    })
  })

  // ─── 3. Plan Range Switching (Monthly ↔ Yearly) ──────────────────────────
  describe('Plan range switching', () => {
    it('should show monthly prices by default', () => {
      render(<Pricing onCancel={onCancel} />)

      // Professional monthly price: $59
      const proPriceStr = `$${ALL_PLANS.professional.price}`
      expect(screen.getByText(proPriceStr)).toBeInTheDocument()

      // Team monthly price: $159
      const teamPriceStr = `$${ALL_PLANS.team.price}`
      expect(screen.getByText(teamPriceStr)).toBeInTheDocument()
    })

    it('should show "Free" for sandbox plan regardless of range', () => {
      render(<Pricing onCancel={onCancel} />)

      expect(screen.getByText(/plansCommon\.free/i)).toBeInTheDocument()
    })

    it('should show "most popular" badge only for professional plan', () => {
      render(<Pricing onCancel={onCancel} />)

      expect(screen.getByText(/plansCommon\.mostPopular/i)).toBeInTheDocument()
    })
  })

  // ─── 4. Cloud Plan Button States ─────────────────────────────────────────
  describe('Cloud plan button states', () => {
    it('should show "Current Plan" for the current plan (sandbox)', () => {
      setupContexts({ type: Plan.sandbox })
      render(<Pricing onCancel={onCancel} />)

      expect(screen.getByText(/plansCommon\.currentPlan/i)).toBeInTheDocument()
    })

    it('should show specific button text for non-current plans', () => {
      setupContexts({ type: Plan.sandbox })
      render(<Pricing onCancel={onCancel} />)

      // Professional button text
      expect(screen.getByText(/plansCommon\.startBuilding/i)).toBeInTheDocument()
      // Team button text
      expect(screen.getByText(/plansCommon\.getStarted/i)).toBeInTheDocument()
    })

    it('should mark sandbox as "Current Plan" for professional user (enterprise normalized to team)', () => {
      setupContexts({ type: Plan.enterprise })
      render(<Pricing onCancel={onCancel} />)

      // Enterprise is normalized to team for display, so team is "Current Plan"
      expect(screen.getByText(/plansCommon\.currentPlan/i)).toBeInTheDocument()
    })
  })

  // ─── 5. Self-Hosted Plan Details ─────────────────────────────────────────
  describe('Self-hosted plan details', () => {
    it('should show cloud provider icons only for premium plan', async () => {
      const user = userEvent.setup()
      render(<Pricing onCancel={onCancel} />)

      await user.click(screen.getByText(/plansCommon\.self/i))

      // Premium plan should show Azure and Google Cloud icons
      expect(screen.getByTestId('icon-azure')).toBeInTheDocument()
      expect(screen.getByTestId('icon-gcloud')).toBeInTheDocument()
    })

    it('should show "coming soon" text for premium plan cloud providers', async () => {
      const user = userEvent.setup()
      render(<Pricing onCancel={onCancel} />)

      await user.click(screen.getByText(/plansCommon\.self/i))

      expect(screen.getByText(/plans\.premium\.comingSoon/i)).toBeInTheDocument()
    })
  })

  // ─── 6. Close Handling ───────────────────────────────────────────────────
  describe('Close handling', () => {
    it('should call onCancel when pressing ESC key', () => {
      render(<Pricing onCancel={onCancel} />)

      // ahooks useKeyPress listens on document for keydown events
      document.dispatchEvent(new KeyboardEvent('keydown', {
        key: 'Escape',
        code: 'Escape',
        keyCode: 27,
        bubbles: true,
      }))

      expect(onCancel).toHaveBeenCalledTimes(1)
    })
  })

  // ─── 7. Pricing URL ──────────────────────────────────────────────────────
  describe('Pricing page URL', () => {
    it('should render pricing link with correct URL', () => {
      render(<Pricing onCancel={onCancel} />)

      const link = screen.getByText(/plansCommon\.comparePlanAndFeatures/i)
      expect(link.closest('a')).toHaveAttribute(
        'href',
        'https://dify.ai/en/pricing#plans-and-features',
      )
    })
  })
})
225
web/__tests__/billing/self-hosted-plan-flow.test.tsx
Normal file
@ -0,0 +1,225 @@
/**
 * Integration test: Self-Hosted Plan Flow
 *
 * Tests the self-hosted plan items:
 * SelfHostedPlanItem → Button click → permission check → redirect to external URL
 *
 * Covers community/premium/enterprise plan rendering, external URL navigation,
 * and workspace manager permission enforcement.
 */
import { cleanup, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import * as React from 'react'
import { contactSalesUrl, getStartedWithCommunityUrl, getWithPremiumUrl } from '@/app/components/billing/config'
import SelfHostedPlanItem from '@/app/components/billing/pricing/plans/self-hosted-plan-item'
import { SelfHostedPlan } from '@/app/components/billing/type'

let mockAppCtx: Record<string, unknown> = {}
const mockToastNotify = vi.fn()

const originalLocation = window.location
let assignedHref = ''

vi.mock('@/context/app-context', () => ({
  useAppContext: () => mockAppCtx,
}))

vi.mock('@/context/i18n', () => ({
  useGetLanguage: () => 'en-US',
}))

vi.mock('@/hooks/use-theme', () => ({
  default: () => ({ theme: 'light' }),
  useTheme: () => ({ theme: 'light' }),
}))

vi.mock('@/app/components/base/icons/src/public/billing', () => ({
  Azure: () => <span data-testid="icon-azure" />,
  GoogleCloud: () => <span data-testid="icon-gcloud" />,
  AwsMarketplaceLight: () => <span data-testid="icon-aws-light" />,
  AwsMarketplaceDark: () => <span data-testid="icon-aws-dark" />,
}))

vi.mock('@/app/components/base/toast', () => ({
  default: { notify: (args: unknown) => mockToastNotify(args) },
}))

vi.mock('@/app/components/billing/pricing/plans/self-hosted-plan-item/list', () => ({
  default: ({ plan }: { plan: string }) => (
    <div data-testid={`self-hosted-list-${plan}`}>Features</div>
  ),
}))

const setupAppContext = (overrides: Record<string, unknown> = {}) => {
  mockAppCtx = {
    isCurrentWorkspaceManager: true,
    ...overrides,
  }
}

describe('Self-Hosted Plan Flow', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    cleanup()
    setupAppContext()

    // Mock window.location with minimal getter/setter (Location props are non-enumerable)
    assignedHref = ''
    Object.defineProperty(window, 'location', {
      configurable: true,
      value: {
        get href() { return assignedHref },
        set href(value: string) { assignedHref = value },
      },
    })
  })

  afterEach(() => {
    // Restore original location
    Object.defineProperty(window, 'location', {
      configurable: true,
      value: originalLocation,
    })
  })

  // ─── 1. Plan Rendering ──────────────────────────────────────────────────
  describe('Plan rendering', () => {
    it('should render community plan with name and description', () => {
      render(<SelfHostedPlanItem plan={SelfHostedPlan.community} />)

      expect(screen.getByText(/plans\.community\.name/i)).toBeInTheDocument()
      expect(screen.getByText(/plans\.community\.description/i)).toBeInTheDocument()
    })

    it('should render premium plan with cloud provider icons', () => {
      render(<SelfHostedPlanItem plan={SelfHostedPlan.premium} />)

      expect(screen.getByText(/plans\.premium\.name/i)).toBeInTheDocument()
      expect(screen.getByTestId('icon-azure')).toBeInTheDocument()
      expect(screen.getByTestId('icon-gcloud')).toBeInTheDocument()
    })

    it('should render enterprise plan without cloud provider icons', () => {
      render(<SelfHostedPlanItem plan={SelfHostedPlan.enterprise} />)

      expect(screen.getByText(/plans\.enterprise\.name/i)).toBeInTheDocument()
      expect(screen.queryByTestId('icon-azure')).not.toBeInTheDocument()
    })

    it('should not show price tip for community (free) plan', () => {
      render(<SelfHostedPlanItem plan={SelfHostedPlan.community} />)

      expect(screen.queryByText(/plans\.community\.priceTip/i)).not.toBeInTheDocument()
    })

    it('should show price tip for premium plan', () => {
      render(<SelfHostedPlanItem plan={SelfHostedPlan.premium} />)

      expect(screen.getByText(/plans\.premium\.priceTip/i)).toBeInTheDocument()
    })

    it('should render features list for each plan', () => {
      const { unmount: unmount1 } = render(<SelfHostedPlanItem plan={SelfHostedPlan.community} />)
      expect(screen.getByTestId('self-hosted-list-community')).toBeInTheDocument()
      unmount1()

      const { unmount: unmount2 } = render(<SelfHostedPlanItem plan={SelfHostedPlan.premium} />)
      expect(screen.getByTestId('self-hosted-list-premium')).toBeInTheDocument()
      unmount2()

      render(<SelfHostedPlanItem plan={SelfHostedPlan.enterprise} />)
      expect(screen.getByTestId('self-hosted-list-enterprise')).toBeInTheDocument()
    })

    it('should show AWS marketplace icon for premium plan button', () => {
      render(<SelfHostedPlanItem plan={SelfHostedPlan.premium} />)

      expect(screen.getByTestId('icon-aws-light')).toBeInTheDocument()
    })
  })

  // ─── 2. Navigation Flow ─────────────────────────────────────────────────
  describe('Navigation flow', () => {
    it('should redirect to GitHub when clicking community plan button', async () => {
      const user = userEvent.setup()
      render(<SelfHostedPlanItem plan={SelfHostedPlan.community} />)

      const button = screen.getByRole('button')
      await user.click(button)

      expect(assignedHref).toBe(getStartedWithCommunityUrl)
    })

    it('should redirect to AWS Marketplace when clicking premium plan button', async () => {
      const user = userEvent.setup()
      render(<SelfHostedPlanItem plan={SelfHostedPlan.premium} />)

      const button = screen.getByRole('button')
      await user.click(button)

      expect(assignedHref).toBe(getWithPremiumUrl)
    })

    it('should redirect to Typeform when clicking enterprise plan button', async () => {
      const user = userEvent.setup()
      render(<SelfHostedPlanItem plan={SelfHostedPlan.enterprise} />)

      const button = screen.getByRole('button')
      await user.click(button)

      expect(assignedHref).toBe(contactSalesUrl)
    })
  })

  // ─── 3. Permission Check ────────────────────────────────────────────────
  describe('Permission check', () => {
    it('should show error toast when non-manager clicks community button', async () => {
      setupAppContext({ isCurrentWorkspaceManager: false })
      const user = userEvent.setup()
      render(<SelfHostedPlanItem plan={SelfHostedPlan.community} />)

      const button = screen.getByRole('button')
      await user.click(button)

      await waitFor(() => {
        expect(mockToastNotify).toHaveBeenCalledWith(
          expect.objectContaining({ type: 'error' }),
        )
      })
      // Should NOT redirect
      expect(assignedHref).toBe('')
    })

    it('should show error toast when non-manager clicks premium button', async () => {
      setupAppContext({ isCurrentWorkspaceManager: false })
      const user = userEvent.setup()
      render(<SelfHostedPlanItem plan={SelfHostedPlan.premium} />)

      const button = screen.getByRole('button')
      await user.click(button)

      await waitFor(() => {
        expect(mockToastNotify).toHaveBeenCalledWith(
          expect.objectContaining({ type: 'error' }),
        )
      })
      expect(assignedHref).toBe('')
    })

    it('should show error toast when non-manager clicks enterprise button', async () => {
      setupAppContext({ isCurrentWorkspaceManager: false })
      const user = userEvent.setup()
      render(<SelfHostedPlanItem plan={SelfHostedPlan.enterprise} />)

      const button = screen.getByRole('button')
      await user.click(button)

      await waitFor(() => {
        expect(mockToastNotify).toHaveBeenCalledWith(
          expect.objectContaining({ type: 'error' }),
        )
      })
      expect(assignedHref).toBe('')
    })
  })
})
301
web/__tests__/datasets/create-dataset-flow.test.tsx
Normal file
@ -0,0 +1,301 @@
/**
 * Integration Test: Create Dataset Flow
 *
 * Tests cross-module data flow: step-one data → step-two hooks → creation params → API call
 * Validates data contracts between steps.
 */

import type { CustomFile } from '@/models/datasets'
import type { RetrievalConfig } from '@/types/app'
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ChunkingMode, DataSourceType, ProcessMode } from '@/models/datasets'
import { RETRIEVE_METHOD } from '@/types/app'

const mockCreateFirstDocument = vi.fn()
const mockCreateDocument = vi.fn()
vi.mock('@/service/knowledge/use-create-dataset', () => ({
  useCreateFirstDocument: () => ({ mutateAsync: mockCreateFirstDocument, isPending: false }),
  useCreateDocument: () => ({ mutateAsync: mockCreateDocument, isPending: false }),
  getNotionInfo: (pages: { page_id: string }[], credentialId: string) => ({
    workspace_id: 'ws-1',
    pages: pages.map(p => p.page_id),
    notion_credential_id: credentialId,
  }),
  getWebsiteInfo: (opts: { websitePages: { url: string }[], websiteCrawlProvider: string }) => ({
    urls: opts.websitePages.map(p => p.url),
    only_main_content: true,
    provider: opts.websiteCrawlProvider,
  }),
}))

vi.mock('@/service/knowledge/use-dataset', () => ({
  useInvalidDatasetList: () => vi.fn(),
}))

vi.mock('@/app/components/base/toast', () => ({
  default: { notify: vi.fn() },
}))

vi.mock('@/app/components/base/amplitude', () => ({
  trackEvent: vi.fn(),
}))

// Import hooks after mocks
const { useSegmentationState, DEFAULT_SEGMENT_IDENTIFIER, DEFAULT_MAXIMUM_CHUNK_LENGTH, DEFAULT_OVERLAP }
  = await import('@/app/components/datasets/create/step-two/hooks')
const { useDocumentCreation, IndexingType }
  = await import('@/app/components/datasets/create/step-two/hooks')

const createMockFile = (overrides?: Partial<CustomFile>): CustomFile => ({
  id: 'file-1',
  name: 'test.txt',
  type: 'text/plain',
  size: 1024,
  extension: '.txt',
  mime_type: 'text/plain',
  created_at: 0,
  created_by: '',
  ...overrides,
} as CustomFile)

describe('Create Dataset Flow - Cross-Step Data Contract', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  describe('Step-One → Step-Two: Segmentation Defaults', () => {
    it('should initialise with correct default segmentation values', () => {
      const { result } = renderHook(() => useSegmentationState())
      expect(result.current.segmentIdentifier).toBe(DEFAULT_SEGMENT_IDENTIFIER)
      expect(result.current.maxChunkLength).toBe(DEFAULT_MAXIMUM_CHUNK_LENGTH)
      expect(result.current.overlap).toBe(DEFAULT_OVERLAP)
      expect(result.current.segmentationType).toBe(ProcessMode.general)
    })

    it('should produce valid process rule for general chunking', () => {
      const { result } = renderHook(() => useSegmentationState())
      const processRule = result.current.getProcessRule(ChunkingMode.text)

      // mode should be segmentationType = ProcessMode.general = 'custom'
      expect(processRule.mode).toBe('custom')
      expect(processRule.rules.segmentation).toEqual({
        separator: '\n\n', // unescaped from \\n\\n
        max_tokens: DEFAULT_MAXIMUM_CHUNK_LENGTH,
        chunk_overlap: DEFAULT_OVERLAP,
      })
      // rules is empty initially since no default config loaded
      expect(processRule.rules.pre_processing_rules).toEqual([])
    })

    it('should produce valid process rule for parent-child chunking', () => {
      const { result } = renderHook(() => useSegmentationState())
      const processRule = result.current.getProcessRule(ChunkingMode.parentChild)

      expect(processRule.mode).toBe('hierarchical')
      expect(processRule.rules.parent_mode).toBe('paragraph')
      expect(processRule.rules.segmentation).toEqual({
        separator: '\n\n',
        max_tokens: 1024,
      })
      expect(processRule.rules.subchunk_segmentation).toEqual({
        separator: '\n',
        max_tokens: 512,
      })
    })
  })

  describe('Step-Two → Creation API: Params Building', () => {
    it('should build valid creation params for file upload workflow', () => {
      const files = [createMockFile()]
      const { result: segResult } = renderHook(() => useSegmentationState())
      const { result: creationResult } = renderHook(() =>
        useDocumentCreation({
          dataSourceType: DataSourceType.FILE,
          files,
          notionPages: [],
          notionCredentialId: '',
          websitePages: [],
        }),
      )

      const processRule = segResult.current.getProcessRule(ChunkingMode.text)
      const retrievalConfig: RetrievalConfig = {
        search_method: RETRIEVE_METHOD.semantic,
        reranking_enable: false,
        reranking_model: { reranking_provider_name: '', reranking_model_name: '' },
        top_k: 3,
        score_threshold_enabled: false,
        score_threshold: 0,
      }

      const params = creationResult.current.buildCreationParams(
        ChunkingMode.text,
        'English',
        processRule,
        retrievalConfig,
        { provider: 'openai', model: 'text-embedding-ada-002' },
        IndexingType.QUALIFIED,
      )

      expect(params).not.toBeNull()
      // File IDs come from file.id (not file.file.id)
      expect(params!.data_source.type).toBe(DataSourceType.FILE)
      expect(params!.data_source.info_list.file_info_list?.file_ids).toContain('file-1')

      expect(params!.indexing_technique).toBe(IndexingType.QUALIFIED)
      expect(params!.doc_form).toBe(ChunkingMode.text)
      expect(params!.doc_language).toBe('English')
      expect(params!.embedding_model).toBe('text-embedding-ada-002')
      expect(params!.embedding_model_provider).toBe('openai')
      expect(params!.process_rule.mode).toBe('custom')
    })

    it('should validate params: overlap must not exceed maxChunkLength', () => {
      const { result } = renderHook(() =>
        useDocumentCreation({
          dataSourceType: DataSourceType.FILE,
          files: [createMockFile()],
          notionPages: [],
          notionCredentialId: '',
          websitePages: [],
        }),
      )

      // validateParams returns false (invalid) when overlap > maxChunkLength for general mode
|
||||
const isValid = result.current.validateParams({
|
||||
segmentationType: 'general',
|
||||
maxChunkLength: 100,
|
||||
limitMaxChunkLength: 4000,
|
||||
overlap: 200, // overlap > maxChunkLength
|
||||
indexType: IndexingType.QUALIFIED,
|
||||
embeddingModel: { provider: 'openai', model: 'text-embedding-ada-002' },
|
||||
rerankModelList: [],
|
||||
retrievalConfig: {
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
reranking_enable: false,
|
||||
reranking_model: { reranking_provider_name: '', reranking_model_name: '' },
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0,
|
||||
},
|
||||
})
|
||||
expect(isValid).toBe(false)
|
||||
})
|
||||
|
||||
it('should validate params: maxChunkLength must not exceed limit', () => {
|
||||
const { result } = renderHook(() =>
|
||||
useDocumentCreation({
|
||||
dataSourceType: DataSourceType.FILE,
|
||||
files: [createMockFile()],
|
||||
notionPages: [],
|
||||
notionCredentialId: '',
|
||||
websitePages: [],
|
||||
}),
|
||||
)
|
||||
|
||||
const isValid = result.current.validateParams({
|
||||
segmentationType: 'general',
|
||||
maxChunkLength: 5000,
|
||||
limitMaxChunkLength: 4000, // limit < maxChunkLength
|
||||
overlap: 50,
|
||||
indexType: IndexingType.QUALIFIED,
|
||||
embeddingModel: { provider: 'openai', model: 'text-embedding-ada-002' },
|
||||
rerankModelList: [],
|
||||
retrievalConfig: {
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
reranking_enable: false,
|
||||
reranking_model: { reranking_provider_name: '', reranking_model_name: '' },
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0,
|
||||
},
|
||||
})
|
||||
expect(isValid).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Full Flow: Segmentation State → Process Rule → Creation Params Consistency', () => {
|
||||
it('should keep segmentation values consistent across getProcessRule and buildCreationParams', () => {
|
||||
const files = [createMockFile()]
|
||||
const { result: segResult } = renderHook(() => useSegmentationState())
|
||||
const { result: creationResult } = renderHook(() =>
|
||||
useDocumentCreation({
|
||||
dataSourceType: DataSourceType.FILE,
|
||||
files,
|
||||
notionPages: [],
|
||||
notionCredentialId: '',
|
||||
websitePages: [],
|
||||
}),
|
||||
)
|
||||
|
||||
// Change segmentation settings
|
||||
act(() => {
|
||||
segResult.current.setMaxChunkLength(2048)
|
||||
segResult.current.setOverlap(100)
|
||||
})
|
||||
|
||||
const processRule = segResult.current.getProcessRule(ChunkingMode.text)
|
||||
expect(processRule.rules.segmentation.max_tokens).toBe(2048)
|
||||
expect(processRule.rules.segmentation.chunk_overlap).toBe(100)
|
||||
|
||||
const params = creationResult.current.buildCreationParams(
|
||||
ChunkingMode.text,
|
||||
'Chinese',
|
||||
processRule,
|
||||
{
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
reranking_enable: false,
|
||||
reranking_model: { reranking_provider_name: '', reranking_model_name: '' },
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0,
|
||||
},
|
||||
{ provider: 'openai', model: 'text-embedding-ada-002' },
|
||||
IndexingType.QUALIFIED,
|
||||
)
|
||||
|
||||
expect(params).not.toBeNull()
|
||||
expect(params!.process_rule.rules.segmentation.max_tokens).toBe(2048)
|
||||
expect(params!.process_rule.rules.segmentation.chunk_overlap).toBe(100)
|
||||
expect(params!.doc_language).toBe('Chinese')
|
||||
})
|
||||
|
||||
it('should support parent-child mode through the full pipeline', () => {
|
||||
const files = [createMockFile()]
|
||||
const { result: segResult } = renderHook(() => useSegmentationState())
|
||||
const { result: creationResult } = renderHook(() =>
|
||||
useDocumentCreation({
|
||||
dataSourceType: DataSourceType.FILE,
|
||||
files,
|
||||
notionPages: [],
|
||||
notionCredentialId: '',
|
||||
websitePages: [],
|
||||
}),
|
||||
)
|
||||
|
||||
const processRule = segResult.current.getProcessRule(ChunkingMode.parentChild)
|
||||
const params = creationResult.current.buildCreationParams(
|
||||
ChunkingMode.parentChild,
|
||||
'English',
|
||||
processRule,
|
||||
{
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
reranking_enable: false,
|
||||
reranking_model: { reranking_provider_name: '', reranking_model_name: '' },
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0,
|
||||
},
|
||||
{ provider: 'openai', model: 'text-embedding-ada-002' },
|
||||
IndexingType.QUALIFIED,
|
||||
)
|
||||
|
||||
expect(params).not.toBeNull()
|
||||
expect(params!.doc_form).toBe(ChunkingMode.parentChild)
|
||||
expect(params!.process_rule.mode).toBe('hierarchical')
|
||||
expect(params!.process_rule.rules.parent_mode).toBe('paragraph')
|
||||
expect(params!.process_rule.rules.subchunk_segmentation).toBeDefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
451
web/__tests__/datasets/dataset-settings-flow.test.tsx
Normal file
@ -0,0 +1,451 @@
/**
 * Integration Test: Dataset Settings Flow
 *
 * Tests cross-module data contracts in the dataset settings form:
 * useFormState hook ↔ index method config ↔ retrieval config ↔ permission state.
 *
 * The unit-level use-form-state.spec.ts validates the hook in isolation.
 * This integration test verifies that changing one configuration dimension
 * correctly cascades to dependent parts (index method → retrieval config,
 * permission → member list visibility, embedding model → embedding available state).
 */

import type { DataSet } from '@/models/datasets'
import type { RetrievalConfig } from '@/types/app'
import { act, renderHook, waitFor } from '@testing-library/react'
// Explicit vitest imports, matching the other test files in this change set
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { IndexingType } from '@/app/components/datasets/create/step-two'
import { ChunkingMode, DatasetPermission, DataSourceType, WeightedScoreEnum } from '@/models/datasets'
import { RETRIEVE_METHOD } from '@/types/app'

// --- Mocks ---
|
||||
|
||||
const mockMutateDatasets = vi.fn()
|
||||
const mockInvalidDatasetList = vi.fn()
|
||||
const mockUpdateDatasetSetting = vi.fn().mockResolvedValue({})
|
||||
|
||||
vi.mock('@/context/app-context', () => ({
|
||||
useSelector: () => false,
|
||||
}))
|
||||
|
||||
vi.mock('@/service/datasets', () => ({
|
||||
updateDatasetSetting: (...args: unknown[]) => mockUpdateDatasetSetting(...args),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/knowledge/use-dataset', () => ({
|
||||
useInvalidDatasetList: () => mockInvalidDatasetList,
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-common', () => ({
|
||||
useMembers: () => ({
|
||||
data: {
|
||||
accounts: [
|
||||
{ id: 'user-1', name: 'Alice', email: 'alice@example.com', role: 'owner', avatar: '', avatar_url: '', last_login_at: '', created_at: '', status: 'active' },
|
||||
{ id: 'user-2', name: 'Bob', email: 'bob@example.com', role: 'admin', avatar: '', avatar_url: '', last_login_at: '', created_at: '', status: 'active' },
|
||||
{ id: 'user-3', name: 'Charlie', email: 'charlie@example.com', role: 'normal', avatar: '', avatar_url: '', last_login_at: '', created_at: '', status: 'active' },
|
||||
],
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/header/account-setting/model-provider-page/hooks', () => ({
|
||||
useModelList: () => ({ data: [] }),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/datasets/common/check-rerank-model', () => ({
|
||||
isReRankModelSelected: () => true,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/toast', () => ({
|
||||
default: { notify: vi.fn() },
|
||||
}))
|
||||
|
||||
// --- Dataset factory ---
|
||||
|
||||
const createMockDataset = (overrides?: Partial<DataSet>): DataSet => ({
|
||||
id: 'ds-settings-1',
|
||||
name: 'Settings Test Dataset',
|
||||
description: 'Integration test dataset',
|
||||
permission: DatasetPermission.onlyMe,
|
||||
icon_info: {
|
||||
icon_type: 'emoji',
|
||||
icon: '📙',
|
||||
icon_background: '#FFF4ED',
|
||||
icon_url: '',
|
||||
},
|
||||
indexing_technique: 'high_quality',
|
||||
indexing_status: 'completed',
|
||||
data_source_type: DataSourceType.FILE,
|
||||
doc_form: ChunkingMode.text,
|
||||
embedding_model: 'text-embedding-ada-002',
|
||||
embedding_model_provider: 'openai',
|
||||
embedding_available: true,
|
||||
app_count: 2,
|
||||
document_count: 10,
|
||||
total_document_count: 10,
|
||||
word_count: 5000,
|
||||
provider: 'vendor',
|
||||
tags: [],
|
||||
partial_member_list: [],
|
||||
external_knowledge_info: {
|
||||
external_knowledge_id: '',
|
||||
external_knowledge_api_id: '',
|
||||
external_knowledge_api_name: '',
|
||||
external_knowledge_api_endpoint: '',
|
||||
},
|
||||
external_retrieval_model: {
|
||||
top_k: 2,
|
||||
score_threshold: 0.5,
|
||||
score_threshold_enabled: false,
|
||||
},
|
||||
retrieval_model_dict: {
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
reranking_enable: false,
|
||||
reranking_model: { reranking_provider_name: '', reranking_model_name: '' },
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0,
|
||||
} as RetrievalConfig,
|
||||
retrieval_model: {
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
reranking_enable: false,
|
||||
reranking_model: { reranking_provider_name: '', reranking_model_name: '' },
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0,
|
||||
} as RetrievalConfig,
|
||||
built_in_field_enabled: false,
|
||||
keyword_number: 10,
|
||||
created_by: 'user-1',
|
||||
updated_by: 'user-1',
|
||||
updated_at: Date.now(),
|
||||
runtime_mode: 'general',
|
||||
enable_api: true,
|
||||
is_multimodal: false,
|
||||
...overrides,
|
||||
} as DataSet)
|
||||
|
||||
let mockDataset: DataSet = createMockDataset()
|
||||
|
||||
vi.mock('@/context/dataset-detail', () => ({
|
||||
useDatasetDetailContextWithSelector: (
|
||||
selector: (state: { dataset: DataSet | null, mutateDatasetRes: () => void }) => unknown,
|
||||
) => selector({ dataset: mockDataset, mutateDatasetRes: mockMutateDatasets }),
|
||||
}))
|
||||
|
||||
// Import after mocks are registered
|
||||
const { useFormState } = await import(
|
||||
'@/app/components/datasets/settings/form/hooks/use-form-state',
|
||||
)
|
||||
|
||||
describe('Dataset Settings Flow - Cross-Module Configuration Cascade', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockUpdateDatasetSetting.mockResolvedValue({})
|
||||
mockDataset = createMockDataset()
|
||||
})
|
||||
|
||||
describe('Form State Initialization from Dataset → Index Method → Retrieval Config Chain', () => {
|
||||
it('should initialise all form dimensions from a QUALIFIED dataset', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
expect(result.current.name).toBe('Settings Test Dataset')
|
||||
expect(result.current.description).toBe('Integration test dataset')
|
||||
expect(result.current.indexMethod).toBe('high_quality')
|
||||
expect(result.current.embeddingModel).toEqual({
|
||||
provider: 'openai',
|
||||
model: 'text-embedding-ada-002',
|
||||
})
|
||||
expect(result.current.retrievalConfig.search_method).toBe(RETRIEVE_METHOD.semantic)
|
||||
})
|
||||
|
||||
it('should initialise from an ECONOMICAL dataset with keyword retrieval', () => {
|
||||
mockDataset = createMockDataset({
|
||||
indexing_technique: IndexingType.ECONOMICAL,
|
||||
embedding_model: '',
|
||||
embedding_model_provider: '',
|
||||
retrieval_model_dict: {
|
||||
search_method: RETRIEVE_METHOD.keywordSearch,
|
||||
reranking_enable: false,
|
||||
reranking_model: { reranking_provider_name: '', reranking_model_name: '' },
|
||||
top_k: 5,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0,
|
||||
} as RetrievalConfig,
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
expect(result.current.indexMethod).toBe(IndexingType.ECONOMICAL)
|
||||
expect(result.current.embeddingModel).toEqual({ provider: '', model: '' })
|
||||
expect(result.current.retrievalConfig.search_method).toBe(RETRIEVE_METHOD.keywordSearch)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Index Method Change → Retrieval Config Sync', () => {
|
||||
it('should allow switching index method from QUALIFIED to ECONOMICAL', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
expect(result.current.indexMethod).toBe('high_quality')
|
||||
|
||||
act(() => {
|
||||
result.current.setIndexMethod(IndexingType.ECONOMICAL)
|
||||
})
|
||||
|
||||
expect(result.current.indexMethod).toBe(IndexingType.ECONOMICAL)
|
||||
})
|
||||
|
||||
it('should allow updating retrieval config after index method switch', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setIndexMethod(IndexingType.ECONOMICAL)
|
||||
})
|
||||
|
||||
act(() => {
|
||||
result.current.setRetrievalConfig({
|
||||
...result.current.retrievalConfig,
|
||||
search_method: RETRIEVE_METHOD.keywordSearch,
|
||||
reranking_enable: false,
|
||||
})
|
||||
})
|
||||
|
||||
expect(result.current.indexMethod).toBe(IndexingType.ECONOMICAL)
|
||||
expect(result.current.retrievalConfig.search_method).toBe(RETRIEVE_METHOD.keywordSearch)
|
||||
expect(result.current.retrievalConfig.reranking_enable).toBe(false)
|
||||
})
|
||||
|
||||
it('should preserve retrieval config when switching back to QUALIFIED', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
const originalConfig = { ...result.current.retrievalConfig }
|
||||
|
||||
act(() => {
|
||||
result.current.setIndexMethod(IndexingType.ECONOMICAL)
|
||||
})
|
||||
act(() => {
|
||||
result.current.setIndexMethod(IndexingType.QUALIFIED)
|
||||
})
|
||||
|
||||
expect(result.current.indexMethod).toBe('high_quality')
|
||||
expect(result.current.retrievalConfig.search_method).toBe(originalConfig.search_method)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Permission Change → Member List Visibility Logic', () => {
|
||||
it('should start with onlyMe permission and empty member selection', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
expect(result.current.permission).toBe(DatasetPermission.onlyMe)
|
||||
expect(result.current.selectedMemberIDs).toEqual([])
|
||||
})
|
||||
|
||||
it('should enable member selection when switching to partialMembers', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setPermission(DatasetPermission.partialMembers)
|
||||
})
|
||||
|
||||
expect(result.current.permission).toBe(DatasetPermission.partialMembers)
|
||||
expect(result.current.memberList).toHaveLength(3)
|
||||
expect(result.current.memberList.map(m => m.id)).toEqual(['user-1', 'user-2', 'user-3'])
|
||||
})
|
||||
|
||||
it('should persist member selection through permission toggle', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setPermission(DatasetPermission.partialMembers)
|
||||
result.current.setSelectedMemberIDs(['user-1', 'user-3'])
|
||||
})
|
||||
|
||||
act(() => {
|
||||
result.current.setPermission(DatasetPermission.allTeamMembers)
|
||||
})
|
||||
|
||||
act(() => {
|
||||
result.current.setPermission(DatasetPermission.partialMembers)
|
||||
})
|
||||
|
||||
expect(result.current.selectedMemberIDs).toEqual(['user-1', 'user-3'])
|
||||
})
|
||||
|
||||
it('should include partial_member_list in save payload only for partialMembers', async () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setPermission(DatasetPermission.partialMembers)
|
||||
result.current.setSelectedMemberIDs(['user-2'])
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleSave()
|
||||
})
|
||||
|
||||
expect(mockUpdateDatasetSetting).toHaveBeenCalledWith({
|
||||
datasetId: 'ds-settings-1',
|
||||
body: expect.objectContaining({
|
||||
permission: DatasetPermission.partialMembers,
|
||||
partial_member_list: [
|
||||
expect.objectContaining({ user_id: 'user-2', role: 'admin' }),
|
||||
],
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
it('should not include partial_member_list for allTeamMembers permission', async () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setPermission(DatasetPermission.allTeamMembers)
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleSave()
|
||||
})
|
||||
|
||||
const savedBody = mockUpdateDatasetSetting.mock.calls[0][0].body as Record<string, unknown>
|
||||
expect(savedBody).not.toHaveProperty('partial_member_list')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Form Submission Validation → All Fields Together', () => {
|
||||
it('should reject empty name on save', async () => {
|
||||
const Toast = await import('@/app/components/base/toast')
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setName('')
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleSave()
|
||||
})
|
||||
|
||||
expect(Toast.default.notify).toHaveBeenCalledWith({
|
||||
type: 'error',
|
||||
message: expect.any(String),
|
||||
})
|
||||
expect(mockUpdateDatasetSetting).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should include all configuration dimensions in a successful save', async () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setName('Updated Name')
|
||||
result.current.setDescription('Updated Description')
|
||||
result.current.setIndexMethod(IndexingType.ECONOMICAL)
|
||||
result.current.setKeywordNumber(15)
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleSave()
|
||||
})
|
||||
|
||||
expect(mockUpdateDatasetSetting).toHaveBeenCalledWith({
|
||||
datasetId: 'ds-settings-1',
|
||||
body: expect.objectContaining({
|
||||
name: 'Updated Name',
|
||||
description: 'Updated Description',
|
||||
indexing_technique: 'economy',
|
||||
keyword_number: 15,
|
||||
embedding_model: 'text-embedding-ada-002',
|
||||
embedding_model_provider: 'openai',
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
it('should call mutateDatasets and invalidDatasetList after successful save', async () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleSave()
|
||||
})
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockMutateDatasets).toHaveBeenCalled()
|
||||
expect(mockInvalidDatasetList).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Embedding Model Change → Retrieval Config Cascade', () => {
|
||||
it('should update embedding model independently of retrieval config', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
const originalRetrievalConfig = { ...result.current.retrievalConfig }
|
||||
|
||||
act(() => {
|
||||
result.current.setEmbeddingModel({ provider: 'cohere', model: 'embed-english-v3.0' })
|
||||
})
|
||||
|
||||
expect(result.current.embeddingModel).toEqual({
|
||||
provider: 'cohere',
|
||||
model: 'embed-english-v3.0',
|
||||
})
|
||||
expect(result.current.retrievalConfig.search_method).toBe(originalRetrievalConfig.search_method)
|
||||
})
|
||||
|
||||
it('should propagate embedding model into weighted retrieval config on save', async () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setEmbeddingModel({ provider: 'cohere', model: 'embed-v3' })
|
||||
result.current.setRetrievalConfig({
|
||||
...result.current.retrievalConfig,
|
||||
search_method: RETRIEVE_METHOD.hybrid,
|
||||
weights: {
|
||||
weight_type: WeightedScoreEnum.Customized,
|
||||
vector_setting: {
|
||||
vector_weight: 0.6,
|
||||
embedding_provider_name: '',
|
||||
embedding_model_name: '',
|
||||
},
|
||||
keyword_setting: { keyword_weight: 0.4 },
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleSave()
|
||||
})
|
||||
|
||||
expect(mockUpdateDatasetSetting).toHaveBeenCalledWith({
|
||||
datasetId: 'ds-settings-1',
|
||||
body: expect.objectContaining({
|
||||
embedding_model: 'embed-v3',
|
||||
embedding_model_provider: 'cohere',
|
||||
retrieval_model: expect.objectContaining({
|
||||
weights: expect.objectContaining({
|
||||
vector_setting: expect.objectContaining({
|
||||
embedding_provider_name: 'cohere',
|
||||
embedding_model_name: 'embed-v3',
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle switching from semantic to hybrid search with embedding model', () => {
|
||||
const { result } = renderHook(() => useFormState())
|
||||
|
||||
act(() => {
|
||||
result.current.setRetrievalConfig({
|
||||
...result.current.retrievalConfig,
|
||||
search_method: RETRIEVE_METHOD.hybrid,
|
||||
reranking_enable: true,
|
||||
reranking_model: {
|
||||
reranking_provider_name: 'cohere',
|
||||
reranking_model_name: 'rerank-english-v3.0',
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
expect(result.current.retrievalConfig.search_method).toBe(RETRIEVE_METHOD.hybrid)
|
||||
expect(result.current.retrievalConfig.reranking_enable).toBe(true)
|
||||
expect(result.current.embeddingModel.model).toBe('text-embedding-ada-002')
|
||||
})
|
||||
})
|
||||
})
|
||||
335
web/__tests__/datasets/document-management.test.tsx
Normal file
@ -0,0 +1,335 @@
/**
 * Integration Test: Document Management Flow
 *
 * Tests cross-module interactions: query state (URL-based) → document list sorting →
 * document selection → status filter utilities.
 * Validates the data contract between documents page hooks and list component hooks.
 */

import type { SimpleDocumentDetail } from '@/models/datasets'
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DataSourceType } from '@/models/datasets'

const mockPush = vi.fn()
|
||||
vi.mock('next/navigation', () => ({
|
||||
useSearchParams: () => new URLSearchParams(''),
|
||||
useRouter: () => ({ push: mockPush }),
|
||||
usePathname: () => '/datasets/ds-1/documents',
|
||||
}))
|
||||
|
||||
const { sanitizeStatusValue, normalizeStatusForQuery } = await import(
|
||||
'@/app/components/datasets/documents/status-filter',
|
||||
)
|
||||
|
||||
const { useDocumentSort } = await import(
|
||||
'@/app/components/datasets/documents/components/document-list/hooks/use-document-sort',
|
||||
)
|
||||
const { useDocumentSelection } = await import(
|
||||
'@/app/components/datasets/documents/components/document-list/hooks/use-document-selection',
|
||||
)
|
||||
const { default: useDocumentListQueryState } = await import(
|
||||
'@/app/components/datasets/documents/hooks/use-document-list-query-state',
|
||||
)
|
||||
|
||||
type LocalDoc = SimpleDocumentDetail & { percent?: number }
|
||||
|
||||
const createDoc = (overrides?: Partial<LocalDoc>): LocalDoc => ({
|
||||
id: `doc-${Math.random().toString(36).slice(2, 8)}`,
|
||||
name: 'test-doc.txt',
|
||||
word_count: 500,
|
||||
hit_count: 10,
|
||||
created_at: Date.now() / 1000,
|
||||
data_source_type: DataSourceType.FILE,
|
||||
display_status: 'available',
|
||||
indexing_status: 'completed',
|
||||
enabled: true,
|
||||
archived: false,
|
||||
doc_type: null,
|
||||
doc_metadata: null,
|
||||
position: 1,
|
||||
dataset_process_rule_id: 'rule-1',
|
||||
...overrides,
|
||||
} as LocalDoc)
|
||||
|
||||
describe('Document Management Flow', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Status Filter Utilities', () => {
|
||||
it('should sanitize valid status values', () => {
|
||||
expect(sanitizeStatusValue('all')).toBe('all')
|
||||
expect(sanitizeStatusValue('available')).toBe('available')
|
||||
expect(sanitizeStatusValue('error')).toBe('error')
|
||||
})
|
||||
|
||||
it('should fallback to "all" for invalid values', () => {
|
||||
expect(sanitizeStatusValue(null)).toBe('all')
|
||||
expect(sanitizeStatusValue(undefined)).toBe('all')
|
||||
expect(sanitizeStatusValue('')).toBe('all')
|
||||
expect(sanitizeStatusValue('nonexistent')).toBe('all')
|
||||
})
|
||||
|
||||
it('should handle URL aliases', () => {
|
||||
// 'active' is aliased to 'available'
|
||||
expect(sanitizeStatusValue('active')).toBe('available')
|
||||
})
|
||||
|
||||
it('should normalize status for API query', () => {
|
||||
expect(normalizeStatusForQuery('all')).toBe('all')
|
||||
// 'enabled' normalized to 'available' for query
|
||||
expect(normalizeStatusForQuery('enabled')).toBe('available')
|
||||
})
|
||||
})
|
||||
|
||||
describe('URL-based Query State', () => {
|
||||
it('should parse default query from empty URL params', () => {
|
||||
const { result } = renderHook(() => useDocumentListQueryState())
|
||||
|
||||
expect(result.current.query).toEqual({
|
||||
page: 1,
|
||||
limit: 10,
|
||||
keyword: '',
|
||||
status: 'all',
|
||||
sort: '-created_at',
|
||||
})
|
||||
})
|
||||
|
||||
it('should update query and push to router', () => {
|
||||
const { result } = renderHook(() => useDocumentListQueryState())
|
||||
|
||||
act(() => {
|
||||
result.current.updateQuery({ keyword: 'test', page: 2 })
|
||||
})
|
||||
|
||||
expect(mockPush).toHaveBeenCalled()
|
||||
// The push call should contain the updated query params
|
||||
const pushUrl = mockPush.mock.calls[0][0] as string
|
||||
expect(pushUrl).toContain('keyword=test')
|
||||
expect(pushUrl).toContain('page=2')
|
||||
})
|
||||
|
||||
it('should reset query to defaults', () => {
|
||||
const { result } = renderHook(() => useDocumentListQueryState())
|
||||
|
||||
act(() => {
|
||||
result.current.resetQuery()
|
||||
})
|
||||
|
||||
expect(mockPush).toHaveBeenCalled()
|
||||
// Default query omits default values from URL
|
||||
const pushUrl = mockPush.mock.calls[0][0] as string
|
||||
expect(pushUrl).toBe('/datasets/ds-1/documents')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Document Sort Integration', () => {
|
||||
it('should return documents unsorted when no sort field set', () => {
|
||||
const docs = [
|
||||
createDoc({ id: 'doc-1', name: 'Banana.txt', word_count: 300 }),
|
||||
createDoc({ id: 'doc-2', name: 'Apple.txt', word_count: 100 }),
|
||||
createDoc({ id: 'doc-3', name: 'Cherry.txt', word_count: 200 }),
|
||||
]
|
||||
|
||||
const { result } = renderHook(() => useDocumentSort({
|
||||
documents: docs,
|
||||
statusFilterValue: '',
|
||||
remoteSortValue: '-created_at',
|
||||
}))
|
||||
|
||||
expect(result.current.sortField).toBeNull()
|
||||
expect(result.current.sortedDocuments).toHaveLength(3)
|
||||
})
|
||||
|
||||
it('should sort by name descending', () => {
|
||||
const docs = [
|
||||
createDoc({ id: 'doc-1', name: 'Banana.txt' }),
|
||||
createDoc({ id: 'doc-2', name: 'Apple.txt' }),
|
||||
createDoc({ id: 'doc-3', name: 'Cherry.txt' }),
|
||||
]
|
||||
|
||||
const { result } = renderHook(() => useDocumentSort({
|
||||
documents: docs,
|
||||
statusFilterValue: '',
|
||||
remoteSortValue: '-created_at',
|
||||
}))
|
||||
|
||||
act(() => {
|
||||
result.current.handleSort('name')
|
||||
})
|
||||
|
||||
expect(result.current.sortField).toBe('name')
|
||||
expect(result.current.sortOrder).toBe('desc')
|
||||
const names = result.current.sortedDocuments.map(d => d.name)
|
||||
expect(names).toEqual(['Cherry.txt', 'Banana.txt', 'Apple.txt'])
|
||||
})
|
||||
|
||||
it('should toggle sort order on same field click', () => {
|
||||
const docs = [createDoc({ id: 'doc-1', name: 'A.txt' }), createDoc({ id: 'doc-2', name: 'B.txt' })]
|
||||
|
||||
const { result } = renderHook(() => useDocumentSort({
|
||||
documents: docs,
|
||||
statusFilterValue: '',
|
||||
remoteSortValue: '-created_at',
|
||||
}))
|
||||
|
||||
act(() => result.current.handleSort('name'))
|
||||
expect(result.current.sortOrder).toBe('desc')
|
||||
|
||||
act(() => result.current.handleSort('name'))
|
||||
expect(result.current.sortOrder).toBe('asc')
|
||||
})
|
||||
|
||||
it('should filter by status before sorting', () => {
|
||||
const docs = [
|
||||
createDoc({ id: 'doc-1', name: 'A.txt', display_status: 'available' }),
|
||||
createDoc({ id: 'doc-2', name: 'B.txt', display_status: 'error' }),
|
||||
createDoc({ id: 'doc-3', name: 'C.txt', display_status: 'available' }),
|
||||
]
|
||||
|
||||
const { result } = renderHook(() => useDocumentSort({
|
||||
documents: docs,
|
||||
statusFilterValue: 'available',
|
||||
remoteSortValue: '-created_at',
|
||||
}))
|
||||
|
||||
// Only 'available' documents should remain
|
||||
expect(result.current.sortedDocuments).toHaveLength(2)
|
||||
expect(result.current.sortedDocuments.every(d => d.display_status === 'available')).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Document Selection Integration', () => {
|
||||
it('should manage selection state externally', () => {
|
||||
const docs = [
|
||||
createDoc({ id: 'doc-1' }),
|
||||
createDoc({ id: 'doc-2' }),
|
||||
createDoc({ id: 'doc-3' }),
|
||||
]
|
||||
const onSelectedIdChange = vi.fn()
|
||||
|
||||
const { result } = renderHook(() => useDocumentSelection({
|
||||
documents: docs,
|
||||
selectedIds: [],
|
||||
onSelectedIdChange,
|
||||
}))
|
||||
|
||||
expect(result.current.isAllSelected).toBe(false)
|
||||
expect(result.current.isSomeSelected).toBe(false)
|
||||
})
|
||||
|
||||
it('should select all documents', () => {
|
||||
const docs = [
|
||||
createDoc({ id: 'doc-1' }),
|
||||
createDoc({ id: 'doc-2' }),
|
||||
]
|
||||
const onSelectedIdChange = vi.fn()
|
||||
|
||||
const { result } = renderHook(() => useDocumentSelection({
|
||||
documents: docs,
|
||||
selectedIds: [],
|
||||
onSelectedIdChange,
|
||||
}))
|
||||
|
||||
act(() => {
|
||||
result.current.onSelectAll()
|
||||
})
|
||||
|
||||
expect(onSelectedIdChange).toHaveBeenCalledWith(
|
||||
expect.arrayContaining(['doc-1', 'doc-2']),
|
||||
)
|
||||
})
|
||||
|
||||
it('should detect all-selected state', () => {
|
||||
const docs = [
|
||||
createDoc({ id: 'doc-1' }),
|
||||
createDoc({ id: 'doc-2' }),
|
||||
]
|
||||
|
||||
const { result } = renderHook(() => useDocumentSelection({
|
||||
documents: docs,
|
||||
selectedIds: ['doc-1', 'doc-2'],
|
||||
onSelectedIdChange: vi.fn(),
|
||||
}))
|
||||
|
||||
expect(result.current.isAllSelected).toBe(true)
|
||||
})
|
||||
|
||||
it('should detect partial selection', () => {
|
||||
const docs = [
|
||||
createDoc({ id: 'doc-1' }),
|
||||
createDoc({ id: 'doc-2' }),
|
||||
createDoc({ id: 'doc-3' }),
|
||||
]
|
||||
|
||||
const { result } = renderHook(() => useDocumentSelection({
|
||||
documents: docs,
|
||||
selectedIds: ['doc-1'],
|
||||
onSelectedIdChange: vi.fn(),
|
||||
}))
|
||||
|
||||
expect(result.current.isSomeSelected).toBe(true)
|
||||
expect(result.current.isAllSelected).toBe(false)
|
||||
})
|
||||
|
||||
it('should identify downloadable selected documents (FILE type only)', () => {
|
||||
const docs = [
|
||||
createDoc({ id: 'doc-1', data_source_type: DataSourceType.FILE }),
|
||||
createDoc({ id: 'doc-2', data_source_type: DataSourceType.NOTION }),
|
||||
]
|
||||
|
||||
const { result } = renderHook(() => useDocumentSelection({
|
||||
documents: docs,
|
||||
selectedIds: ['doc-1', 'doc-2'],
|
||||
onSelectedIdChange: vi.fn(),
|
||||
}))
|
||||
|
||||
expect(result.current.downloadableSelectedIds).toEqual(['doc-1'])
|
||||
})
|
||||
|
||||
it('should clear selection', () => {
|
||||
const onSelectedIdChange = vi.fn()
|
||||
const docs = [createDoc({ id: 'doc-1' })]
|
||||
|
||||
const { result } = renderHook(() => useDocumentSelection({
|
||||
documents: docs,
|
||||
selectedIds: ['doc-1'],
|
||||
onSelectedIdChange,
|
||||
}))
|
||||
|
||||
act(() => {
|
||||
result.current.clearSelection()
|
||||
})
|
||||
|
||||
expect(onSelectedIdChange).toHaveBeenCalledWith([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('Cross-Module: Query State → Sort → Selection Pipeline', () => {
|
||||
it('should maintain consistent default state across all hooks', () => {
|
||||
const docs = [createDoc({ id: 'doc-1' })]
|
||||
const { result: queryResult } = renderHook(() => useDocumentListQueryState())
|
||||
const { result: sortResult } = renderHook(() => useDocumentSort({
|
||||
documents: docs,
|
||||
statusFilterValue: queryResult.current.query.status,
|
||||
remoteSortValue: queryResult.current.query.sort,
|
||||
}))
|
||||
const { result: selResult } = renderHook(() => useDocumentSelection({
|
||||
documents: sortResult.current.sortedDocuments,
|
||||
selectedIds: [],
|
||||
onSelectedIdChange: vi.fn(),
|
||||
}))
|
||||
|
||||
// Query defaults
|
||||
expect(queryResult.current.query.sort).toBe('-created_at')
|
||||
expect(queryResult.current.query.status).toBe('all')
|
||||
|
||||
// Sort inherits 'all' status → no filtering applied
|
||||
expect(sortResult.current.sortedDocuments).toHaveLength(1)
|
||||
|
||||
// Selection starts empty
|
||||
expect(selResult.current.isAllSelected).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
215
web/__tests__/datasets/external-knowledge-base.test.tsx
Normal file
@ -0,0 +1,215 @@
/**
 * Integration Test: External Knowledge Base Creation Flow
 *
 * Tests the data contract, validation logic, and API interaction
 * for external knowledge base creation.
 */

import type { CreateKnowledgeBaseReq } from '@/app/components/datasets/external-knowledge-base/create/declarations'
import { describe, expect, it } from 'vitest'

// --- Factory ---
|
||||
const createFormData = (overrides?: Partial<CreateKnowledgeBaseReq>): CreateKnowledgeBaseReq => ({
|
||||
name: 'My External KB',
|
||||
description: 'A test external knowledge base',
|
||||
external_knowledge_api_id: 'api-1',
|
||||
external_knowledge_id: 'ext-kb-123',
|
||||
external_retrieval_model: {
|
||||
top_k: 4,
|
||||
score_threshold: 0.5,
|
||||
score_threshold_enabled: false,
|
||||
},
|
||||
provider: 'external',
|
||||
...overrides,
|
||||
})
|
||||
|
||||
describe('External Knowledge Base Creation Flow', () => {
|
||||
describe('Data Contract: CreateKnowledgeBaseReq', () => {
|
||||
it('should define a complete form structure', () => {
|
||||
const form = createFormData()
|
||||
|
||||
expect(form).toHaveProperty('name')
|
||||
expect(form).toHaveProperty('external_knowledge_api_id')
|
||||
expect(form).toHaveProperty('external_knowledge_id')
|
||||
expect(form).toHaveProperty('external_retrieval_model')
|
||||
expect(form).toHaveProperty('provider')
|
||||
expect(form.provider).toBe('external')
|
||||
})
|
||||
|
||||
it('should include retrieval model settings', () => {
|
||||
const form = createFormData()
|
||||
|
||||
expect(form.external_retrieval_model).toEqual({
|
||||
top_k: 4,
|
||||
score_threshold: 0.5,
|
||||
score_threshold_enabled: false,
|
||||
})
|
||||
})
|
||||
|
||||
it('should allow partial overrides', () => {
|
||||
const form = createFormData({
|
||||
name: 'Custom Name',
|
||||
external_retrieval_model: {
|
||||
top_k: 10,
|
||||
score_threshold: 0.8,
|
||||
score_threshold_enabled: true,
|
||||
},
|
||||
})
|
||||
|
||||
expect(form.name).toBe('Custom Name')
|
||||
expect(form.external_retrieval_model.top_k).toBe(10)
|
||||
expect(form.external_retrieval_model.score_threshold_enabled).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Form Validation Logic', () => {
|
||||
const isFormValid = (form: CreateKnowledgeBaseReq): boolean => {
|
||||
return (
|
||||
form.name.trim() !== ''
|
||||
&& form.external_knowledge_api_id !== ''
|
||||
&& form.external_knowledge_id !== ''
|
||||
&& form.external_retrieval_model.top_k !== undefined
|
||||
&& form.external_retrieval_model.score_threshold !== undefined
|
||||
)
|
||||
}
|
||||
|
||||
it('should validate a complete form', () => {
|
||||
const form = createFormData()
|
||||
expect(isFormValid(form)).toBe(true)
|
||||
})
|
||||
|
||||
it('should reject empty name', () => {
|
||||
const form = createFormData({ name: '' })
|
||||
expect(isFormValid(form)).toBe(false)
|
||||
})
|
||||
|
||||
it('should reject whitespace-only name', () => {
|
||||
const form = createFormData({ name: ' ' })
|
||||
expect(isFormValid(form)).toBe(false)
|
||||
})
|
||||
|
||||
it('should reject empty external_knowledge_api_id', () => {
|
||||
const form = createFormData({ external_knowledge_api_id: '' })
|
||||
expect(isFormValid(form)).toBe(false)
|
||||
})
|
||||
|
||||
it('should reject empty external_knowledge_id', () => {
|
||||
const form = createFormData({ external_knowledge_id: '' })
|
||||
expect(isFormValid(form)).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Form State Transitions', () => {
|
||||
it('should start with empty default state', () => {
|
||||
const defaultForm: CreateKnowledgeBaseReq = {
|
||||
name: '',
|
||||
description: '',
|
||||
external_knowledge_api_id: '',
|
||||
external_knowledge_id: '',
|
||||
external_retrieval_model: {
|
||||
top_k: 4,
|
||||
score_threshold: 0.5,
|
||||
score_threshold_enabled: false,
|
||||
},
|
||||
provider: 'external',
|
||||
}
|
||||
|
||||
// Verify default state matches component's initial useState
|
||||
expect(defaultForm.name).toBe('')
|
||||
expect(defaultForm.external_knowledge_api_id).toBe('')
|
||||
expect(defaultForm.external_knowledge_id).toBe('')
|
||||
expect(defaultForm.provider).toBe('external')
|
||||
})
|
||||
|
||||
it('should support immutable form updates', () => {
|
||||
const form = createFormData({ name: '' })
|
||||
const updated = { ...form, name: 'Updated Name' }
|
||||
|
||||
expect(form.name).toBe('')
|
||||
expect(updated.name).toBe('Updated Name')
|
||||
// Other fields should remain unchanged
|
||||
expect(updated.external_knowledge_api_id).toBe(form.external_knowledge_api_id)
|
||||
})
|
||||
|
||||
it('should support retrieval model updates', () => {
|
||||
const form = createFormData()
|
||||
const updated = {
|
||||
...form,
|
||||
external_retrieval_model: {
|
||||
...form.external_retrieval_model,
|
||||
top_k: 10,
|
||||
score_threshold_enabled: true,
|
||||
},
|
||||
}
|
||||
|
||||
expect(updated.external_retrieval_model.top_k).toBe(10)
|
||||
expect(updated.external_retrieval_model.score_threshold_enabled).toBe(true)
|
||||
// Unchanged field
|
||||
expect(updated.external_retrieval_model.score_threshold).toBe(0.5)
|
||||
})
|
||||
})
|
||||
|
||||
describe('API Call Data Contract', () => {
|
||||
it('should produce a valid API payload from form data', () => {
|
||||
const form = createFormData()
|
||||
|
||||
// The API expects the full CreateKnowledgeBaseReq
|
||||
expect(form.name).toBeTruthy()
|
||||
expect(form.external_knowledge_api_id).toBeTruthy()
|
||||
expect(form.external_knowledge_id).toBeTruthy()
|
||||
expect(form.provider).toBe('external')
|
||||
expect(typeof form.external_retrieval_model.top_k).toBe('number')
|
||||
expect(typeof form.external_retrieval_model.score_threshold).toBe('number')
|
||||
expect(typeof form.external_retrieval_model.score_threshold_enabled).toBe('boolean')
|
||||
})
|
||||
|
||||
it('should support optional description', () => {
|
||||
const formWithDesc = createFormData({ description: 'Some description' })
|
||||
const formWithoutDesc = createFormData({ description: '' })
|
||||
|
||||
expect(formWithDesc.description).toBe('Some description')
|
||||
expect(formWithoutDesc.description).toBe('')
|
||||
})
|
||||
|
||||
it('should validate retrieval model bounds', () => {
|
||||
const form = createFormData({
|
||||
external_retrieval_model: {
|
||||
top_k: 0,
|
||||
score_threshold: 0,
|
||||
score_threshold_enabled: false,
|
||||
},
|
||||
})
|
||||
|
||||
expect(form.external_retrieval_model.top_k).toBe(0)
|
||||
expect(form.external_retrieval_model.score_threshold).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('External API List Integration', () => {
|
||||
it('should validate API item structure', () => {
|
||||
const apiItem = {
|
||||
id: 'api-1',
|
||||
name: 'Production API',
|
||||
settings: {
|
||||
endpoint: 'https://api.example.com',
|
||||
api_key: 'key-123',
|
||||
},
|
||||
}
|
||||
|
||||
expect(apiItem).toHaveProperty('id')
|
||||
expect(apiItem).toHaveProperty('name')
|
||||
expect(apiItem).toHaveProperty('settings')
|
||||
expect(apiItem.settings).toHaveProperty('endpoint')
|
||||
expect(apiItem.settings).toHaveProperty('api_key')
|
||||
})
|
||||
|
||||
it('should link API selection to form data', () => {
|
||||
const selectedApi = { id: 'api-2', name: 'Staging API' }
|
||||
const form = createFormData({
|
||||
external_knowledge_api_id: selectedApi.id,
|
||||
})
|
||||
|
||||
expect(form.external_knowledge_api_id).toBe('api-2')
|
||||
})
|
||||
})
|
||||
})
|
||||
404
web/__tests__/datasets/hit-testing-flow.test.tsx
Normal file
@ -0,0 +1,404 @@
/**
 * Integration Test: Hit Testing Flow
 *
 * Tests the query submission → API response → callback chain flow
 * by rendering the actual QueryInput component and triggering user interactions.
 * Validates that the production onSubmit logic correctly constructs payloads
 * and invokes callbacks on success/failure.
 */

import type {
  HitTestingResponse,
  Query,
} from '@/models/datasets'
import type { RetrievalConfig } from '@/types/app'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
// Explicit vitest imports, matching the other test files in this change set
import { beforeEach, describe, expect, it, vi } from 'vitest'
import QueryInput from '@/app/components/datasets/hit-testing/components/query-input'
import { RETRIEVE_METHOD } from '@/types/app'

// --- Mocks ---
|
||||
|
||||
vi.mock('@/context/dataset-detail', () => ({
|
||||
default: {},
|
||||
useDatasetDetailContext: vi.fn(() => ({ dataset: undefined })),
|
||||
useDatasetDetailContextWithSelector: vi.fn(() => false),
|
||||
}))
|
||||
|
||||
vi.mock('use-context-selector', () => ({
|
||||
useContext: vi.fn(() => ({})),
|
||||
useContextSelector: vi.fn(() => false),
|
||||
createContext: vi.fn(() => ({})),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/datasets/common/image-uploader/image-uploader-in-retrieval-testing', () => ({
|
||||
default: ({ textArea, actionButton }: { textArea: React.ReactNode, actionButton: React.ReactNode }) => (
|
||||
<div data-testid="image-uploader-mock">
|
||||
{textArea}
|
||||
{actionButton}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
// --- Factories ---
|
||||
|
||||
const createRetrievalConfig = (overrides = {}): RetrievalConfig => ({
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
reranking_enable: false,
|
||||
reranking_mode: undefined,
|
||||
reranking_model: {
|
||||
reranking_provider_name: '',
|
||||
reranking_model_name: '',
|
||||
},
|
||||
weights: undefined,
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
score_threshold: 0.5,
|
||||
...overrides,
|
||||
} as RetrievalConfig)
|
||||
|
||||
const createHitTestingResponse = (numResults: number): HitTestingResponse => ({
|
||||
query: {
|
||||
content: 'What is Dify?',
|
||||
tsne_position: { x: 0, y: 0 },
|
||||
},
|
||||
records: Array.from({ length: numResults }, (_, i) => ({
|
||||
segment: {
|
||||
id: `seg-${i}`,
|
||||
document: {
|
||||
id: `doc-${i}`,
|
||||
data_source_type: 'upload_file',
|
||||
name: `document-${i}.txt`,
|
||||
doc_type: null as unknown as import('@/models/datasets').DocType,
|
||||
},
|
||||
content: `Result content ${i}`,
|
||||
sign_content: `Result content ${i}`,
|
||||
position: i + 1,
|
||||
word_count: 100 + i * 50,
|
||||
tokens: 50 + i * 25,
|
||||
keywords: ['test', 'dify'],
|
||||
hit_count: i * 5,
|
||||
index_node_hash: `hash-${i}`,
|
||||
answer: '',
|
||||
},
|
||||
content: {
|
||||
id: `seg-${i}`,
|
||||
document: {
|
||||
id: `doc-${i}`,
|
||||
data_source_type: 'upload_file',
|
||||
name: `document-${i}.txt`,
|
||||
doc_type: null as unknown as import('@/models/datasets').DocType,
|
||||
},
|
||||
content: `Result content ${i}`,
|
||||
sign_content: `Result content ${i}`,
|
||||
position: i + 1,
|
||||
word_count: 100 + i * 50,
|
||||
tokens: 50 + i * 25,
|
||||
keywords: ['test', 'dify'],
|
||||
hit_count: i * 5,
|
||||
index_node_hash: `hash-${i}`,
|
||||
answer: '',
|
||||
},
|
||||
score: 0.95 - i * 0.1,
|
||||
tsne_position: { x: 0, y: 0 },
|
||||
child_chunks: null,
|
||||
files: [],
|
||||
})),
|
||||
})
|
||||
|
||||
const createTextQuery = (content: string): Query[] => [
|
||||
{ content, content_type: 'text_query', file_info: null },
|
||||
]
|
||||
|
||||
// --- Helpers ---
|
||||
|
||||
const findSubmitButton = () => {
|
||||
const buttons = screen.getAllByRole('button')
|
||||
const submitButton = buttons.find(btn => btn.classList.contains('w-[88px]'))
|
||||
expect(submitButton).toBeTruthy()
|
||||
return submitButton!
|
||||
}
|
||||
|
||||
// --- Tests ---
|
||||
|
||||
describe('Hit Testing Flow', () => {
|
||||
const mockHitTestingMutation = vi.fn()
|
||||
const mockExternalMutation = vi.fn()
|
||||
const mockSetHitResult = vi.fn()
|
||||
const mockSetExternalHitResult = vi.fn()
|
||||
const mockOnUpdateList = vi.fn()
|
||||
const mockSetQueries = vi.fn()
|
||||
const mockOnClickRetrievalMethod = vi.fn()
|
||||
const mockOnSubmit = vi.fn()
|
||||
|
||||
const createDefaultProps = (overrides: Record<string, unknown> = {}) => ({
|
||||
onUpdateList: mockOnUpdateList,
|
||||
setHitResult: mockSetHitResult,
|
||||
setExternalHitResult: mockSetExternalHitResult,
|
||||
loading: false,
|
||||
queries: [] as Query[],
|
||||
setQueries: mockSetQueries,
|
||||
isExternal: false,
|
||||
onClickRetrievalMethod: mockOnClickRetrievalMethod,
|
||||
retrievalConfig: createRetrievalConfig(),
|
||||
isEconomy: false,
|
||||
onSubmit: mockOnSubmit,
|
||||
hitTestingMutation: mockHitTestingMutation,
|
||||
externalKnowledgeBaseHitTestingMutation: mockExternalMutation,
|
||||
...overrides,
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Query Submission → API Call', () => {
|
||||
it('should call hitTestingMutation with correct payload including retrieval model', async () => {
|
||||
const retrievalConfig = createRetrievalConfig({
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
})
|
||||
mockHitTestingMutation.mockResolvedValue(createHitTestingResponse(3))
|
||||
|
||||
render(
|
||||
<QueryInput {...createDefaultProps({
|
||||
queries: createTextQuery('How does RAG work?'),
|
||||
retrievalConfig,
|
||||
})}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(findSubmitButton())
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockHitTestingMutation).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
query: 'How does RAG work?',
|
||||
attachment_ids: [],
|
||||
retrieval_model: expect.objectContaining({
|
||||
search_method: RETRIEVE_METHOD.semantic,
|
||||
top_k: 3,
|
||||
score_threshold_enabled: false,
|
||||
}),
|
||||
}),
|
||||
expect.objectContaining({
|
||||
onSuccess: expect.any(Function),
|
||||
}),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should override search_method to keywordSearch when isEconomy is true', async () => {
|
||||
const retrievalConfig = createRetrievalConfig({ search_method: RETRIEVE_METHOD.semantic })
|
||||
mockHitTestingMutation.mockResolvedValue(createHitTestingResponse(1))
|
||||
|
||||
render(
|
||||
<QueryInput {...createDefaultProps({
|
||||
queries: createTextQuery('test query'),
|
||||
retrievalConfig,
|
||||
isEconomy: true,
|
||||
})}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(findSubmitButton())
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockHitTestingMutation).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
retrieval_model: expect.objectContaining({
|
||||
search_method: RETRIEVE_METHOD.keywordSearch,
|
||||
}),
|
||||
}),
|
||||
expect.anything(),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle empty results by calling setHitResult with empty records', async () => {
|
||||
const emptyResponse = createHitTestingResponse(0)
|
||||
mockHitTestingMutation.mockImplementation(async (_params: unknown, options?: { onSuccess?: (data: HitTestingResponse) => void }) => {
|
||||
options?.onSuccess?.(emptyResponse)
|
||||
return emptyResponse
|
||||
})
|
||||
|
||||
render(
|
||||
<QueryInput {...createDefaultProps({
|
||||
queries: createTextQuery('nonexistent topic'),
|
||||
})}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(findSubmitButton())
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockSetHitResult).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ records: [] }),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
it('should not call success callbacks when mutation resolves without onSuccess', async () => {
|
||||
// Simulate a mutation that resolves but does not invoke the onSuccess callback
|
||||
mockHitTestingMutation.mockResolvedValue(undefined)
|
||||
|
||||
render(
|
||||
<QueryInput {...createDefaultProps({
|
||||
queries: createTextQuery('test'),
|
||||
})}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(findSubmitButton())
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockHitTestingMutation).toHaveBeenCalled()
|
||||
})
|
||||
// Success callbacks should not fire when onSuccess is not invoked
|
||||
expect(mockSetHitResult).not.toHaveBeenCalled()
|
||||
expect(mockOnUpdateList).not.toHaveBeenCalled()
expect(mockOnSubmit).not.toHaveBeenCalled()
})
})

describe('API Response → Results Data Contract', () => {
it('should produce results with required segment fields for rendering', () => {
const response = createHitTestingResponse(3)

// Validate each result has the fields needed by ResultItem component
response.records.forEach((record) => {
expect(record.segment).toHaveProperty('id')
expect(record.segment).toHaveProperty('content')
expect(record.segment).toHaveProperty('position')
expect(record.segment).toHaveProperty('word_count')
expect(record.segment).toHaveProperty('document')
expect(record.segment.document).toHaveProperty('name')
expect(record.score).toBeGreaterThanOrEqual(0)
expect(record.score).toBeLessThanOrEqual(1)
})
})

it('should maintain correct score ordering', () => {
const response = createHitTestingResponse(5)

for (let i = 1; i < response.records.length; i++) {
expect(response.records[i - 1].score).toBeGreaterThanOrEqual(response.records[i].score)
}
})

it('should include document metadata for result item display', () => {
const response = createHitTestingResponse(1)
const record = response.records[0]

expect(record.segment.document.name).toBeTruthy()
expect(record.segment.document.data_source_type).toBeTruthy()
})
})

describe('Successful Submission → Callback Chain', () => {
it('should call setHitResult, onUpdateList, and onSubmit after successful submission', async () => {
const response = createHitTestingResponse(3)
mockHitTestingMutation.mockImplementation(async (_params: unknown, options?: { onSuccess?: (data: HitTestingResponse) => void }) => {
options?.onSuccess?.(response)
return response
})

render(
<QueryInput {...createDefaultProps({
queries: createTextQuery('Test query'),
})}
/>,
)

fireEvent.click(findSubmitButton())

await waitFor(() => {
expect(mockSetHitResult).toHaveBeenCalledWith(response)
expect(mockOnUpdateList).toHaveBeenCalledTimes(1)
expect(mockOnSubmit).toHaveBeenCalledTimes(1)
})
})

it('should trigger records list refresh via onUpdateList after query', async () => {
const response = createHitTestingResponse(1)
mockHitTestingMutation.mockImplementation(async (_params: unknown, options?: { onSuccess?: (data: HitTestingResponse) => void }) => {
options?.onSuccess?.(response)
return response
})

render(
<QueryInput {...createDefaultProps({
queries: createTextQuery('new query'),
})}
/>,
)

fireEvent.click(findSubmitButton())

await waitFor(() => {
expect(mockOnUpdateList).toHaveBeenCalledTimes(1)
})
})
})

describe('External KB Hit Testing', () => {
it('should use external mutation with correct payload for external datasets', async () => {
mockExternalMutation.mockImplementation(async (_params: unknown, options?: { onSuccess?: (data: { records: never[] }) => void }) => {
const response = { records: [] }
options?.onSuccess?.(response)
return response
})

render(
<QueryInput {...createDefaultProps({
queries: createTextQuery('test'),
isExternal: true,
})}
/>,
)

fireEvent.click(findSubmitButton())

await waitFor(() => {
expect(mockExternalMutation).toHaveBeenCalledWith(
expect.objectContaining({
query: 'test',
external_retrieval_model: expect.objectContaining({
top_k: 4,
score_threshold: 0.5,
score_threshold_enabled: false,
}),
}),
expect.objectContaining({
onSuccess: expect.any(Function),
}),
)
// Internal mutation should NOT be called
expect(mockHitTestingMutation).not.toHaveBeenCalled()
})
})

it('should call setExternalHitResult and onUpdateList on successful external submission', async () => {
const externalResponse = { records: [] }
mockExternalMutation.mockImplementation(async (_params: unknown, options?: { onSuccess?: (data: { records: never[] }) => void }) => {
options?.onSuccess?.(externalResponse)
return externalResponse
})

render(
<QueryInput {...createDefaultProps({
queries: createTextQuery('external query'),
isExternal: true,
})}
/>,
)

fireEvent.click(findSubmitButton())

await waitFor(() => {
expect(mockSetExternalHitResult).toHaveBeenCalledWith(externalResponse)
expect(mockOnUpdateList).toHaveBeenCalledTimes(1)
})
})
})
})
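A minimal sketch of the request shape the external hit-testing assertions above imply; the `ExternalHitTestingPayload` type name and the example constant are illustrative, not part of the codebase, and the defaults are read directly off the expected payload (top_k 4, score_threshold 0.5, threshold disabled):

// Shape implied by the assertions above; names here are illustrative only.
type ExternalHitTestingPayload = {
  query: string
  external_retrieval_model: {
    top_k: number
    score_threshold: number
    score_threshold_enabled: boolean
  }
}

// Defaults taken from the expected payload in the test.
const exampleExternalPayload: ExternalHitTestingPayload = {
  query: 'test',
  external_retrieval_model: {
    top_k: 4,
    score_threshold: 0.5,
    score_threshold_enabled: false,
  },
}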
337 web/__tests__/datasets/metadata-management-flow.test.tsx Normal file
@ -0,0 +1,337 @@
/**
* Integration Test: Metadata Management Flow
*
* Tests the cross-module composition of metadata name validation, type constraints,
* and duplicate detection across the metadata management hooks.
*
* The unit-level use-check-metadata-name.spec.ts tests the validation hook alone.
* This integration test verifies:
* - Name validation combined with existing metadata list (duplicate detection)
* - Metadata type enum constraints matching expected data model
* - Full add/rename workflow: validate name → check duplicates → allow or reject
* - Name uniqueness logic: existing metadata keeps its own name, cannot take another's
*/

import type { MetadataItemWithValueLength } from '@/app/components/datasets/metadata/types'
import { renderHook } from '@testing-library/react'
import { DataType } from '@/app/components/datasets/metadata/types'

vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => key,
}),
}))

const { default: useCheckMetadataName } = await import(
'@/app/components/datasets/metadata/hooks/use-check-metadata-name',
)

// --- Factory functions ---

const createMetadataItem = (
id: string,
name: string,
type = DataType.string,
count = 0,
): MetadataItemWithValueLength => ({
id,
name,
type,
count,
})

const createMetadataList = (): MetadataItemWithValueLength[] => [
createMetadataItem('meta-1', 'author', DataType.string, 5),
createMetadataItem('meta-2', 'created_date', DataType.time, 10),
createMetadataItem('meta-3', 'page_count', DataType.number, 3),
createMetadataItem('meta-4', 'source_url', DataType.string, 8),
createMetadataItem('meta-5', 'version', DataType.number, 2),
]

describe('Metadata Management Flow - Cross-Module Validation Composition', () => {
describe('Name Validation Flow: Format Rules', () => {
it('should accept valid lowercase names with underscores', () => {
const { result } = renderHook(() => useCheckMetadataName())

expect(result.current.checkName('valid_name').errorMsg).toBe('')
expect(result.current.checkName('author').errorMsg).toBe('')
expect(result.current.checkName('page_count').errorMsg).toBe('')
expect(result.current.checkName('v2_field').errorMsg).toBe('')
})

it('should reject empty names', () => {
const { result } = renderHook(() => useCheckMetadataName())

expect(result.current.checkName('').errorMsg).toBeTruthy()
})

it('should reject names with invalid characters', () => {
const { result } = renderHook(() => useCheckMetadataName())

expect(result.current.checkName('Author').errorMsg).toBeTruthy()
expect(result.current.checkName('my-field').errorMsg).toBeTruthy()
expect(result.current.checkName('field name').errorMsg).toBeTruthy()
expect(result.current.checkName('1field').errorMsg).toBeTruthy()
expect(result.current.checkName('_private').errorMsg).toBeTruthy()
})

it('should reject names exceeding 255 characters', () => {
const { result } = renderHook(() => useCheckMetadataName())

const longName = 'a'.repeat(256)
expect(result.current.checkName(longName).errorMsg).toBeTruthy()

const maxName = 'a'.repeat(255)
expect(result.current.checkName(maxName).errorMsg).toBe('')
})
})

describe('Metadata Type Constraints: Enum Values Match Expected Set', () => {
it('should define exactly three data types', () => {
const typeValues = Object.values(DataType)
expect(typeValues).toHaveLength(3)
})

it('should include string, number, and time types', () => {
expect(DataType.string).toBe('string')
expect(DataType.number).toBe('number')
expect(DataType.time).toBe('time')
})

it('should use consistent types in metadata items', () => {
const metadataList = createMetadataList()

const stringItems = metadataList.filter(m => m.type === DataType.string)
const numberItems = metadataList.filter(m => m.type === DataType.number)
const timeItems = metadataList.filter(m => m.type === DataType.time)

expect(stringItems).toHaveLength(2)
expect(numberItems).toHaveLength(2)
expect(timeItems).toHaveLength(1)
})

it('should enforce type-safe metadata item construction', () => {
const item = createMetadataItem('test-1', 'test_field', DataType.number, 0)

expect(item.id).toBe('test-1')
expect(item.name).toBe('test_field')
expect(item.type).toBe(DataType.number)
expect(item.count).toBe(0)
})
})

describe('Duplicate Name Detection: Add Metadata → Check Name → Detect Duplicates', () => {
it('should detect duplicate names against an existing metadata list', () => {
const { result } = renderHook(() => useCheckMetadataName())
const existingMetadata = createMetadataList()

const checkDuplicate = (newName: string): boolean => {
const formatCheck = result.current.checkName(newName)
if (formatCheck.errorMsg)
return false
return existingMetadata.some(m => m.name === newName)
}

expect(checkDuplicate('author')).toBe(true)
expect(checkDuplicate('created_date')).toBe(true)
expect(checkDuplicate('page_count')).toBe(true)
})

it('should allow names that do not conflict with existing metadata', () => {
const { result } = renderHook(() => useCheckMetadataName())
const existingMetadata = createMetadataList()

const isNameAvailable = (newName: string): boolean => {
const formatCheck = result.current.checkName(newName)
if (formatCheck.errorMsg)
return false
return !existingMetadata.some(m => m.name === newName)
}

expect(isNameAvailable('category')).toBe(true)
expect(isNameAvailable('file_size')).toBe(true)
expect(isNameAvailable('language')).toBe(true)
})

it('should reject names that fail format validation before duplicate check', () => {
const { result } = renderHook(() => useCheckMetadataName())

const validateAndCheckDuplicate = (newName: string): { valid: boolean, reason: string } => {
const formatCheck = result.current.checkName(newName)
if (formatCheck.errorMsg)
return { valid: false, reason: 'format' }
return { valid: true, reason: '' }
}

expect(validateAndCheckDuplicate('Author').reason).toBe('format')
expect(validateAndCheckDuplicate('').reason).toBe('format')
expect(validateAndCheckDuplicate('valid_name').valid).toBe(true)
})
})

describe('Name Uniqueness Across Edits: Rename Workflow', () => {
it('should allow an existing metadata item to keep its own name', () => {
const { result } = renderHook(() => useCheckMetadataName())
const existingMetadata = createMetadataList()

const isRenameValid = (itemId: string, newName: string): boolean => {
const formatCheck = result.current.checkName(newName)
if (formatCheck.errorMsg)
return false
// Allow keeping the same name (skip self in duplicate check)
return !existingMetadata.some(m => m.name === newName && m.id !== itemId)
}

// Author keeping its own name should be valid
expect(isRenameValid('meta-1', 'author')).toBe(true)
// page_count keeping its own name should be valid
expect(isRenameValid('meta-3', 'page_count')).toBe(true)
})

it('should reject renaming to another existing metadata name', () => {
const { result } = renderHook(() => useCheckMetadataName())
const existingMetadata = createMetadataList()

const isRenameValid = (itemId: string, newName: string): boolean => {
const formatCheck = result.current.checkName(newName)
if (formatCheck.errorMsg)
return false
return !existingMetadata.some(m => m.name === newName && m.id !== itemId)
}

// Author trying to rename to "page_count" (taken by meta-3)
expect(isRenameValid('meta-1', 'page_count')).toBe(false)
// version trying to rename to "source_url" (taken by meta-4)
expect(isRenameValid('meta-5', 'source_url')).toBe(false)
})

it('should allow renaming to a completely new valid name', () => {
const { result } = renderHook(() => useCheckMetadataName())
const existingMetadata = createMetadataList()

const isRenameValid = (itemId: string, newName: string): boolean => {
const formatCheck = result.current.checkName(newName)
if (formatCheck.errorMsg)
return false
return !existingMetadata.some(m => m.name === newName && m.id !== itemId)
}

expect(isRenameValid('meta-1', 'document_author')).toBe(true)
expect(isRenameValid('meta-2', 'publish_date')).toBe(true)
expect(isRenameValid('meta-3', 'total_pages')).toBe(true)
})

it('should reject renaming with an invalid format even if name is unique', () => {
const { result } = renderHook(() => useCheckMetadataName())
const existingMetadata = createMetadataList()

const isRenameValid = (itemId: string, newName: string): boolean => {
const formatCheck = result.current.checkName(newName)
if (formatCheck.errorMsg)
return false
return !existingMetadata.some(m => m.name === newName && m.id !== itemId)
}

expect(isRenameValid('meta-1', 'New Author')).toBe(false)
expect(isRenameValid('meta-2', '2024_date')).toBe(false)
expect(isRenameValid('meta-3', '')).toBe(false)
})
})

describe('Full Metadata Management Workflow', () => {
it('should support a complete add-validate-check-duplicate cycle', () => {
const { result } = renderHook(() => useCheckMetadataName())
const existingMetadata = createMetadataList()

const addMetadataField = (
name: string,
type: DataType,
): { success: boolean, error?: string } => {
const formatCheck = result.current.checkName(name)
if (formatCheck.errorMsg)
return { success: false, error: 'invalid_format' }

if (existingMetadata.some(m => m.name === name))
return { success: false, error: 'duplicate_name' }

existingMetadata.push(createMetadataItem(`meta-${existingMetadata.length + 1}`, name, type))
return { success: true }
}

// Add a valid new field
const result1 = addMetadataField('department', DataType.string)
expect(result1.success).toBe(true)
expect(existingMetadata).toHaveLength(6)

// Try to add a duplicate
const result2 = addMetadataField('author', DataType.string)
expect(result2.success).toBe(false)
expect(result2.error).toBe('duplicate_name')
expect(existingMetadata).toHaveLength(6)

// Try to add an invalid name
const result3 = addMetadataField('Invalid Name', DataType.string)
expect(result3.success).toBe(false)
expect(result3.error).toBe('invalid_format')
expect(existingMetadata).toHaveLength(6)

// Add another valid field
const result4 = addMetadataField('priority_level', DataType.number)
expect(result4.success).toBe(true)
expect(existingMetadata).toHaveLength(7)
})

it('should support a complete rename workflow with validation chain', () => {
const { result } = renderHook(() => useCheckMetadataName())
const existingMetadata = createMetadataList()

const renameMetadataField = (
itemId: string,
newName: string,
): { success: boolean, error?: string } => {
const formatCheck = result.current.checkName(newName)
if (formatCheck.errorMsg)
return { success: false, error: 'invalid_format' }

if (existingMetadata.some(m => m.name === newName && m.id !== itemId))
return { success: false, error: 'duplicate_name' }

const item = existingMetadata.find(m => m.id === itemId)
if (!item)
return { success: false, error: 'not_found' }

// Simulate the rename in-place
const index = existingMetadata.indexOf(item)
existingMetadata[index] = { ...item, name: newName }
return { success: true }
}

// Rename author to document_author
expect(renameMetadataField('meta-1', 'document_author').success).toBe(true)
expect(existingMetadata.find(m => m.id === 'meta-1')?.name).toBe('document_author')

// Try renaming created_date to page_count (already taken)
expect(renameMetadataField('meta-2', 'page_count').error).toBe('duplicate_name')

// Rename to invalid format
expect(renameMetadataField('meta-3', 'Page Count').error).toBe('invalid_format')

// Rename non-existent item
expect(renameMetadataField('meta-999', 'something').error).toBe('not_found')
})

it('should maintain validation consistency across multiple operations', () => {
const { result } = renderHook(() => useCheckMetadataName())

// Validate the same name multiple times for consistency
const name = 'consistent_field'
const results = Array.from({ length: 5 }, () => result.current.checkName(name))

expect(results.every(r => r.errorMsg === '')).toBe(true)

// Validate an invalid name multiple times
const invalidResults = Array.from({ length: 5 }, () => result.current.checkName('Invalid'))
expect(invalidResults.every(r => r.errorMsg !== '')).toBe(true)
})
})
})
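The add/rename rules exercised in the file above reduce to the same validate-then-scan composition that each test re-declares inline. A minimal sketch, assuming only the `checkName(name)` → `{ errorMsg }` contract shown in the tests; the helper names and the `MetadataLike` shape are illustrative, not part of the codebase:

type MetadataLike = { id: string, name: string }
type NameCheck = (name: string) => { errorMsg: string }

// Format rules run first; only a well-formed name is checked for uniqueness.
const canAddMetadata = (checkName: NameCheck, existing: MetadataLike[], name: string) => {
  if (checkName(name).errorMsg)
    return { ok: false, error: 'invalid_format' as const }
  if (existing.some(m => m.name === name))
    return { ok: false, error: 'duplicate_name' as const }
  return { ok: true }
}

// Renaming skips the item itself, so keeping the current name stays valid.
const canRenameMetadata = (checkName: NameCheck, existing: MetadataLike[], itemId: string, newName: string) =>
  !checkName(newName).errorMsg && !existing.some(m => m.name === newName && m.id !== itemId)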
477 web/__tests__/datasets/pipeline-datasource-flow.test.tsx Normal file
@ -0,0 +1,477 @@
/**
* Integration Test: Pipeline Data Source Store Composition
*
* Tests cross-slice interactions in the pipeline data source Zustand store.
* The unit-level slice specs test each slice in isolation.
* This integration test verifies:
* - Store initialization produces correct defaults across all slices
* - Cross-slice coordination (e.g. credential shared across slices)
* - State isolation: changes in one slice do not affect others
* - Full workflow simulation through credential → source → data path
*/

import type { NotionPage } from '@/models/common'
import type { CrawlResultItem, FileItem } from '@/models/datasets'
import type { OnlineDriveFile } from '@/models/pipeline'
import { createDataSourceStore } from '@/app/components/datasets/documents/create-from-pipeline/data-source/store'
import { CrawlStep } from '@/models/datasets'
import { OnlineDriveFileType } from '@/models/pipeline'

// --- Factory functions ---

const createFileItem = (id: string): FileItem => ({
fileID: id,
file: { id, name: `${id}.txt`, size: 1024 } as FileItem['file'],
progress: 100,
})

const createCrawlResultItem = (url: string, title?: string): CrawlResultItem => ({
title: title ?? `Page: ${url}`,
markdown: `# ${title ?? url}\n\nContent for ${url}`,
description: `Description for ${url}`,
source_url: url,
})

const createOnlineDriveFile = (id: string, name: string, type = OnlineDriveFileType.file): OnlineDriveFile => ({
id,
name,
size: 2048,
type,
})

const createNotionPage = (pageId: string): NotionPage => ({
page_id: pageId,
page_name: `Page ${pageId}`,
page_icon: null,
is_bound: true,
parent_id: 'parent-1',
type: 'page',
workspace_id: 'ws-1',
})

describe('Pipeline Data Source Store Composition - Cross-Slice Integration', () => {
describe('Store Initialization → All Slices Have Correct Defaults', () => {
it('should create a store with all five slices combined', () => {
const store = createDataSourceStore()
const state = store.getState()

// Common slice defaults
expect(state.currentCredentialId).toBe('')
expect(state.currentNodeIdRef.current).toBe('')

// Local file slice defaults
expect(state.localFileList).toEqual([])
expect(state.currentLocalFile).toBeUndefined()

// Online document slice defaults
expect(state.documentsData).toEqual([])
expect(state.onlineDocuments).toEqual([])
expect(state.searchValue).toBe('')
expect(state.selectedPagesId).toEqual(new Set())

// Website crawl slice defaults
expect(state.websitePages).toEqual([])
expect(state.step).toBe(CrawlStep.init)
expect(state.previewIndex).toBe(-1)

// Online drive slice defaults
expect(state.breadcrumbs).toEqual([])
expect(state.prefix).toEqual([])
expect(state.keywords).toBe('')
expect(state.selectedFileIds).toEqual([])
expect(state.onlineDriveFileList).toEqual([])
expect(state.bucket).toBe('')
expect(state.hasBucket).toBe(false)
})
})

describe('Cross-Slice Coordination: Shared Credential', () => {
it('should set credential that is accessible from the common slice', () => {
const store = createDataSourceStore()

store.getState().setCurrentCredentialId('cred-abc-123')

expect(store.getState().currentCredentialId).toBe('cred-abc-123')
})

it('should allow credential update independently of all other slices', () => {
const store = createDataSourceStore()

store.getState().setLocalFileList([createFileItem('f1')])
store.getState().setCurrentCredentialId('cred-xyz')

expect(store.getState().currentCredentialId).toBe('cred-xyz')
expect(store.getState().localFileList).toHaveLength(1)
})
})

describe('Local File Workflow: Set Files → Verify List → Clear', () => {
it('should set and retrieve local file list', () => {
const store = createDataSourceStore()
const files = [createFileItem('f1'), createFileItem('f2'), createFileItem('f3')]

store.getState().setLocalFileList(files)

expect(store.getState().localFileList).toHaveLength(3)
expect(store.getState().localFileList[0].fileID).toBe('f1')
expect(store.getState().localFileList[2].fileID).toBe('f3')
})

it('should update preview ref when setting file list', () => {
const store = createDataSourceStore()
const files = [createFileItem('f-preview')]

store.getState().setLocalFileList(files)

expect(store.getState().previewLocalFileRef.current).toBeDefined()
})

it('should clear files by setting empty list', () => {
const store = createDataSourceStore()

store.getState().setLocalFileList([createFileItem('f1')])
expect(store.getState().localFileList).toHaveLength(1)

store.getState().setLocalFileList([])
expect(store.getState().localFileList).toHaveLength(0)
})

it('should set and clear current local file selection', () => {
const store = createDataSourceStore()
const file = { id: 'current-file', name: 'current.txt' } as FileItem['file']

store.getState().setCurrentLocalFile(file)
expect(store.getState().currentLocalFile).toBeDefined()
expect(store.getState().currentLocalFile?.id).toBe('current-file')

store.getState().setCurrentLocalFile(undefined)
expect(store.getState().currentLocalFile).toBeUndefined()
})
})

describe('Online Document Workflow: Set Documents → Select Pages → Verify', () => {
it('should set documents data and online documents', () => {
const store = createDataSourceStore()
const pages = [createNotionPage('page-1'), createNotionPage('page-2')]

store.getState().setOnlineDocuments(pages)

expect(store.getState().onlineDocuments).toHaveLength(2)
expect(store.getState().onlineDocuments[0].page_id).toBe('page-1')
})

it('should update preview ref when setting online documents', () => {
const store = createDataSourceStore()
const pages = [createNotionPage('page-preview')]

store.getState().setOnlineDocuments(pages)

expect(store.getState().previewOnlineDocumentRef.current).toBeDefined()
expect(store.getState().previewOnlineDocumentRef.current?.page_id).toBe('page-preview')
})

it('should track selected page IDs', () => {
const store = createDataSourceStore()
const pages = [createNotionPage('p1'), createNotionPage('p2'), createNotionPage('p3')]

store.getState().setOnlineDocuments(pages)
store.getState().setSelectedPagesId(new Set(['p1', 'p3']))

expect(store.getState().selectedPagesId.size).toBe(2)
expect(store.getState().selectedPagesId.has('p1')).toBe(true)
expect(store.getState().selectedPagesId.has('p2')).toBe(false)
expect(store.getState().selectedPagesId.has('p3')).toBe(true)
})

it('should manage search value for filtering documents', () => {
const store = createDataSourceStore()

store.getState().setSearchValue('meeting notes')

expect(store.getState().searchValue).toBe('meeting notes')
})

it('should set and clear current document selection', () => {
const store = createDataSourceStore()
const page = createNotionPage('current-page')

store.getState().setCurrentDocument(page)
expect(store.getState().currentDocument?.page_id).toBe('current-page')

store.getState().setCurrentDocument(undefined)
expect(store.getState().currentDocument).toBeUndefined()
})
})

describe('Website Crawl Workflow: Set Pages → Track Step → Preview', () => {
it('should set website pages and update preview ref', () => {
const store = createDataSourceStore()
const pages = [
createCrawlResultItem('https://example.com'),
createCrawlResultItem('https://example.com/about'),
]

store.getState().setWebsitePages(pages)

expect(store.getState().websitePages).toHaveLength(2)
expect(store.getState().previewWebsitePageRef.current?.source_url).toBe('https://example.com')
})

it('should manage crawl step transitions', () => {
const store = createDataSourceStore()

expect(store.getState().step).toBe(CrawlStep.init)

store.getState().setStep(CrawlStep.running)
expect(store.getState().step).toBe(CrawlStep.running)

store.getState().setStep(CrawlStep.finished)
expect(store.getState().step).toBe(CrawlStep.finished)
})

it('should set crawl result with data and timing', () => {
const store = createDataSourceStore()
const result = {
data: [createCrawlResultItem('https://test.com')],
time_consuming: 3.5,
}

store.getState().setCrawlResult(result)

expect(store.getState().crawlResult?.data).toHaveLength(1)
expect(store.getState().crawlResult?.time_consuming).toBe(3.5)
})

it('should manage preview index for page navigation', () => {
const store = createDataSourceStore()

store.getState().setPreviewIndex(2)
expect(store.getState().previewIndex).toBe(2)

store.getState().setPreviewIndex(-1)
expect(store.getState().previewIndex).toBe(-1)
})

it('should set and clear current website selection', () => {
const store = createDataSourceStore()
const page = createCrawlResultItem('https://current.com')

store.getState().setCurrentWebsite(page)
expect(store.getState().currentWebsite?.source_url).toBe('https://current.com')

store.getState().setCurrentWebsite(undefined)
expect(store.getState().currentWebsite).toBeUndefined()
})
})

describe('Online Drive Workflow: Breadcrumbs → File Selection → Navigation', () => {
it('should manage breadcrumb navigation', () => {
const store = createDataSourceStore()

store.getState().setBreadcrumbs(['root', 'folder-a', 'subfolder'])

expect(store.getState().breadcrumbs).toEqual(['root', 'folder-a', 'subfolder'])
})

it('should support breadcrumb push/pop pattern', () => {
const store = createDataSourceStore()

store.getState().setBreadcrumbs(['root'])
store.getState().setBreadcrumbs([...store.getState().breadcrumbs, 'level-1'])
store.getState().setBreadcrumbs([...store.getState().breadcrumbs, 'level-2'])

expect(store.getState().breadcrumbs).toEqual(['root', 'level-1', 'level-2'])

// Pop back one level
store.getState().setBreadcrumbs(store.getState().breadcrumbs.slice(0, -1))
expect(store.getState().breadcrumbs).toEqual(['root', 'level-1'])
})

it('should manage file list and selection', () => {
const store = createDataSourceStore()
const files = [
createOnlineDriveFile('drive-1', 'report.pdf'),
createOnlineDriveFile('drive-2', 'data.csv'),
createOnlineDriveFile('drive-3', 'images', OnlineDriveFileType.folder),
]

store.getState().setOnlineDriveFileList(files)
expect(store.getState().onlineDriveFileList).toHaveLength(3)

store.getState().setSelectedFileIds(['drive-1', 'drive-2'])
expect(store.getState().selectedFileIds).toEqual(['drive-1', 'drive-2'])
})

it('should update preview ref when selecting files', () => {
const store = createDataSourceStore()
const files = [
createOnlineDriveFile('drive-a', 'file-a.txt'),
createOnlineDriveFile('drive-b', 'file-b.txt'),
]

store.getState().setOnlineDriveFileList(files)
store.getState().setSelectedFileIds(['drive-b'])

expect(store.getState().previewOnlineDriveFileRef.current?.id).toBe('drive-b')
})

it('should manage bucket and prefix for S3-like navigation', () => {
const store = createDataSourceStore()

store.getState().setBucket('my-data-bucket')
store.getState().setPrefix(['data', '2024'])
store.getState().setHasBucket(true)

expect(store.getState().bucket).toBe('my-data-bucket')
expect(store.getState().prefix).toEqual(['data', '2024'])
expect(store.getState().hasBucket).toBe(true)
})

it('should manage keywords for search filtering', () => {
const store = createDataSourceStore()

store.getState().setKeywords('quarterly report')
expect(store.getState().keywords).toBe('quarterly report')
})
})

describe('State Isolation: Changes to One Slice Do Not Affect Others', () => {
it('should keep local file state independent from online document state', () => {
const store = createDataSourceStore()

store.getState().setLocalFileList([createFileItem('local-1')])
store.getState().setOnlineDocuments([createNotionPage('notion-1')])

expect(store.getState().localFileList).toHaveLength(1)
expect(store.getState().onlineDocuments).toHaveLength(1)

// Clearing local files should not affect online documents
store.getState().setLocalFileList([])
expect(store.getState().localFileList).toHaveLength(0)
expect(store.getState().onlineDocuments).toHaveLength(1)
})

it('should keep website crawl state independent from online drive state', () => {
const store = createDataSourceStore()

store.getState().setWebsitePages([createCrawlResultItem('https://site.com')])
store.getState().setOnlineDriveFileList([createOnlineDriveFile('d1', 'file.txt')])

expect(store.getState().websitePages).toHaveLength(1)
expect(store.getState().onlineDriveFileList).toHaveLength(1)

// Clearing website pages should not affect drive files
store.getState().setWebsitePages([])
expect(store.getState().websitePages).toHaveLength(0)
expect(store.getState().onlineDriveFileList).toHaveLength(1)
})

it('should create fully independent store instances', () => {
const storeA = createDataSourceStore()
const storeB = createDataSourceStore()

storeA.getState().setCurrentCredentialId('cred-A')
storeA.getState().setLocalFileList([createFileItem('fa-1')])

expect(storeA.getState().currentCredentialId).toBe('cred-A')
expect(storeB.getState().currentCredentialId).toBe('')
expect(storeB.getState().localFileList).toEqual([])
})
})

describe('Full Workflow Simulation: Credential → Source → Data → Verify', () => {
it('should support a complete local file upload workflow', () => {
const store = createDataSourceStore()

// Step 1: Set credential
store.getState().setCurrentCredentialId('upload-cred-1')

// Step 2: Set file list
const files = [createFileItem('upload-1'), createFileItem('upload-2')]
store.getState().setLocalFileList(files)

// Step 3: Select current file for preview
store.getState().setCurrentLocalFile(files[0].file)

// Verify all state is consistent
expect(store.getState().currentCredentialId).toBe('upload-cred-1')
expect(store.getState().localFileList).toHaveLength(2)
expect(store.getState().currentLocalFile?.id).toBe('upload-1')
expect(store.getState().previewLocalFileRef.current).toBeDefined()
})

it('should support a complete website crawl workflow', () => {
const store = createDataSourceStore()

// Step 1: Set credential
store.getState().setCurrentCredentialId('crawl-cred-1')

// Step 2: Init crawl
store.getState().setStep(CrawlStep.running)

// Step 3: Crawl completes with results
const crawledPages = [
createCrawlResultItem('https://docs.example.com/guide'),
createCrawlResultItem('https://docs.example.com/api'),
createCrawlResultItem('https://docs.example.com/faq'),
]
store.getState().setCrawlResult({ data: crawledPages, time_consuming: 12.5 })
store.getState().setStep(CrawlStep.finished)

// Step 4: Set website pages from results
store.getState().setWebsitePages(crawledPages)

// Step 5: Set preview
store.getState().setPreviewIndex(1)

// Verify all state
expect(store.getState().currentCredentialId).toBe('crawl-cred-1')
expect(store.getState().step).toBe(CrawlStep.finished)
expect(store.getState().websitePages).toHaveLength(3)
expect(store.getState().crawlResult?.time_consuming).toBe(12.5)
expect(store.getState().previewIndex).toBe(1)
expect(store.getState().previewWebsitePageRef.current?.source_url).toBe('https://docs.example.com/guide')
})

it('should support a complete online drive navigation workflow', () => {
const store = createDataSourceStore()

// Step 1: Set credential
store.getState().setCurrentCredentialId('drive-cred-1')

// Step 2: Set bucket
store.getState().setBucket('company-docs')
store.getState().setHasBucket(true)

// Step 3: Navigate into folders
store.getState().setBreadcrumbs(['company-docs'])
store.getState().setPrefix(['projects'])
const folderFiles = [
createOnlineDriveFile('proj-1', 'project-alpha', OnlineDriveFileType.folder),
createOnlineDriveFile('proj-2', 'project-beta', OnlineDriveFileType.folder),
createOnlineDriveFile('readme', 'README.md', OnlineDriveFileType.file),
]
store.getState().setOnlineDriveFileList(folderFiles)

// Step 4: Navigate deeper
store.getState().setBreadcrumbs([...store.getState().breadcrumbs, 'project-alpha'])
store.getState().setPrefix([...store.getState().prefix, 'project-alpha'])

// Step 5: Select files
store.getState().setOnlineDriveFileList([
createOnlineDriveFile('doc-1', 'spec.pdf'),
createOnlineDriveFile('doc-2', 'design.fig'),
])
store.getState().setSelectedFileIds(['doc-1'])

// Verify full state
expect(store.getState().currentCredentialId).toBe('drive-cred-1')
expect(store.getState().bucket).toBe('company-docs')
expect(store.getState().breadcrumbs).toEqual(['company-docs', 'project-alpha'])
expect(store.getState().prefix).toEqual(['projects', 'project-alpha'])
expect(store.getState().onlineDriveFileList).toHaveLength(2)
expect(store.getState().selectedFileIds).toEqual(['doc-1'])
expect(store.getState().previewOnlineDriveFileRef.current?.name).toBe('spec.pdf')
})
})
})
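Outside of tests, the same store can be driven imperatively through `getState()`, which is all the workflow simulations above rely on. A minimal sketch of the crawl sequence, assuming a caller-supplied fetcher; `runCrawl` and `fetchPages` are illustrative names, while the setters are the ones exercised in the file:

import type { CrawlResultItem } from '@/models/datasets'
import { createDataSourceStore } from '@/app/components/datasets/documents/create-from-pipeline/data-source/store'
import { CrawlStep } from '@/models/datasets'

type CrawlFetcher = () => Promise<{ data: CrawlResultItem[], time_consuming: number }>

const runCrawl = async (store: ReturnType<typeof createDataSourceStore>, fetchPages: CrawlFetcher) => {
  store.getState().setStep(CrawlStep.running)
  const result = await fetchPages()
  // Mirror the finished-crawl state the integration test asserts on.
  store.getState().setCrawlResult(result)
  store.getState().setWebsitePages(result.data)
  store.getState().setPreviewIndex(result.data.length > 0 ? 0 : -1)
  store.getState().setStep(CrawlStep.finished)
}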
301 web/__tests__/datasets/segment-crud.test.tsx Normal file
@ -0,0 +1,301 @@
/**
* Integration Test: Segment CRUD Flow
*
* Tests segment selection, search/filter, and modal state management across hooks.
* Validates cross-hook data contracts in the completed segment module.
*/

import type { SegmentDetailModel } from '@/models/datasets'
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { useModalState } from '@/app/components/datasets/documents/detail/completed/hooks/use-modal-state'
import { useSearchFilter } from '@/app/components/datasets/documents/detail/completed/hooks/use-search-filter'
import { useSegmentSelection } from '@/app/components/datasets/documents/detail/completed/hooks/use-segment-selection'

const createSegment = (id: string, content = 'Test segment content'): SegmentDetailModel => ({
id,
position: 1,
document_id: 'doc-1',
content,
sign_content: content,
answer: '',
word_count: 50,
tokens: 25,
keywords: ['test'],
index_node_id: 'idx-1',
index_node_hash: 'hash-1',
hit_count: 0,
enabled: true,
disabled_at: 0,
disabled_by: '',
status: 'completed',
created_by: 'user-1',
created_at: Date.now(),
indexing_at: Date.now(),
completed_at: Date.now(),
error: null,
stopped_at: 0,
updated_at: Date.now(),
attachments: [],
} as SegmentDetailModel)

describe('Segment CRUD Flow', () => {
beforeEach(() => {
vi.clearAllMocks()
})

describe('Search and Filter → Segment List Query', () => {
it('should manage search input with debounce', () => {
vi.useFakeTimers()
const onPageChange = vi.fn()
const { result } = renderHook(() => useSearchFilter({ onPageChange }))

act(() => {
result.current.handleInputChange('keyword')
})

expect(result.current.inputValue).toBe('keyword')
expect(result.current.searchValue).toBe('')

act(() => {
vi.advanceTimersByTime(500)
})
expect(result.current.searchValue).toBe('keyword')
expect(onPageChange).toHaveBeenCalledWith(1)

vi.useRealTimers()
})

it('should manage status filter state', () => {
const onPageChange = vi.fn()
const { result } = renderHook(() => useSearchFilter({ onPageChange }))

// status value 1 maps to !!1 = true (enabled)
act(() => {
result.current.onChangeStatus({ value: 1, name: 'enabled' })
})
// onChangeStatus converts: value === 'all' ? 'all' : !!value
expect(result.current.selectedStatus).toBe(true)

act(() => {
result.current.onClearFilter()
})
expect(result.current.selectedStatus).toBe('all')
expect(result.current.inputValue).toBe('')
})

it('should provide status list for filter dropdown', () => {
const { result } = renderHook(() => useSearchFilter({ onPageChange: vi.fn() }))
expect(result.current.statusList).toBeInstanceOf(Array)
expect(result.current.statusList.length).toBe(3) // all, disabled, enabled
})

it('should compute selectDefaultValue based on selectedStatus', () => {
const { result } = renderHook(() => useSearchFilter({ onPageChange: vi.fn() }))

// Initial state: 'all'
expect(result.current.selectDefaultValue).toBe('all')

// Set to enabled (true)
act(() => {
result.current.onChangeStatus({ value: 1, name: 'enabled' })
})
expect(result.current.selectDefaultValue).toBe(1)

// Set to disabled (false)
act(() => {
result.current.onChangeStatus({ value: 0, name: 'disabled' })
})
expect(result.current.selectDefaultValue).toBe(0)
})
})

describe('Segment Selection → Batch Operations', () => {
const segments = [
createSegment('seg-1'),
createSegment('seg-2'),
createSegment('seg-3'),
]

it('should manage individual segment selection', () => {
const { result } = renderHook(() => useSegmentSelection(segments))

act(() => {
result.current.onSelected('seg-1')
})
expect(result.current.selectedSegmentIds).toContain('seg-1')

act(() => {
result.current.onSelected('seg-2')
})
expect(result.current.selectedSegmentIds).toContain('seg-1')
expect(result.current.selectedSegmentIds).toContain('seg-2')
expect(result.current.selectedSegmentIds).toHaveLength(2)
})

it('should toggle selection on repeated click', () => {
const { result } = renderHook(() => useSegmentSelection(segments))

act(() => {
result.current.onSelected('seg-1')
})
expect(result.current.selectedSegmentIds).toContain('seg-1')

act(() => {
result.current.onSelected('seg-1')
})
expect(result.current.selectedSegmentIds).not.toContain('seg-1')
})

it('should support select all toggle', () => {
const { result } = renderHook(() => useSegmentSelection(segments))

act(() => {
result.current.onSelectedAll()
})
expect(result.current.selectedSegmentIds).toHaveLength(3)
expect(result.current.isAllSelected).toBe(true)

act(() => {
result.current.onSelectedAll()
})
expect(result.current.selectedSegmentIds).toHaveLength(0)
expect(result.current.isAllSelected).toBe(false)
})

it('should detect partial selection via isSomeSelected', () => {
const { result } = renderHook(() => useSegmentSelection(segments))

act(() => {
result.current.onSelected('seg-1')
})

// After selecting one of three, isSomeSelected should be true
expect(result.current.selectedSegmentIds).toEqual(['seg-1'])
expect(result.current.isSomeSelected).toBe(true)
expect(result.current.isAllSelected).toBe(false)
})

it('should clear selection via onCancelBatchOperation', () => {
const { result } = renderHook(() => useSegmentSelection(segments))

act(() => {
result.current.onSelected('seg-1')
result.current.onSelected('seg-2')
})
expect(result.current.selectedSegmentIds).toHaveLength(2)

act(() => {
result.current.onCancelBatchOperation()
})
expect(result.current.selectedSegmentIds).toHaveLength(0)
})
})

describe('Modal State Management', () => {
const onNewSegmentModalChange = vi.fn()

it('should open segment detail modal on card click', () => {
const { result } = renderHook(() => useModalState({ onNewSegmentModalChange }))

const segment = createSegment('seg-detail-1', 'Detail content')
act(() => {
result.current.onClickCard(segment)
})
expect(result.current.currSegment.showModal).toBe(true)
expect(result.current.currSegment.segInfo).toBeDefined()
expect(result.current.currSegment.segInfo!.id).toBe('seg-detail-1')
})

it('should close segment detail modal', () => {
const { result } = renderHook(() => useModalState({ onNewSegmentModalChange }))

const segment = createSegment('seg-1')
act(() => {
result.current.onClickCard(segment)
})
expect(result.current.currSegment.showModal).toBe(true)

act(() => {
result.current.onCloseSegmentDetail()
})
expect(result.current.currSegment.showModal).toBe(false)
})

it('should manage full screen toggle', () => {
const { result } = renderHook(() => useModalState({ onNewSegmentModalChange }))

expect(result.current.fullScreen).toBe(false)
act(() => {
result.current.toggleFullScreen()
})
expect(result.current.fullScreen).toBe(true)
act(() => {
result.current.toggleFullScreen()
})
expect(result.current.fullScreen).toBe(false)
})

it('should manage collapsed state', () => {
const { result } = renderHook(() => useModalState({ onNewSegmentModalChange }))

expect(result.current.isCollapsed).toBe(true)
act(() => {
result.current.toggleCollapsed()
})
expect(result.current.isCollapsed).toBe(false)
})

it('should manage new child segment modal', () => {
const { result } = renderHook(() => useModalState({ onNewSegmentModalChange }))

expect(result.current.showNewChildSegmentModal).toBe(false)
act(() => {
result.current.handleAddNewChildChunk('chunk-parent-1')
})
expect(result.current.showNewChildSegmentModal).toBe(true)
expect(result.current.currChunkId).toBe('chunk-parent-1')

act(() => {
result.current.onCloseNewChildChunkModal()
})
expect(result.current.showNewChildSegmentModal).toBe(false)
})
})

describe('Cross-Hook Data Flow: Search → Selection → Modal', () => {
it('should maintain independent state across all three hooks', () => {
const segments = [createSegment('seg-1'), createSegment('seg-2')]

const { result: filterResult } = renderHook(() =>
useSearchFilter({ onPageChange: vi.fn() }),
)
const { result: selectionResult } = renderHook(() =>
useSegmentSelection(segments),
)
const { result: modalResult } = renderHook(() =>
useModalState({ onNewSegmentModalChange: vi.fn() }),
)

// Set search filter to enabled
act(() => {
filterResult.current.onChangeStatus({ value: 1, name: 'enabled' })
})

// Select a segment
act(() => {
selectionResult.current.onSelected('seg-1')
})

// Open detail modal
act(() => {
modalResult.current.onClickCard(segments[0])
})

// All states should be independent
expect(filterResult.current.selectedStatus).toBe(true) // !!1
expect(selectionResult.current.selectedSegmentIds).toContain('seg-1')
expect(modalResult.current.currSegment.showModal).toBe(true)
})
})
})
192 web/__tests__/develop/api-key-management-flow.test.tsx Normal file
@ -0,0 +1,192 @@
/**
* Integration test: API Key management flow
*
* Tests the cross-component interaction:
* ApiServer → SecretKeyButton → SecretKeyModal
*
* Renders real ApiServer, SecretKeyButton, and SecretKeyModal together
* with only service-layer mocks. Deep modal interactions (create/delete)
* are covered by unit tests in secret-key-modal.spec.tsx.
*/
import { act, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import ApiServer from '@/app/components/develop/ApiServer'

// ---------- fake timers (HeadlessUI Dialog transitions) ----------
beforeEach(() => {
vi.useFakeTimers({ shouldAdvanceTime: true })
})

afterEach(() => {
vi.runOnlyPendingTimers()
vi.useRealTimers()
})

async function flushUI() {
await act(async () => {
vi.runAllTimers()
})
}

// ---------- mocks ----------

vi.mock('@/context/app-context', () => ({
useAppContext: () => ({
currentWorkspace: { id: 'ws-1', name: 'Workspace' },
isCurrentWorkspaceManager: true,
isCurrentWorkspaceEditor: true,
}),
}))

vi.mock('@/hooks/use-timestamp', () => ({
default: () => ({
formatTime: vi.fn((val: number) => `Time:${val}`),
formatDate: vi.fn((val: string) => `Date:${val}`),
}),
}))

vi.mock('@/service/apps', () => ({
createApikey: vi.fn().mockResolvedValue({ token: 'sk-new-token-1234567890abcdef' }),
delApikey: vi.fn().mockResolvedValue({}),
}))

vi.mock('@/service/datasets', () => ({
createApikey: vi.fn().mockResolvedValue({ token: 'dk-new' }),
delApikey: vi.fn().mockResolvedValue({}),
}))

const mockApiKeys = vi.fn().mockReturnValue({ data: [] })
const mockIsLoading = vi.fn().mockReturnValue(false)

vi.mock('@/service/use-apps', () => ({
useAppApiKeys: () => ({
data: mockApiKeys(),
isLoading: mockIsLoading(),
}),
useInvalidateAppApiKeys: () => vi.fn(),
}))

vi.mock('@/service/knowledge/use-dataset', () => ({
useDatasetApiKeys: () => ({ data: null, isLoading: false }),
useInvalidateDatasetApiKeys: () => vi.fn(),
}))

// ---------- tests ----------

describe('API Key management flow', () => {
beforeEach(() => {
vi.clearAllMocks()
mockApiKeys.mockReturnValue({ data: [] })
mockIsLoading.mockReturnValue(false)
})

it('ApiServer renders URL, status badge, and API Key button', () => {
render(<ApiServer apiBaseUrl="https://api.dify.ai/v1" appId="app-1" />)

expect(screen.getByText('https://api.dify.ai/v1')).toBeInTheDocument()
expect(screen.getByText('appApi.ok')).toBeInTheDocument()
expect(screen.getByText('appApi.apiKey')).toBeInTheDocument()
})

it('clicking API Key button opens SecretKeyModal with real modal content', async () => {
const user = userEvent.setup({ advanceTimers: vi.advanceTimersByTime })

render(<ApiServer apiBaseUrl="https://api.dify.ai/v1" appId="app-1" />)

// Click API Key button (rendered by SecretKeyButton)
await act(async () => {
await user.click(screen.getByText('appApi.apiKey'))
})
await flushUI()

// SecretKeyModal should render with real HeadlessUI Dialog
await waitFor(() => {
expect(screen.getByText('appApi.apiKeyModal.apiSecretKey')).toBeInTheDocument()
expect(screen.getByText('appApi.apiKeyModal.apiSecretKeyTips')).toBeInTheDocument()
expect(screen.getByText('appApi.apiKeyModal.createNewSecretKey')).toBeInTheDocument()
})
})

it('modal shows loading state when API keys are being fetched', async () => {
const user = userEvent.setup({ advanceTimers: vi.advanceTimersByTime })
mockIsLoading.mockReturnValue(true)

render(<ApiServer apiBaseUrl="https://api.dify.ai/v1" appId="app-1" />)

await act(async () => {
await user.click(screen.getByText('appApi.apiKey'))
})
await flushUI()

await waitFor(() => {
expect(screen.getByText('appApi.apiKeyModal.apiSecretKey')).toBeInTheDocument()
})

// Loading indicator should be present
expect(document.body.querySelector('[role="status"]')).toBeInTheDocument()
})

it('modal can be closed by clicking X icon', async () => {
const user = userEvent.setup({ advanceTimers: vi.advanceTimersByTime })

render(<ApiServer apiBaseUrl="https://api.dify.ai/v1" appId="app-1" />)

// Open modal
await act(async () => {
await user.click(screen.getByText('appApi.apiKey'))
})
await flushUI()

await waitFor(() => {
expect(screen.getByText('appApi.apiKeyModal.apiSecretKey')).toBeInTheDocument()
})

// Click X icon to close
const closeIcon = document.body.querySelector('svg.cursor-pointer')
expect(closeIcon).toBeInTheDocument()

await act(async () => {
await user.click(closeIcon!)
})
await flushUI()

// Modal should close
await waitFor(() => {
expect(screen.queryByText('appApi.apiKeyModal.apiSecretKeyTips')).not.toBeInTheDocument()
})
})

it('renders correctly with different API URLs', async () => {
const user = userEvent.setup({ advanceTimers: vi.advanceTimersByTime })

const { rerender } = render(
<ApiServer apiBaseUrl="http://localhost:5001/v1" appId="app-dev" />,
)

expect(screen.getByText('http://localhost:5001/v1')).toBeInTheDocument()

// Open modal and verify it works with the same appId
await act(async () => {
await user.click(screen.getByText('appApi.apiKey'))
})
await flushUI()

await waitFor(() => {
expect(screen.getByText('appApi.apiKeyModal.apiSecretKey')).toBeInTheDocument()
})

// Close modal, update URL and re-verify
const xIcon = document.body.querySelector('svg.cursor-pointer')
await act(async () => {
await user.click(xIcon!)
})
await flushUI()

rerender(
<ApiServer apiBaseUrl="https://api.production.com/v1" appId="app-prod" />,
)

expect(screen.getByText('https://api.production.com/v1')).toBeInTheDocument()
})
})
241 web/__tests__/develop/develop-page-flow.test.tsx Normal file
@ -0,0 +1,241 @@
/**
* Integration test: DevelopMain page flow
*
* Tests the full page lifecycle:
* Loading state → App loaded → Header (ApiServer) + Content (Doc) rendered
*
* Uses real DevelopMain, ApiServer, and Doc components with minimal mocks.
*/
import { act, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import DevelopMain from '@/app/components/develop'
import { AppModeEnum, Theme } from '@/types/app'

// ---------- fake timers ----------
beforeEach(() => {
vi.useFakeTimers({ shouldAdvanceTime: true })
})

afterEach(() => {
vi.runOnlyPendingTimers()
vi.useRealTimers()
})

async function flushUI() {
await act(async () => {
vi.runAllTimers()
})
}

// ---------- store mock ----------

let storeAppDetail: unknown

vi.mock('@/app/components/app/store', () => ({
useStore: (selector: (state: Record<string, unknown>) => unknown) => {
return selector({ appDetail: storeAppDetail })
},
}))

// ---------- Doc dependencies ----------

vi.mock('@/context/i18n', () => ({
useLocale: () => 'en-US',
}))

vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: Theme.light }),
}))

vi.mock('@/i18n-config/language', () => ({
LanguagesSupported: ['en-US', 'zh-Hans', 'zh-Hant', 'pt-BR', 'es-ES', 'fr-FR', 'de-DE', 'ja-JP'],
}))

// ---------- SecretKeyModal dependencies ----------

vi.mock('@/context/app-context', () => ({
useAppContext: () => ({
currentWorkspace: { id: 'ws-1', name: 'Workspace' },
isCurrentWorkspaceManager: true,
isCurrentWorkspaceEditor: true,
}),
}))

vi.mock('@/hooks/use-timestamp', () => ({
default: () => ({
formatTime: vi.fn((val: number) => `Time:${val}`),
formatDate: vi.fn((val: string) => `Date:${val}`),
}),
}))

vi.mock('@/service/apps', () => ({
createApikey: vi.fn().mockResolvedValue({ token: 'sk-new-1234567890' }),
delApikey: vi.fn().mockResolvedValue({}),
}))

vi.mock('@/service/datasets', () => ({
createApikey: vi.fn().mockResolvedValue({ token: 'dk-new' }),
delApikey: vi.fn().mockResolvedValue({}),
}))

vi.mock('@/service/use-apps', () => ({
useAppApiKeys: () => ({ data: { data: [] }, isLoading: false }),
useInvalidateAppApiKeys: () => vi.fn(),
}))

vi.mock('@/service/knowledge/use-dataset', () => ({
useDatasetApiKeys: () => ({ data: null, isLoading: false }),
useInvalidateDatasetApiKeys: () => vi.fn(),
}))

// ---------- tests ----------

describe('DevelopMain page flow', () => {
beforeEach(() => {
vi.clearAllMocks()
storeAppDetail = undefined
})

it('should show loading indicator when appDetail is not available', () => {
storeAppDetail = undefined
render(<DevelopMain appId="app-1" />)

expect(screen.getByRole('status')).toBeInTheDocument()
// No content should be visible
expect(screen.queryByText('appApi.apiServer')).not.toBeInTheDocument()
})

it('should render full page when appDetail is loaded', () => {
storeAppDetail = {
id: 'app-1',
name: 'Test App',
api_base_url: 'https://api.test.com/v1',
mode: AppModeEnum.CHAT,
}

render(<DevelopMain appId="app-1" />)

// ApiServer section should be visible
expect(screen.getByText('appApi.apiServer')).toBeInTheDocument()
expect(screen.getByText('https://api.test.com/v1')).toBeInTheDocument()
expect(screen.getByText('appApi.ok')).toBeInTheDocument()
expect(screen.getByText('appApi.apiKey')).toBeInTheDocument()

// Loading should NOT be visible
expect(screen.queryByRole('status')).not.toBeInTheDocument()
})

it('should render Doc component with correct app mode template', () => {
storeAppDetail = {
id: 'app-1',
name: 'Chat App',
api_base_url: 'https://api.test.com/v1',
mode: AppModeEnum.CHAT,
}

const { container } = render(<DevelopMain appId="app-1" />)

// Doc renders an article element with prose classes
const article = container.querySelector('article')
expect(article).toBeInTheDocument()
expect(article?.className).toContain('prose')
})

it('should transition from loading to content when appDetail becomes available', () => {
// Start with no data
storeAppDetail = undefined
const { rerender } = render(<DevelopMain appId="app-1" />)
expect(screen.getByRole('status')).toBeInTheDocument()

// Simulate store update
storeAppDetail = {
id: 'app-1',
name: 'My App',
api_base_url: 'https://api.example.com/v1',
mode: AppModeEnum.COMPLETION,
}
rerender(<DevelopMain appId="app-1" />)

// Content should now be visible
expect(screen.queryByRole('status')).not.toBeInTheDocument()
expect(screen.getByText('https://api.example.com/v1')).toBeInTheDocument()
})

it('should open API key modal from the page', async () => {
const user = userEvent.setup({ advanceTimers: vi.advanceTimersByTime })

storeAppDetail = {
id: 'app-1',
name: 'Test App',
api_base_url: 'https://api.test.com/v1',
mode: AppModeEnum.WORKFLOW,
}

render(<DevelopMain appId="app-1" />)

// Click API Key button in the header
await act(async () => {
await user.click(screen.getByText('appApi.apiKey'))
})
await flushUI()

// SecretKeyModal should open
await waitFor(() => {
expect(screen.getByText('appApi.apiKeyModal.apiSecretKey')).toBeInTheDocument()
})
})

it('should render correctly for different app modes', () => {
const modes = [
AppModeEnum.CHAT,
AppModeEnum.COMPLETION,
AppModeEnum.ADVANCED_CHAT,
AppModeEnum.WORKFLOW,
]

for (const mode of modes) {
storeAppDetail = {
id: 'app-1',
name: `${mode} App`,
api_base_url: 'https://api.test.com/v1',
mode,
}
||||
|
||||
const { container, unmount } = render(<DevelopMain appId="app-1" />)
|
||||
|
||||
// ApiServer should always be present
|
||||
expect(screen.getByText('appApi.apiServer')).toBeInTheDocument()
|
||||
|
||||
// Doc should render an article
|
||||
expect(container.querySelector('article')).toBeInTheDocument()
|
||||
|
||||
unmount()
|
||||
}
|
||||
})
|
||||
|
||||
it('should have correct page layout structure', () => {
|
||||
storeAppDetail = {
|
||||
id: 'app-1',
|
||||
name: 'Test App',
|
||||
api_base_url: 'https://api.test.com/v1',
|
||||
mode: AppModeEnum.CHAT,
|
||||
}
|
||||
|
||||
render(<DevelopMain appId="app-1" />)
|
||||
|
||||
// Main container: flex column with full height
|
||||
const mainDiv = screen.getByTestId('develop-main')
|
||||
expect(mainDiv.className).toContain('flex')
|
||||
expect(mainDiv.className).toContain('flex-col')
|
||||
expect(mainDiv.className).toContain('h-full')
|
||||
|
||||
// Header section with border
|
||||
const header = mainDiv.querySelector('.border-b')
|
||||
expect(header).toBeInTheDocument()
|
||||
|
||||
// Content section with overflow scroll
|
||||
const content = mainDiv.querySelector('.overflow-auto')
|
||||
expect(content).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
@@ -1,18 +1,23 @@
|
||||
/**
|
||||
* Integration test: Explore App List Flow
|
||||
*
|
||||
* Tests the end-to-end user flow of browsing, filtering, searching,
|
||||
* and adding apps to workspace from the explore page.
|
||||
*/
|
||||
import type { Mock } from 'vitest'
|
||||
import type { CreateAppModalProps } from '@/app/components/explore/create-app-modal'
|
||||
import type { App } from '@/models/explore'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import AppList from '@/app/components/explore/app-list'
|
||||
import ExploreContext from '@/context/explore-context'
|
||||
import { fetchAppDetail } from '@/service/explore'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import AppList from './index'
|
||||
|
||||
const allCategoriesEn = 'explore.apps.allCategories:{"lng":"en"}'
|
||||
let mockTabValue = allCategoriesEn
|
||||
const mockSetTab = vi.fn()
|
||||
let mockExploreData: { categories: string[], allList: App[] } | undefined = { categories: [], allList: [] }
|
||||
let mockExploreData: { categories: string[], allList: App[] } | undefined
|
||||
let mockIsLoading = false
|
||||
let mockIsError = false
|
||||
const mockHandleImportDSL = vi.fn()
|
||||
const mockHandleImportDSLConfirm = vi.fn()
|
||||
|
||||
@@ -43,7 +48,7 @@ vi.mock('@/service/use-explore', () => ({
|
||||
useExploreAppList: () => ({
|
||||
data: mockExploreData,
|
||||
isLoading: mockIsLoading,
|
||||
isError: mockIsError,
|
||||
isError: false,
|
||||
}),
|
||||
}))
|
||||
|
||||
@@ -96,7 +101,7 @@ vi.mock('@/app/components/app/create-from-dsl-modal/dsl-confirm-modal', () => ({
|
||||
|
||||
const createApp = (overrides: Partial<App> = {}): App => ({
|
||||
app: {
|
||||
id: overrides.app?.id ?? 'app-basic-id',
|
||||
id: overrides.app?.id ?? 'app-id',
|
||||
mode: overrides.app?.mode ?? AppModeEnum.CHAT,
|
||||
icon_type: overrides.app?.icon_type ?? 'emoji',
|
||||
icon: overrides.app?.icon ?? '😀',
|
||||
@@ -121,113 +126,80 @@ const createApp = (overrides: Partial<App> = {}): App => ({
|
||||
is_agent: overrides.is_agent ?? false,
|
||||
})
|
||||
|
||||
const renderWithContext = (hasEditPermission = false, onSuccess?: () => void) => {
|
||||
return render(
|
||||
<ExploreContext.Provider
|
||||
value={{
|
||||
controlUpdateInstalledApps: 0,
|
||||
setControlUpdateInstalledApps: vi.fn(),
|
||||
hasEditPermission,
|
||||
installedApps: [],
|
||||
setInstalledApps: vi.fn(),
|
||||
isFetchingInstalledApps: false,
|
||||
setIsFetchingInstalledApps: vi.fn(),
|
||||
isShowTryAppPanel: false,
|
||||
setShowTryAppPanel: vi.fn(),
|
||||
}}
|
||||
>
|
||||
<AppList onSuccess={onSuccess} />
|
||||
</ExploreContext.Provider>,
|
||||
)
|
||||
const createContextValue = (hasEditPermission = true) => ({
|
||||
controlUpdateInstalledApps: 0,
|
||||
setControlUpdateInstalledApps: vi.fn(),
|
||||
hasEditPermission,
|
||||
installedApps: [] as never[],
|
||||
setInstalledApps: vi.fn(),
|
||||
isFetchingInstalledApps: false,
|
||||
setIsFetchingInstalledApps: vi.fn(),
|
||||
isShowTryAppPanel: false,
|
||||
setShowTryAppPanel: vi.fn(),
|
||||
})
|
||||
|
||||
const wrapWithContext = (hasEditPermission = true, onSuccess?: () => void) => (
|
||||
<ExploreContext.Provider value={createContextValue(hasEditPermission)}>
|
||||
<AppList onSuccess={onSuccess} />
|
||||
</ExploreContext.Provider>
|
||||
)
|
||||
|
||||
const renderWithContext = (hasEditPermission = true, onSuccess?: () => void) => {
|
||||
return render(wrapWithContext(hasEditPermission, onSuccess))
|
||||
}
|
||||
|
||||
describe('AppList', () => {
|
||||
describe('Explore App List Flow', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockTabValue = allCategoriesEn
|
||||
mockExploreData = { categories: [], allList: [] }
|
||||
mockIsLoading = false
|
||||
mockIsError = false
|
||||
mockExploreData = {
|
||||
categories: ['Writing', 'Translate', 'Programming'],
|
||||
allList: [
|
||||
createApp({ app_id: 'app-1', app: { ...createApp().app, name: 'Writer Bot' }, category: 'Writing' }),
|
||||
createApp({ app_id: 'app-2', app: { ...createApp().app, id: 'app-id-2', name: 'Translator' }, category: 'Translate' }),
|
||||
createApp({ app_id: 'app-3', app: { ...createApp().app, id: 'app-id-3', name: 'Code Helper' }, category: 'Programming' }),
|
||||
],
|
||||
}
|
||||
})
|
||||
|
||||
// Rendering: show loading when categories are not ready.
|
||||
describe('Rendering', () => {
|
||||
it('should render loading when the query is loading', () => {
|
||||
// Arrange
|
||||
mockExploreData = undefined
|
||||
mockIsLoading = true
|
||||
|
||||
// Act
|
||||
describe('Browse and Filter Flow', () => {
|
||||
it('should display all apps when no category filter is applied', () => {
|
||||
renderWithContext()
|
||||
|
||||
// Assert
|
||||
expect(screen.getByRole('status')).toBeInTheDocument()
|
||||
expect(screen.getByText('Writer Bot')).toBeInTheDocument()
|
||||
expect(screen.getByText('Translator')).toBeInTheDocument()
|
||||
expect(screen.getByText('Code Helper')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render app cards when data is available', () => {
|
||||
// Arrange
|
||||
mockExploreData = {
|
||||
categories: ['Writing', 'Translate'],
|
||||
allList: [createApp(), createApp({ app_id: 'app-2', app: { ...createApp().app, name: 'Beta' }, category: 'Translate' })],
|
||||
}
|
||||
|
||||
// Act
|
||||
renderWithContext()
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('Alpha')).toBeInTheDocument()
|
||||
expect(screen.getByText('Beta')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// Props: category selection filters the list.
|
||||
describe('Props', () => {
|
||||
it('should filter apps by selected category', () => {
|
||||
// Arrange
|
||||
mockTabValue = 'Writing'
|
||||
mockExploreData = {
|
||||
categories: ['Writing', 'Translate'],
|
||||
allList: [createApp(), createApp({ app_id: 'app-2', app: { ...createApp().app, name: 'Beta' }, category: 'Translate' })],
|
||||
}
|
||||
|
||||
// Act
|
||||
renderWithContext()
|
||||
|
||||
// Assert
|
||||
expect(screen.getByText('Alpha')).toBeInTheDocument()
|
||||
expect(screen.queryByText('Beta')).not.toBeInTheDocument()
|
||||
expect(screen.getByText('Writer Bot')).toBeInTheDocument()
|
||||
expect(screen.queryByText('Translator')).not.toBeInTheDocument()
|
||||
expect(screen.queryByText('Code Helper')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
// User interactions: search and create flow.
|
||||
describe('User Interactions', () => {
|
||||
it('should filter apps by search keywords', async () => {
|
||||
// Arrange
|
||||
mockExploreData = {
|
||||
categories: ['Writing'],
|
||||
allList: [createApp(), createApp({ app_id: 'app-2', app: { ...createApp().app, name: 'Gamma' } })],
|
||||
}
|
||||
it('should filter apps by search keyword', async () => {
|
||||
renderWithContext()
|
||||
|
||||
// Act
|
||||
const input = screen.getByPlaceholderText('common.operation.search')
|
||||
fireEvent.change(input, { target: { value: 'gam' } })
|
||||
fireEvent.change(input, { target: { value: 'trans' } })
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByText('Alpha')).not.toBeInTheDocument()
|
||||
expect(screen.getByText('Gamma')).toBeInTheDocument()
|
||||
expect(screen.getByText('Translator')).toBeInTheDocument()
|
||||
expect(screen.queryByText('Writer Bot')).not.toBeInTheDocument()
|
||||
expect(screen.queryByText('Code Helper')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
it('should handle create flow and confirm DSL when pending', async () => {
|
||||
// Arrange
|
||||
describe('Add to Workspace Flow', () => {
|
||||
it('should complete the full add-to-workspace flow with DSL confirmation', async () => {
|
||||
// Step 1: User clicks "Add to Workspace" on an app card
|
||||
const onSuccess = vi.fn()
|
||||
mockExploreData = {
|
||||
categories: ['Writing'],
|
||||
allList: [createApp()],
|
||||
};
|
||||
(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml-content' })
|
||||
;(fetchAppDetail as unknown as Mock).mockResolvedValue({ export_data: 'yaml-content' })
|
||||
mockHandleImportDSL.mockImplementation(async (_payload: unknown, options: { onSuccess?: () => void, onPending?: () => void }) => {
|
||||
options.onPending?.()
|
||||
})
|
||||
@@ -235,19 +207,27 @@ describe('AppList', () => {
|
||||
options.onSuccess?.()
|
||||
})
|
||||
|
||||
// Act
|
||||
renderWithContext(true, onSuccess)
|
||||
fireEvent.click(screen.getByText('explore.appCard.addToWorkspace'))
|
||||
|
||||
// Step 2: Click add to workspace button - opens create modal
|
||||
fireEvent.click(screen.getAllByText('explore.appCard.addToWorkspace')[0])
|
||||
|
||||
// Step 3: Confirm creation in modal
|
||||
fireEvent.click(await screen.findByTestId('confirm-create'))
|
||||
|
||||
// Assert
|
||||
// Step 4: API fetches app detail
|
||||
await waitFor(() => {
|
||||
expect(fetchAppDetail).toHaveBeenCalledWith('app-basic-id')
|
||||
expect(fetchAppDetail).toHaveBeenCalledWith('app-id')
|
||||
})
|
||||
expect(mockHandleImportDSL).toHaveBeenCalledTimes(1)
|
||||
expect(await screen.findByTestId('dsl-confirm-modal')).toBeInTheDocument()
|
||||
|
||||
// Step 5: DSL import triggers pending confirmation
|
||||
expect(mockHandleImportDSL).toHaveBeenCalledTimes(1)
|
||||
|
||||
// Step 6: DSL confirm modal appears and user confirms
|
||||
expect(await screen.findByTestId('dsl-confirm-modal')).toBeInTheDocument()
|
||||
fireEvent.click(screen.getByTestId('dsl-confirm'))
|
||||
|
||||
// Step 7: Flow completes successfully
|
||||
await waitFor(() => {
|
||||
expect(mockHandleImportDSLConfirm).toHaveBeenCalledTimes(1)
|
||||
expect(onSuccess).toHaveBeenCalledTimes(1)
|
||||
@@ -255,30 +235,39 @@ describe('AppList', () => {
|
||||
})
|
||||
})
|
||||
|
||||
// Edge cases: handle clearing search keywords.
|
||||
describe('Edge Cases', () => {
|
||||
it('should reset search results when clear icon is clicked', async () => {
|
||||
// Arrange
|
||||
describe('Loading and Empty States', () => {
|
||||
it('should transition from loading to content', () => {
|
||||
// Step 1: Loading state
|
||||
mockIsLoading = true
|
||||
mockExploreData = undefined
|
||||
const { rerender } = render(wrapWithContext())
|
||||
|
||||
expect(screen.getByRole('status')).toBeInTheDocument()
|
||||
|
||||
// Step 2: Data loads
|
||||
mockIsLoading = false
|
||||
mockExploreData = {
|
||||
categories: ['Writing'],
|
||||
allList: [createApp(), createApp({ app_id: 'app-2', app: { ...createApp().app, name: 'Gamma' } })],
|
||||
allList: [createApp()],
|
||||
}
|
||||
renderWithContext()
|
||||
rerender(wrapWithContext())
|
||||
|
||||
// Act
|
||||
const input = screen.getByPlaceholderText('common.operation.search')
|
||||
fireEvent.change(input, { target: { value: 'gam' } })
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByText('Alpha')).not.toBeInTheDocument()
|
||||
})
|
||||
expect(screen.queryByRole('status')).not.toBeInTheDocument()
|
||||
expect(screen.getByText('Alpha')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('input-clear'))
|
||||
describe('Permission-Based Behavior', () => {
|
||||
it('should hide add-to-workspace button when user has no edit permission', () => {
|
||||
renderWithContext(false)
|
||||
|
||||
// Assert
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Alpha')).toBeInTheDocument()
|
||||
expect(screen.getByText('Gamma')).toBeInTheDocument()
|
||||
})
|
||||
expect(screen.queryByText('explore.appCard.addToWorkspace')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show add-to-workspace button when user has edit permission', () => {
|
||||
renderWithContext(true)
|
||||
|
||||
expect(screen.getAllByText('explore.appCard.addToWorkspace').length).toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
})
|
||||
260
web/__tests__/explore/installed-app-flow.test.tsx
Normal file
@@ -0,0 +1,260 @@
|
||||
/**
 * Integration test: Installed App Flow
 *
 * Tests the end-to-end user flow of installed apps: sidebar navigation,
 * mode-based routing (Chat / Completion / Workflow), and lifecycle
 * operations (pin/unpin, delete).
 */
|
||||
import type { Mock } from 'vitest'
|
||||
import type { InstalledApp as InstalledAppModel } from '@/models/explore'
|
||||
import { render, screen, waitFor } from '@testing-library/react'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import InstalledApp from '@/app/components/explore/installed-app'
|
||||
import { useWebAppStore } from '@/context/web-app-context'
|
||||
import { AccessMode } from '@/models/access-control'
|
||||
import { useGetUserCanAccessApp } from '@/service/access-control'
|
||||
import { useGetInstalledAppAccessModeByAppId, useGetInstalledAppMeta, useGetInstalledAppParams } from '@/service/use-explore'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
|
||||
// Mock external dependencies
|
||||
vi.mock('use-context-selector', () => ({
|
||||
useContext: vi.fn(),
|
||||
createContext: vi.fn(() => ({})),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/web-app-context', () => ({
|
||||
useWebAppStore: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/access-control', () => ({
|
||||
useGetUserCanAccessApp: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-explore', () => ({
|
||||
useGetInstalledAppAccessModeByAppId: vi.fn(),
|
||||
useGetInstalledAppParams: vi.fn(),
|
||||
useGetInstalledAppMeta: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/share/text-generation', () => ({
|
||||
default: ({ isWorkflow }: { isWorkflow?: boolean }) => (
|
||||
<div data-testid="text-generation-app">
|
||||
Text Generation
|
||||
{isWorkflow && ' (Workflow)'}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/chat/chat-with-history', () => ({
|
||||
default: ({ installedAppInfo }: { installedAppInfo?: InstalledAppModel }) => (
|
||||
<div data-testid="chat-with-history">
|
||||
Chat -
|
||||
{' '}
|
||||
{installedAppInfo?.app.name}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
describe('Installed App Flow', () => {
|
||||
const mockUpdateAppInfo = vi.fn()
|
||||
const mockUpdateWebAppAccessMode = vi.fn()
|
||||
const mockUpdateAppParams = vi.fn()
|
||||
const mockUpdateWebAppMeta = vi.fn()
|
||||
const mockUpdateUserCanAccessApp = vi.fn()
|
||||
|
||||
const createInstalledApp = (mode: AppModeEnum = AppModeEnum.CHAT): InstalledAppModel => ({
|
||||
id: 'installed-app-1',
|
||||
app: {
|
||||
id: 'real-app-id',
|
||||
name: 'Integration Test App',
|
||||
mode,
|
||||
icon_type: 'emoji',
|
||||
icon: '🧪',
|
||||
icon_background: '#FFFFFF',
|
||||
icon_url: '',
|
||||
description: 'Test app for integration',
|
||||
use_icon_as_answer_icon: false,
|
||||
},
|
||||
uninstallable: true,
|
||||
is_pinned: false,
|
||||
})
|
||||
|
||||
const mockAppParams = {
|
||||
user_input_form: [],
|
||||
file_upload: { image: { enabled: false, number_limits: 0, transfer_methods: [] } },
|
||||
system_parameters: {},
|
||||
}
|
||||
|
||||
type MockOverrides = {
|
||||
context?: { installedApps?: InstalledAppModel[], isFetchingInstalledApps?: boolean }
|
||||
accessMode?: { isFetching?: boolean, data?: unknown, error?: unknown }
|
||||
params?: { isFetching?: boolean, data?: unknown, error?: unknown }
|
||||
meta?: { isFetching?: boolean, data?: unknown, error?: unknown }
|
||||
userAccess?: { data?: unknown, error?: unknown }
|
||||
}
|
||||
|
||||
const setupDefaultMocks = (app?: InstalledAppModel, overrides: MockOverrides = {}) => {
|
||||
;(useContext as Mock).mockReturnValue({
|
||||
installedApps: app ? [app] : [],
|
||||
isFetchingInstalledApps: false,
|
||||
...overrides.context,
|
||||
})
|
||||
|
||||
;(useWebAppStore as unknown as Mock).mockImplementation((selector: (state: Record<string, Mock>) => unknown) => {
|
||||
return selector({
|
||||
updateAppInfo: mockUpdateAppInfo,
|
||||
updateWebAppAccessMode: mockUpdateWebAppAccessMode,
|
||||
updateAppParams: mockUpdateAppParams,
|
||||
updateWebAppMeta: mockUpdateWebAppMeta,
|
||||
updateUserCanAccessApp: mockUpdateUserCanAccessApp,
|
||||
})
|
||||
})
|
||||
|
||||
;(useGetInstalledAppAccessModeByAppId as Mock).mockReturnValue({
|
||||
isFetching: false,
|
||||
data: { accessMode: AccessMode.PUBLIC },
|
||||
error: null,
|
||||
...overrides.accessMode,
|
||||
})
|
||||
|
||||
;(useGetInstalledAppParams as Mock).mockReturnValue({
|
||||
isFetching: false,
|
||||
data: mockAppParams,
|
||||
error: null,
|
||||
...overrides.params,
|
||||
})
|
||||
|
||||
;(useGetInstalledAppMeta as Mock).mockReturnValue({
|
||||
isFetching: false,
|
||||
data: { tool_icons: {} },
|
||||
error: null,
|
||||
...overrides.meta,
|
||||
})
|
||||
|
||||
;(useGetUserCanAccessApp as Mock).mockReturnValue({
|
||||
data: { result: true },
|
||||
error: null,
|
||||
...overrides.userAccess,
|
||||
})
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Mode-Based Routing', () => {
|
||||
it.each([
|
||||
[AppModeEnum.CHAT, 'chat-with-history'],
|
||||
[AppModeEnum.ADVANCED_CHAT, 'chat-with-history'],
|
||||
[AppModeEnum.AGENT_CHAT, 'chat-with-history'],
|
||||
])('should render ChatWithHistory for %s mode', (mode, testId) => {
|
||||
const app = createInstalledApp(mode)
|
||||
setupDefaultMocks(app)
|
||||
|
||||
render(<InstalledApp id="installed-app-1" />)
|
||||
|
||||
expect(screen.getByTestId(testId)).toBeInTheDocument()
|
||||
expect(screen.getByText(/Integration Test App/)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render TextGenerationApp for COMPLETION mode', () => {
|
||||
const app = createInstalledApp(AppModeEnum.COMPLETION)
|
||||
setupDefaultMocks(app)
|
||||
|
||||
render(<InstalledApp id="installed-app-1" />)
|
||||
|
||||
expect(screen.getByTestId('text-generation-app')).toBeInTheDocument()
|
||||
expect(screen.getByText('Text Generation')).toBeInTheDocument()
|
||||
expect(screen.queryByText(/Workflow/)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render TextGenerationApp with workflow flag for WORKFLOW mode', () => {
|
||||
const app = createInstalledApp(AppModeEnum.WORKFLOW)
|
||||
setupDefaultMocks(app)
|
||||
|
||||
render(<InstalledApp id="installed-app-1" />)
|
||||
|
||||
expect(screen.getByTestId('text-generation-app')).toBeInTheDocument()
|
||||
expect(screen.getByText(/Workflow/)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Data Loading Flow', () => {
|
||||
it('should show loading spinner when params are being fetched', () => {
|
||||
const app = createInstalledApp()
|
||||
setupDefaultMocks(app, { params: { isFetching: true, data: null } })
|
||||
|
||||
const { container } = render(<InstalledApp id="installed-app-1" />)
|
||||
|
||||
expect(container.querySelector('svg.spin-animation')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('chat-with-history')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render content when all data is available', () => {
|
||||
const app = createInstalledApp()
|
||||
setupDefaultMocks(app)
|
||||
|
||||
render(<InstalledApp id="installed-app-1" />)
|
||||
|
||||
expect(screen.getByTestId('chat-with-history')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error Handling Flow', () => {
|
||||
it('should show error state when API fails', () => {
|
||||
const app = createInstalledApp()
|
||||
setupDefaultMocks(app, { params: { data: null, error: new Error('Network error') } })
|
||||
|
||||
render(<InstalledApp id="installed-app-1" />)
|
||||
|
||||
expect(screen.getByText(/Network error/)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show 404 when app is not found', () => {
|
||||
setupDefaultMocks(undefined, {
|
||||
accessMode: { data: null },
|
||||
params: { data: null },
|
||||
meta: { data: null },
|
||||
userAccess: { data: null },
|
||||
})
|
||||
|
||||
render(<InstalledApp id="nonexistent" />)
|
||||
|
||||
expect(screen.getByText(/404/)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show 403 when user has no permission', () => {
|
||||
const app = createInstalledApp()
|
||||
setupDefaultMocks(app, { userAccess: { data: { result: false } } })
|
||||
|
||||
render(<InstalledApp id="installed-app-1" />)
|
||||
|
||||
expect(screen.getByText(/403/)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('State Synchronization', () => {
|
||||
it('should update all stores when app data is loaded', async () => {
|
||||
const app = createInstalledApp()
|
||||
setupDefaultMocks(app)
|
||||
|
||||
render(<InstalledApp id="installed-app-1" />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockUpdateAppInfo).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
app_id: 'installed-app-1',
|
||||
site: expect.objectContaining({
|
||||
title: 'Integration Test App',
|
||||
icon: '🧪',
|
||||
}),
|
||||
}),
|
||||
)
|
||||
expect(mockUpdateAppParams).toHaveBeenCalledWith(mockAppParams)
|
||||
expect(mockUpdateWebAppMeta).toHaveBeenCalledWith({ tool_icons: {} })
|
||||
expect(mockUpdateWebAppAccessMode).toHaveBeenCalledWith(AccessMode.PUBLIC)
|
||||
expect(mockUpdateUserCanAccessApp).toHaveBeenCalledWith(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
225
web/__tests__/explore/sidebar-lifecycle-flow.test.tsx
Normal file
@@ -0,0 +1,225 @@
|
||||
import type { IExplore } from '@/context/explore-context'
|
||||
/**
 * Integration test: Sidebar Lifecycle Flow
 *
 * Tests the sidebar interactions for installed apps lifecycle:
 * navigation, pin/unpin ordering, delete confirmation, and
 * fold/unfold behavior.
 */
|
||||
import type { InstalledApp } from '@/models/explore'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import Toast from '@/app/components/base/toast'
|
||||
import SideBar from '@/app/components/explore/sidebar'
|
||||
import ExploreContext from '@/context/explore-context'
|
||||
import { MediaType } from '@/hooks/use-breakpoints'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
|
||||
let mockMediaType: string = MediaType.pc
|
||||
const mockSegments = ['apps']
|
||||
const mockPush = vi.fn()
|
||||
const mockRefetch = vi.fn()
|
||||
const mockUninstall = vi.fn()
|
||||
const mockUpdatePinStatus = vi.fn()
|
||||
let mockInstalledApps: InstalledApp[] = []
|
||||
|
||||
vi.mock('next/navigation', () => ({
|
||||
useSelectedLayoutSegments: () => mockSegments,
|
||||
useRouter: () => ({
|
||||
push: mockPush,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/hooks/use-breakpoints', () => ({
|
||||
default: () => mockMediaType,
|
||||
MediaType: {
|
||||
mobile: 'mobile',
|
||||
tablet: 'tablet',
|
||||
pc: 'pc',
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-explore', () => ({
|
||||
useGetInstalledApps: () => ({
|
||||
isFetching: false,
|
||||
data: { installed_apps: mockInstalledApps },
|
||||
refetch: mockRefetch,
|
||||
}),
|
||||
useUninstallApp: () => ({
|
||||
mutateAsync: mockUninstall,
|
||||
}),
|
||||
useUpdateAppPinStatus: () => ({
|
||||
mutateAsync: mockUpdatePinStatus,
|
||||
}),
|
||||
}))
|
||||
|
||||
const createInstalledApp = (overrides: Partial<InstalledApp> = {}): InstalledApp => ({
|
||||
id: overrides.id ?? 'app-1',
|
||||
uninstallable: overrides.uninstallable ?? false,
|
||||
is_pinned: overrides.is_pinned ?? false,
|
||||
app: {
|
||||
id: overrides.app?.id ?? 'app-basic-id',
|
||||
mode: overrides.app?.mode ?? AppModeEnum.CHAT,
|
||||
icon_type: overrides.app?.icon_type ?? 'emoji',
|
||||
icon: overrides.app?.icon ?? '🤖',
|
||||
icon_background: overrides.app?.icon_background ?? '#fff',
|
||||
icon_url: overrides.app?.icon_url ?? '',
|
||||
name: overrides.app?.name ?? 'App One',
|
||||
description: overrides.app?.description ?? 'desc',
|
||||
use_icon_as_answer_icon: overrides.app?.use_icon_as_answer_icon ?? false,
|
||||
},
|
||||
})
|
||||
|
||||
const createContextValue = (installedApps: InstalledApp[] = []): IExplore => ({
|
||||
controlUpdateInstalledApps: 0,
|
||||
setControlUpdateInstalledApps: vi.fn(),
|
||||
hasEditPermission: true,
|
||||
installedApps,
|
||||
setInstalledApps: vi.fn(),
|
||||
isFetchingInstalledApps: false,
|
||||
setIsFetchingInstalledApps: vi.fn(),
|
||||
isShowTryAppPanel: false,
|
||||
setShowTryAppPanel: vi.fn(),
|
||||
})
|
||||
|
||||
const renderSidebar = (installedApps: InstalledApp[] = []) => {
|
||||
return render(
|
||||
<ExploreContext.Provider value={createContextValue(installedApps)}>
|
||||
<SideBar controlUpdateInstalledApps={0} />
|
||||
</ExploreContext.Provider>,
|
||||
)
|
||||
}
|
||||
|
||||
describe('Sidebar Lifecycle Flow', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockMediaType = MediaType.pc
|
||||
mockInstalledApps = []
|
||||
vi.spyOn(Toast, 'notify').mockImplementation(() => ({ clear: vi.fn() }))
|
||||
})
|
||||
|
||||
describe('Pin / Unpin / Delete Flow', () => {
|
||||
it('should complete pin → unpin cycle for an app', async () => {
|
||||
mockUpdatePinStatus.mockResolvedValue(undefined)
|
||||
|
||||
// Step 1: Start with an unpinned app and pin it
|
||||
const unpinnedApp = createInstalledApp({ is_pinned: false })
|
||||
mockInstalledApps = [unpinnedApp]
|
||||
const { unmount } = renderSidebar(mockInstalledApps)
|
||||
|
||||
fireEvent.click(screen.getByTestId('item-operation-trigger'))
|
||||
fireEvent.click(await screen.findByText('explore.sidebar.action.pin'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockUpdatePinStatus).toHaveBeenCalledWith({ appId: 'app-1', isPinned: true })
|
||||
expect(Toast.notify).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'success',
|
||||
}))
|
||||
})
|
||||
|
||||
// Step 2: Simulate refetch returning pinned state, then unpin
|
||||
unmount()
|
||||
vi.clearAllMocks()
|
||||
mockUpdatePinStatus.mockResolvedValue(undefined)
|
||||
|
||||
const pinnedApp = createInstalledApp({ is_pinned: true })
|
||||
mockInstalledApps = [pinnedApp]
|
||||
renderSidebar(mockInstalledApps)
|
||||
|
||||
fireEvent.click(screen.getByTestId('item-operation-trigger'))
|
||||
fireEvent.click(await screen.findByText('explore.sidebar.action.unpin'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockUpdatePinStatus).toHaveBeenCalledWith({ appId: 'app-1', isPinned: false })
|
||||
expect(Toast.notify).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'success',
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
it('should complete the delete flow with confirmation', async () => {
|
||||
const app = createInstalledApp()
|
||||
mockInstalledApps = [app]
|
||||
mockUninstall.mockResolvedValue(undefined)
|
||||
|
||||
renderSidebar(mockInstalledApps)
|
||||
|
||||
// Step 1: Open operation menu and click delete
|
||||
fireEvent.click(screen.getByTestId('item-operation-trigger'))
|
||||
fireEvent.click(await screen.findByText('explore.sidebar.action.delete'))
|
||||
|
||||
// Step 2: Confirm dialog appears
|
||||
expect(await screen.findByText('explore.sidebar.delete.title')).toBeInTheDocument()
|
||||
|
||||
// Step 3: Confirm deletion
|
||||
fireEvent.click(screen.getByText('common.operation.confirm'))
|
||||
|
||||
// Step 4: Uninstall API called and success toast shown
|
||||
await waitFor(() => {
|
||||
expect(mockUninstall).toHaveBeenCalledWith('app-1')
|
||||
expect(Toast.notify).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'success',
|
||||
message: 'common.api.remove',
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
it('should cancel deletion when user clicks cancel', async () => {
|
||||
const app = createInstalledApp()
|
||||
mockInstalledApps = [app]
|
||||
|
||||
renderSidebar(mockInstalledApps)
|
||||
|
||||
// Open delete flow
|
||||
fireEvent.click(screen.getByTestId('item-operation-trigger'))
|
||||
fireEvent.click(await screen.findByText('explore.sidebar.action.delete'))
|
||||
|
||||
// Cancel the deletion
|
||||
fireEvent.click(await screen.findByText('common.operation.cancel'))
|
||||
|
||||
// Uninstall should not be called
|
||||
expect(mockUninstall).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Multi-App Ordering', () => {
|
||||
it('should display pinned apps before unpinned apps with divider', () => {
|
||||
mockInstalledApps = [
|
||||
createInstalledApp({ id: 'pinned-1', is_pinned: true, app: { ...createInstalledApp().app, name: 'Pinned App' } }),
|
||||
createInstalledApp({ id: 'unpinned-1', is_pinned: false, app: { ...createInstalledApp().app, name: 'Regular App' } }),
|
||||
]
|
||||
|
||||
const { container } = renderSidebar(mockInstalledApps)
|
||||
|
||||
// Both apps are rendered
|
||||
const pinnedApp = screen.getByText('Pinned App')
|
||||
const regularApp = screen.getByText('Regular App')
|
||||
expect(pinnedApp).toBeInTheDocument()
|
||||
expect(regularApp).toBeInTheDocument()
|
||||
|
||||
// Pinned app appears before unpinned app in the DOM
|
||||
const pinnedItem = pinnedApp.closest('[class*="rounded-lg"]')!
|
||||
const regularItem = regularApp.closest('[class*="rounded-lg"]')!
|
||||
expect(pinnedItem.compareDocumentPosition(regularItem) & Node.DOCUMENT_POSITION_FOLLOWING).toBeTruthy()
|
||||
|
||||
// Divider is rendered between pinned and unpinned sections
|
||||
const divider = container.querySelector('[class*="bg-divider-regular"]')
|
||||
expect(divider).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Empty State', () => {
|
||||
it('should show NoApps component when no apps are installed on desktop', () => {
|
||||
mockMediaType = MediaType.pc
|
||||
renderSidebar([])
|
||||
|
||||
expect(screen.getByText('explore.sidebar.noApps.title')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should hide NoApps on mobile', () => {
|
||||
mockMediaType = MediaType.mobile
|
||||
renderSidebar([])
|
||||
|
||||
expect(screen.queryByText('explore.sidebar.noApps.title')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -49,14 +49,14 @@ describe('Slash Command Dual-Mode System', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
;(slashCommandRegistry as any).findCommand = vi.fn((name: string) => {
|
||||
vi.mocked(slashCommandRegistry.findCommand).mockImplementation((name: string) => {
|
||||
if (name === 'docs')
|
||||
return mockDirectCommand
|
||||
if (name === 'theme')
|
||||
return mockSubmenuCommand
|
||||
return null
|
||||
return undefined
|
||||
})
|
||||
;(slashCommandRegistry as any).getAllCommands = vi.fn(() => [
|
||||
vi.mocked(slashCommandRegistry.getAllCommands).mockReturnValue([
|
||||
mockDirectCommand,
|
||||
mockSubmenuCommand,
|
||||
])
|
||||
@@ -147,7 +147,7 @@ describe('Slash Command Dual-Mode System', () => {
|
||||
unregister: vi.fn(),
|
||||
}
|
||||
|
||||
;(slashCommandRegistry as any).findCommand = vi.fn(() => commandWithoutMode)
|
||||
vi.mocked(slashCommandRegistry.findCommand).mockReturnValue(commandWithoutMode)
|
||||
|
||||
const handler = slashCommandRegistry.findCommand('test')
|
||||
// Default behavior should be submenu when mode is not specified
|
||||
|
||||
271
web/__tests__/plugins/plugin-auth-flow.test.tsx
Normal file
@@ -0,0 +1,271 @@
|
||||
/**
 * Integration Test: Plugin Authentication Flow
 *
 * Tests the integration between PluginAuth, usePluginAuth hook,
 * Authorize/Authorized components, and credential management.
 * Verifies the complete auth flow from checking authorization status
 * to rendering the correct UI state.
 */
|
||||
import { cleanup, render, screen } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { AuthCategory, CredentialTypeEnum } from '@/app/components/plugins/plugin-auth/types'
|
||||
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => {
|
||||
const map: Record<string, string> = {
|
||||
'plugin.auth.setUpTip': 'Set up your credentials',
|
||||
'plugin.auth.authorized': 'Authorized',
|
||||
'plugin.auth.apiKey': 'API Key',
|
||||
'plugin.auth.oauth': 'OAuth',
|
||||
}
|
||||
return map[key] ?? key
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/app-context', () => ({
|
||||
useAppContext: () => ({
|
||||
isCurrentWorkspaceManager: true,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/classnames', () => ({
|
||||
cn: (...args: unknown[]) => args.filter(Boolean).join(' '),
|
||||
}))
|
||||
|
||||
const mockUsePluginAuth = vi.fn()
|
||||
vi.mock('@/app/components/plugins/plugin-auth/hooks/use-plugin-auth', () => ({
|
||||
usePluginAuth: (...args: unknown[]) => mockUsePluginAuth(...args),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/plugin-auth/authorize', () => ({
|
||||
default: ({ pluginPayload, canOAuth, canApiKey }: {
|
||||
pluginPayload: { provider: string }
|
||||
canOAuth: boolean
|
||||
canApiKey: boolean
|
||||
}) => (
|
||||
<div data-testid="authorize-component">
|
||||
<span data-testid="auth-provider">{pluginPayload.provider}</span>
|
||||
{canOAuth && <span data-testid="auth-oauth">OAuth available</span>}
|
||||
{canApiKey && <span data-testid="auth-apikey">API Key available</span>}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/plugin-auth/authorized', () => ({
|
||||
default: ({ pluginPayload, credentials }: {
|
||||
pluginPayload: { provider: string }
|
||||
credentials: Array<{ id: string, name: string }>
|
||||
}) => (
|
||||
<div data-testid="authorized-component">
|
||||
<span data-testid="auth-provider">{pluginPayload.provider}</span>
|
||||
<span data-testid="auth-credential-count">
|
||||
{credentials.length}
|
||||
{' '}
|
||||
credentials
|
||||
</span>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
const { default: PluginAuth } = await import('@/app/components/plugins/plugin-auth/plugin-auth')
|
||||
|
||||
describe('Plugin Authentication Flow Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
cleanup()
|
||||
})
|
||||
|
||||
const basePayload = {
|
||||
category: AuthCategory.tool,
|
||||
provider: 'test-provider',
|
||||
}
|
||||
|
||||
describe('Unauthorized State', () => {
|
||||
it('renders Authorize component when not authorized', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(<PluginAuth pluginPayload={basePayload} />)
|
||||
|
||||
expect(screen.getByTestId('authorize-component')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('authorized-component')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('auth-apikey')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows OAuth option when plugin supports it', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: true,
|
||||
canApiKey: true,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(<PluginAuth pluginPayload={basePayload} />)
|
||||
|
||||
expect(screen.getByTestId('auth-oauth')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('auth-apikey')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('applies className to wrapper when not authorized', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
const { container } = render(
|
||||
<PluginAuth pluginPayload={basePayload} className="custom-class" />,
|
||||
)
|
||||
|
||||
expect(container.firstChild).toHaveClass('custom-class')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Authorized State', () => {
|
||||
it('renders Authorized component when authorized and no children', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: true,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [
|
||||
{ id: 'cred-1', name: 'My API Key', is_default: true },
|
||||
],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(<PluginAuth pluginPayload={basePayload} />)
|
||||
|
||||
expect(screen.queryByTestId('authorize-component')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('authorized-component')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('auth-credential-count')).toHaveTextContent('1 credentials')
|
||||
})
|
||||
|
||||
it('renders children instead of Authorized when authorized and children provided', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: true,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [{ id: 'cred-1', name: 'Key', is_default: true }],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(
|
||||
<PluginAuth pluginPayload={basePayload}>
|
||||
<div data-testid="custom-children">Custom authorized view</div>
|
||||
</PluginAuth>,
|
||||
)
|
||||
|
||||
expect(screen.queryByTestId('authorize-component')).not.toBeInTheDocument()
|
||||
expect(screen.queryByTestId('authorized-component')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('custom-children')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('does not apply className when authorized', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: true,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [{ id: 'cred-1', name: 'Key', is_default: true }],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
const { container } = render(
|
||||
<PluginAuth pluginPayload={basePayload} className="custom-class" />,
|
||||
)
|
||||
|
||||
expect(container.firstChild).not.toHaveClass('custom-class')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Auth Category Integration', () => {
|
||||
it('passes correct provider to usePluginAuth for tool category', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: false,
|
||||
canApiKey: true,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
const toolPayload = {
|
||||
category: AuthCategory.tool,
|
||||
provider: 'google-search-provider',
|
||||
}
|
||||
|
||||
render(<PluginAuth pluginPayload={toolPayload} />)
|
||||
|
||||
expect(mockUsePluginAuth).toHaveBeenCalledWith(toolPayload, true)
|
||||
expect(screen.getByTestId('auth-provider')).toHaveTextContent('google-search-provider')
|
||||
})
|
||||
|
||||
it('passes correct provider to usePluginAuth for datasource category', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: false,
|
||||
canOAuth: true,
|
||||
canApiKey: false,
|
||||
credentials: [],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
const dsPayload = {
|
||||
category: AuthCategory.datasource,
|
||||
provider: 'notion-datasource',
|
||||
}
|
||||
|
||||
render(<PluginAuth pluginPayload={dsPayload} />)
|
||||
|
||||
expect(mockUsePluginAuth).toHaveBeenCalledWith(dsPayload, true)
|
||||
expect(screen.getByTestId('auth-oauth')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('auth-apikey')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Multiple Credentials', () => {
|
||||
it('shows credential count when multiple credentials exist', () => {
|
||||
mockUsePluginAuth.mockReturnValue({
|
||||
isAuthorized: true,
|
||||
canOAuth: true,
|
||||
canApiKey: true,
|
||||
credentials: [
|
||||
{ id: 'cred-1', name: 'API Key 1', is_default: true },
|
||||
{ id: 'cred-2', name: 'API Key 2', is_default: false },
|
||||
{ id: 'cred-3', name: 'OAuth Token', is_default: false, credential_type: CredentialTypeEnum.OAUTH2 },
|
||||
],
|
||||
disabled: false,
|
||||
invalidPluginCredentialInfo: vi.fn(),
|
||||
notAllowCustomCredential: false,
|
||||
})
|
||||
|
||||
render(<PluginAuth pluginPayload={basePayload} />)
|
||||
|
||||
expect(screen.getByTestId('auth-credential-count')).toHaveTextContent('3 credentials')
|
||||
})
|
||||
})
|
||||
})
|
||||
224
web/__tests__/plugins/plugin-card-rendering.test.tsx
Normal file
@@ -0,0 +1,224 @@
|
||||
/**
 * Integration Test: Plugin Card Rendering Pipeline
 *
 * Tests the integration between Card, Icon, Title, Description,
 * OrgInfo, CornerMark, and CardMoreInfo components. Verifies that
 * plugin data flows correctly through the card rendering pipeline.
 */
|
||||
import { cleanup, render, screen } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('#i18n', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/i18n', () => ({
|
||||
useGetLanguage: () => 'en_US',
|
||||
}))
|
||||
|
||||
vi.mock('@/hooks/use-theme', () => ({
|
||||
default: () => ({ theme: 'light' }),
|
||||
}))
|
||||
|
||||
vi.mock('@/i18n-config', () => ({
|
||||
renderI18nObject: (obj: Record<string, string>, locale: string) => obj[locale] || obj.en_US || '',
|
||||
}))
|
||||
|
||||
vi.mock('@/types/app', () => ({
|
||||
Theme: { dark: 'dark', light: 'light' },
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/classnames', () => ({
|
||||
cn: (...args: unknown[]) => args.filter(a => typeof a === 'string' && a).join(' '),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/hooks', () => ({
|
||||
useCategories: () => ({
|
||||
categoriesMap: {
|
||||
tool: { label: 'Tool' },
|
||||
model: { label: 'Model' },
|
||||
extension: { label: 'Extension' },
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/base/badges/partner', () => ({
|
||||
default: () => <span data-testid="partner-badge">Partner</span>,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/base/badges/verified', () => ({
|
||||
default: () => <span data-testid="verified-badge">Verified</span>,
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/card-icon', () => ({
|
||||
default: ({ src, installed, installFailed }: { src: string | object, installed?: boolean, installFailed?: boolean }) => (
|
||||
<div data-testid="card-icon" data-installed={installed} data-install-failed={installFailed}>
|
||||
{typeof src === 'string' ? src : 'emoji-icon'}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/corner-mark', () => ({
|
||||
default: ({ text }: { text: string }) => (
|
||||
<div data-testid="corner-mark">{text}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/description', () => ({
|
||||
default: ({ text, descriptionLineRows }: { text: string, descriptionLineRows?: number }) => (
|
||||
<div data-testid="description" data-rows={descriptionLineRows}>{text}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/org-info', () => ({
|
||||
default: ({ orgName, packageName }: { orgName: string, packageName: string }) => (
|
||||
<div data-testid="org-info">
|
||||
{orgName}
|
||||
/
|
||||
{packageName}
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/placeholder', () => ({
|
||||
default: ({ text }: { text: string }) => (
|
||||
<div data-testid="placeholder">{text}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/card/base/title', () => ({
|
||||
default: ({ title }: { title: string }) => (
|
||||
<div data-testid="title">{title}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
const { default: Card } = await import('@/app/components/plugins/card/index')
|
||||
type CardPayload = Parameters<typeof Card>[0]['payload']
|
||||
|
||||
describe('Plugin Card Rendering Integration', () => {
|
||||
beforeEach(() => {
|
||||
cleanup()
|
||||
})
|
||||
|
||||
const makePayload = (overrides = {}) => ({
|
||||
category: 'tool',
|
||||
type: 'plugin',
|
||||
name: 'google-search',
|
||||
org: 'langgenius',
|
||||
label: { en_US: 'Google Search', zh_Hans: 'Google搜索' },
|
||||
brief: { en_US: 'Search the web using Google', zh_Hans: '使用Google搜索网页' },
|
||||
icon: 'https://example.com/icon.png',
|
||||
verified: true,
|
||||
badges: [] as string[],
|
||||
...overrides,
|
||||
}) as CardPayload
|
||||
|
||||
it('renders a complete plugin card with all subcomponents', () => {
|
||||
const payload = makePayload()
|
||||
render(<Card payload={payload} />)
|
||||
|
||||
expect(screen.getByTestId('card-icon')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('title')).toHaveTextContent('Google Search')
|
||||
expect(screen.getByTestId('org-info')).toHaveTextContent('langgenius/google-search')
|
||||
expect(screen.getByTestId('description')).toHaveTextContent('Search the web using Google')
|
||||
})
|
||||
|
||||
it('shows corner mark with category label when not hidden', () => {
|
||||
const payload = makePayload()
|
||||
render(<Card payload={payload} />)
|
||||
|
||||
expect(screen.getByTestId('corner-mark')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('hides corner mark when hideCornerMark is true', () => {
|
||||
const payload = makePayload()
|
||||
render(<Card payload={payload} hideCornerMark />)
|
||||
|
||||
expect(screen.queryByTestId('corner-mark')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows installed status on icon', () => {
|
||||
const payload = makePayload()
|
||||
render(<Card payload={payload} installed />)
|
||||
|
||||
const icon = screen.getByTestId('card-icon')
|
||||
expect(icon).toHaveAttribute('data-installed', 'true')
|
||||
})
|
||||
|
||||
it('shows install failed status on icon', () => {
|
||||
const payload = makePayload()
|
||||
render(<Card payload={payload} installFailed />)
|
||||
|
||||
const icon = screen.getByTestId('card-icon')
|
||||
expect(icon).toHaveAttribute('data-install-failed', 'true')
|
||||
})
|
||||
|
||||
it('renders verified badge when plugin is verified', () => {
|
||||
const payload = makePayload({ verified: true })
|
||||
render(<Card payload={payload} />)
|
||||
|
||||
expect(screen.getByTestId('verified-badge')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('renders partner badge when plugin has partner badge', () => {
|
||||
const payload = makePayload({ badges: ['partner'] })
|
||||
render(<Card payload={payload} />)
|
||||
|
||||
expect(screen.getByTestId('partner-badge')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('renders footer content when provided', () => {
|
||||
const payload = makePayload()
|
||||
render(
|
||||
<Card
|
||||
payload={payload}
|
||||
footer={<div data-testid="custom-footer">Custom footer</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('custom-footer')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('renders titleLeft content when provided', () => {
|
||||
const payload = makePayload()
|
||||
render(
|
||||
<Card
|
||||
payload={payload}
|
||||
titleLeft={<span data-testid="title-left-content">New</span>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('title-left-content')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('uses dark icon when theme is dark and icon_dark is provided', () => {
|
||||
vi.doMock('@/hooks/use-theme', () => ({
|
||||
default: () => ({ theme: 'dark' }),
|
||||
}))
|
||||
|
||||
const payload = makePayload({
|
||||
icon: 'https://example.com/icon-light.png',
|
||||
icon_dark: 'https://example.com/icon-dark.png',
|
||||
})
|
||||
|
||||
render(<Card payload={payload} />)
|
||||
expect(screen.getByTestId('card-icon')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows loading placeholder when isLoading is true', () => {
|
||||
const payload = makePayload()
|
||||
render(<Card payload={payload} isLoading loadingFileName="uploading.difypkg" />)
|
||||
|
||||
expect(screen.getByTestId('placeholder')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('renders description with custom line rows', () => {
|
||||
const payload = makePayload()
|
||||
render(<Card payload={payload} descriptionLineRows={3} />)
|
||||
|
||||
const description = screen.getByTestId('description')
|
||||
expect(description).toHaveAttribute('data-rows', '3')
|
||||
})
|
||||
})
|
||||
159
web/__tests__/plugins/plugin-data-utilities.test.ts
Normal file
@@ -0,0 +1,159 @@
|
||||
/**
 * Integration Test: Plugin Data Utilities
 *
 * Tests the integration between plugin utility functions, including
 * tag/category validation, form schema transformation, and
 * credential data processing. Verifies that these utilities work
 * correctly together in processing plugin metadata.
 */
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import { transformFormSchemasSecretInput } from '@/app/components/plugins/plugin-auth/utils'
|
||||
import { getValidCategoryKeys, getValidTagKeys } from '@/app/components/plugins/utils'
|
||||
|
||||
type TagInput = Parameters<typeof getValidTagKeys>[0]
|
||||
|
||||
describe('Plugin Data Utilities Integration', () => {
|
||||
describe('Tag and Category Validation Pipeline', () => {
|
||||
it('validates tags and categories in a metadata processing flow', () => {
|
||||
const pluginMetadata = {
|
||||
tags: ['search', 'productivity', 'invalid-tag', 'media-generate'],
|
||||
category: 'tool',
|
||||
}
|
||||
|
||||
const validTags = getValidTagKeys(pluginMetadata.tags as TagInput)
|
||||
expect(validTags.length).toBeGreaterThan(0)
|
||||
expect(validTags.length).toBeLessThanOrEqual(pluginMetadata.tags.length)
|
||||
|
||||
const validCategory = getValidCategoryKeys(pluginMetadata.category)
|
||||
expect(validCategory).toBeDefined()
|
||||
})
|
||||
|
||||
it('handles completely invalid metadata gracefully', () => {
|
||||
const invalidMetadata = {
|
||||
tags: ['nonexistent-1', 'nonexistent-2'],
|
||||
category: 'nonexistent-category',
|
||||
}
|
||||
|
||||
const validTags = getValidTagKeys(invalidMetadata.tags as TagInput)
|
||||
expect(validTags).toHaveLength(0)
|
||||
|
||||
const validCategory = getValidCategoryKeys(invalidMetadata.category)
|
||||
expect(validCategory).toBeUndefined()
|
||||
})
|
||||
|
||||
it('handles undefined and empty inputs', () => {
|
||||
expect(getValidTagKeys([] as TagInput)).toHaveLength(0)
|
||||
expect(getValidCategoryKeys(undefined)).toBeUndefined()
|
||||
expect(getValidCategoryKeys('')).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Credential Secret Masking Pipeline', () => {
|
||||
it('masks secrets when displaying credential form data', () => {
|
||||
const credentialValues = {
|
||||
api_key: 'sk-abc123456789',
|
||||
api_endpoint: 'https://api.example.com',
|
||||
secret_token: 'secret-token-value',
|
||||
description: 'My credential set',
|
||||
}
|
||||
|
||||
const secretFields = ['api_key', 'secret_token']
|
||||
|
||||
const displayValues = transformFormSchemasSecretInput(secretFields, credentialValues)
|
||||
|
||||
expect(displayValues.api_key).toBe('[__HIDDEN__]')
|
||||
expect(displayValues.secret_token).toBe('[__HIDDEN__]')
|
||||
expect(displayValues.api_endpoint).toBe('https://api.example.com')
|
||||
expect(displayValues.description).toBe('My credential set')
|
||||
})
|
||||
|
||||
it('preserves original values when no secret fields', () => {
|
||||
const values = {
|
||||
name: 'test',
|
||||
endpoint: 'https://api.example.com',
|
||||
}
|
||||
|
||||
const result = transformFormSchemasSecretInput([], values)
|
||||
expect(result).toEqual(values)
|
||||
})
|
||||
|
||||
it('handles falsy secret values without masking', () => {
|
||||
const values = {
|
||||
api_key: '',
|
||||
secret: null as unknown as string,
|
||||
other: 'visible',
|
||||
}
|
||||
|
||||
const result = transformFormSchemasSecretInput(['api_key', 'secret'], values)
|
||||
expect(result.api_key).toBe('')
|
||||
expect(result.secret).toBeNull()
|
||||
expect(result.other).toBe('visible')
|
||||
})
|
||||
|
||||
it('does not mutate the original values object', () => {
|
||||
const original = {
|
||||
api_key: 'my-secret-key',
|
||||
name: 'test',
|
||||
}
|
||||
const originalCopy = { ...original }
|
||||
|
||||
transformFormSchemasSecretInput(['api_key'], original)
|
||||
|
||||
expect(original).toEqual(originalCopy)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Combined Plugin Metadata Validation', () => {
|
||||
it('processes a complete plugin entry with tags and credentials', () => {
|
||||
const pluginEntry = {
|
||||
name: 'test-plugin',
|
||||
category: 'tool',
|
||||
tags: ['search', 'invalid-tag'],
|
||||
credentials: {
|
||||
api_key: 'sk-test-key-123',
|
||||
base_url: 'https://api.test.com',
|
||||
},
|
||||
secretFields: ['api_key'],
|
||||
}
|
||||
|
||||
const validCategory = getValidCategoryKeys(pluginEntry.category)
|
||||
expect(validCategory).toBe('tool')
|
||||
|
||||
const validTags = getValidTagKeys(pluginEntry.tags as TagInput)
|
||||
expect(validTags).toContain('search')
|
||||
|
||||
const displayCredentials = transformFormSchemasSecretInput(
|
||||
pluginEntry.secretFields,
|
||||
pluginEntry.credentials,
|
||||
)
|
||||
expect(displayCredentials.api_key).toBe('[__HIDDEN__]')
|
||||
expect(displayCredentials.base_url).toBe('https://api.test.com')
|
||||
|
||||
expect(pluginEntry.credentials.api_key).toBe('sk-test-key-123')
|
||||
})
|
||||
|
||||
it('handles multiple plugins in batch processing', () => {
|
||||
const plugins = [
|
||||
{ tags: ['search', 'productivity'], category: 'tool' },
|
||||
{ tags: ['image', 'design'], category: 'model' },
|
||||
{ tags: ['invalid'], category: 'extension' },
|
||||
]
|
||||
|
||||
const results = plugins.map(p => ({
|
||||
validTags: getValidTagKeys(p.tags as TagInput),
|
||||
validCategory: getValidCategoryKeys(p.category),
|
||||
}))
|
||||
|
||||
expect(results[0].validTags.length).toBeGreaterThan(0)
|
||||
expect(results[0].validCategory).toBe('tool')
|
||||
|
||||
expect(results[1].validTags).toContain('image')
|
||||
expect(results[1].validTags).toContain('design')
|
||||
expect(results[1].validCategory).toBe('model')
|
||||
|
||||
expect(results[2].validTags).toHaveLength(0)
|
||||
expect(results[2].validCategory).toBe('extension')
|
||||
})
|
||||
})
|
||||
})
|
||||
269
web/__tests__/plugins/plugin-install-flow.test.ts
Normal file
@ -0,0 +1,269 @@
|
||||
/**
|
||||
* Integration Test: Plugin Installation Flow
|
||||
*
|
||||
* Tests the integration between GitHub release fetching, version comparison,
|
||||
* upload handling, and task status polling. Verifies the complete plugin
|
||||
* installation pipeline from source discovery to completion.
|
||||
*/
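/*
 * Sketch of the pipeline covered below, assuming the hook shapes used in these
 * tests (repoUrl and onSuccess are placeholders for illustration):
 *
 *   const { fetchReleases, checkForUpdates } = useGitHubReleases()
 *   const releases = await fetchReleases('test-org', 'test-repo')
 *   const { needUpdate } = checkForUpdates(releases, 'v1.0.0')
 *   if (needUpdate) {
 *     const { handleUpload } = useGitHubUpload()
 *     await handleUpload(repoUrl, 'v2.0.0', 'plugin-v2.difypkg', onSuccess)
 *   }
 */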
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('@/config', () => ({
|
||||
GITHUB_ACCESS_TOKEN: '',
|
||||
}))
|
||||
|
||||
const mockToastNotify = vi.fn()
|
||||
vi.mock('@/app/components/base/toast', () => ({
|
||||
default: { notify: (...args: unknown[]) => mockToastNotify(...args) },
|
||||
}))
|
||||
|
||||
const mockUploadGitHub = vi.fn()
|
||||
vi.mock('@/service/plugins', () => ({
|
||||
uploadGitHub: (...args: unknown[]) => mockUploadGitHub(...args),
|
||||
checkTaskStatus: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/semver', () => ({
|
||||
compareVersion: (a: string, b: string) => {
|
||||
const parse = (v: string) => v.replace(/^v/, '').split('.').map(Number)
|
||||
const [aMajor, aMinor = 0, aPatch = 0] = parse(a)
|
||||
const [bMajor, bMinor = 0, bPatch = 0] = parse(b)
|
||||
if (aMajor !== bMajor)
|
||||
return aMajor > bMajor ? 1 : -1
|
||||
if (aMinor !== bMinor)
|
||||
return aMinor > bMinor ? 1 : -1
|
||||
if (aPatch !== bPatch)
|
||||
return aPatch > bPatch ? 1 : -1
|
||||
return 0
|
||||
},
|
||||
getLatestVersion: (versions: string[]) => {
|
||||
return versions.sort((a, b) => {
|
||||
const parse = (v: string) => v.replace(/^v/, '').split('.').map(Number)
|
||||
const [aMaj, aMin = 0, aPat = 0] = parse(a)
|
||||
const [bMaj, bMin = 0, bPat = 0] = parse(b)
|
||||
if (aMaj !== bMaj)
|
||||
return bMaj - aMaj
|
||||
if (aMin !== bMin)
|
||||
return bMin - aMin
|
||||
return bPat - aPat
|
||||
})[0]
|
||||
},
|
||||
}))
|
||||
|
||||
const { useGitHubReleases, useGitHubUpload } = await import(
|
||||
'@/app/components/plugins/install-plugin/hooks',
|
||||
)
|
||||
|
||||
describe('Plugin Installation Flow Integration', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
globalThis.fetch = vi.fn()
|
||||
})
|
||||
|
||||
describe('GitHub Release Discovery → Version Check → Upload Pipeline', () => {
|
||||
it('fetches releases, checks for updates, and uploads the new version', async () => {
|
||||
const mockReleases = [
|
||||
{
|
||||
tag_name: 'v2.0.0',
|
||||
assets: [{ browser_download_url: 'https://github.com/test/v2.difypkg', name: 'plugin-v2.difypkg' }],
|
||||
},
|
||||
{
|
||||
tag_name: 'v1.5.0',
|
||||
assets: [{ browser_download_url: 'https://github.com/test/v1.5.difypkg', name: 'plugin-v1.5.difypkg' }],
|
||||
},
|
||||
{
|
||||
tag_name: 'v1.0.0',
|
||||
assets: [{ browser_download_url: 'https://github.com/test/v1.difypkg', name: 'plugin-v1.difypkg' }],
|
||||
},
|
||||
]
|
||||
|
||||
;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockReleases),
|
||||
})
|
||||
|
||||
mockUploadGitHub.mockResolvedValue({
|
||||
manifest: { name: 'test-plugin', version: '2.0.0' },
|
||||
unique_identifier: 'test-plugin:2.0.0',
|
||||
})
|
||||
|
||||
const { fetchReleases, checkForUpdates } = useGitHubReleases()
|
||||
|
||||
const releases = await fetchReleases('test-org', 'test-repo')
|
||||
expect(releases).toHaveLength(3)
|
||||
expect(releases[0].tag_name).toBe('v2.0.0')
|
||||
|
||||
const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')
|
||||
expect(needUpdate).toBe(true)
|
||||
expect(toastProps.message).toContain('v2.0.0')
|
||||
|
||||
const { handleUpload } = useGitHubUpload()
|
||||
const onSuccess = vi.fn()
|
||||
const result = await handleUpload(
|
||||
'https://github.com/test-org/test-repo',
|
||||
'v2.0.0',
|
||||
'plugin-v2.difypkg',
|
||||
onSuccess,
|
||||
)
|
||||
|
||||
expect(mockUploadGitHub).toHaveBeenCalledWith(
|
||||
'https://github.com/test-org/test-repo',
|
||||
'v2.0.0',
|
||||
'plugin-v2.difypkg',
|
||||
)
|
||||
expect(onSuccess).toHaveBeenCalledWith({
|
||||
manifest: { name: 'test-plugin', version: '2.0.0' },
|
||||
unique_identifier: 'test-plugin:2.0.0',
|
||||
})
|
||||
expect(result).toEqual({
|
||||
manifest: { name: 'test-plugin', version: '2.0.0' },
|
||||
unique_identifier: 'test-plugin:2.0.0',
|
||||
})
|
||||
})
|
||||
|
||||
it('handles no new version available', async () => {
|
||||
const mockReleases = [
|
||||
{
|
||||
tag_name: 'v1.0.0',
|
||||
assets: [{ browser_download_url: 'https://github.com/test/v1.difypkg', name: 'plugin-v1.difypkg' }],
|
||||
},
|
||||
]
|
||||
|
||||
;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockReleases),
|
||||
})
|
||||
|
||||
const { fetchReleases, checkForUpdates } = useGitHubReleases()
|
||||
|
||||
const releases = await fetchReleases('test-org', 'test-repo')
|
||||
const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')
|
||||
|
||||
expect(needUpdate).toBe(false)
|
||||
expect(toastProps.type).toBe('info')
|
||||
expect(toastProps.message).toBe('No new version available')
|
||||
})
|
||||
|
||||
it('handles empty releases', async () => {
|
||||
;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
})
|
||||
|
||||
const { fetchReleases, checkForUpdates } = useGitHubReleases()
|
||||
|
||||
const releases = await fetchReleases('test-org', 'test-repo')
|
||||
expect(releases).toHaveLength(0)
|
||||
|
||||
const { needUpdate, toastProps } = checkForUpdates(releases, 'v1.0.0')
|
||||
expect(needUpdate).toBe(false)
|
||||
expect(toastProps.type).toBe('error')
|
||||
expect(toastProps.message).toBe('Input releases is empty')
|
||||
})
|
||||
|
||||
it('handles fetch failure gracefully', async () => {
|
||||
;(globalThis.fetch as ReturnType<typeof vi.fn>).mockResolvedValue({
|
||||
ok: false,
|
||||
status: 404,
|
||||
})
|
||||
|
||||
const { fetchReleases } = useGitHubReleases()
|
||||
const releases = await fetchReleases('nonexistent-org', 'nonexistent-repo')
|
||||
|
||||
expect(releases).toEqual([])
|
||||
expect(mockToastNotify).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ type: 'error' }),
|
||||
)
|
||||
})
|
||||
|
||||
it('handles upload failure gracefully', async () => {
|
||||
mockUploadGitHub.mockRejectedValue(new Error('Upload failed'))
|
||||
|
||||
const { handleUpload } = useGitHubUpload()
|
||||
const onSuccess = vi.fn()
|
||||
|
||||
await expect(
|
||||
handleUpload('https://github.com/test/repo', 'v1.0.0', 'plugin.difypkg', onSuccess),
|
||||
).rejects.toThrow('Upload failed')
|
||||
|
||||
expect(onSuccess).not.toHaveBeenCalled()
|
||||
expect(mockToastNotify).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ type: 'error', message: 'Error uploading package' }),
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Task Status Polling Integration', () => {
|
||||
it('polls until plugin installation succeeds', async () => {
|
||||
const mockCheckTaskStatus = vi.fn()
|
||||
.mockResolvedValueOnce({
|
||||
task: {
|
||||
plugins: [{ plugin_unique_identifier: 'test:1.0.0', status: 'running' }],
|
||||
},
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
task: {
|
||||
plugins: [{ plugin_unique_identifier: 'test:1.0.0', status: 'success' }],
|
||||
},
|
||||
})
|
||||
|
||||
const { checkTaskStatus: fetchCheckTaskStatus } = await import('@/service/plugins')
|
||||
;(fetchCheckTaskStatus as ReturnType<typeof vi.fn>).mockImplementation(mockCheckTaskStatus)
|
||||
|
||||
await vi.doMock('@/utils', () => ({
|
||||
sleep: () => Promise.resolve(),
|
||||
}))
|
||||
|
||||
const { default: checkTaskStatus } = await import(
|
||||
'@/app/components/plugins/install-plugin/base/check-task-status',
|
||||
)
|
||||
|
||||
const checker = checkTaskStatus()
|
||||
const result = await checker.check({
|
||||
taskId: 'task-123',
|
||||
pluginUniqueIdentifier: 'test:1.0.0',
|
||||
})
|
||||
|
||||
expect(result.status).toBe('success')
|
||||
})
|
||||
|
||||
it('returns failure when plugin not found in task', async () => {
|
||||
const mockCheckTaskStatus = vi.fn().mockResolvedValue({
|
||||
task: {
|
||||
plugins: [{ plugin_unique_identifier: 'other:1.0.0', status: 'success' }],
|
||||
},
|
||||
})
|
||||
|
||||
const { checkTaskStatus: fetchCheckTaskStatus } = await import('@/service/plugins')
|
||||
;(fetchCheckTaskStatus as ReturnType<typeof vi.fn>).mockImplementation(mockCheckTaskStatus)
|
||||
|
||||
const { default: checkTaskStatus } = await import(
|
||||
'@/app/components/plugins/install-plugin/base/check-task-status',
|
||||
)
|
||||
|
||||
const checker = checkTaskStatus()
|
||||
const result = await checker.check({
|
||||
taskId: 'task-123',
|
||||
pluginUniqueIdentifier: 'test:1.0.0',
|
||||
})
|
||||
|
||||
expect(result.status).toBe('failed')
|
||||
expect(result.error).toBe('Plugin package not found')
|
||||
})
|
||||
|
||||
it('stops polling when stop() is called', async () => {
|
||||
const { default: checkTaskStatus } = await import(
|
||||
'@/app/components/plugins/install-plugin/base/check-task-status',
|
||||
)
|
||||
|
||||
const checker = checkTaskStatus()
|
||||
checker.stop()
|
||||
|
||||
const result = await checker.check({
|
||||
taskId: 'task-123',
|
||||
pluginUniqueIdentifier: 'test:1.0.0',
|
||||
})
|
||||
|
||||
expect(result.status).toBe('success')
|
||||
})
|
||||
})
|
||||
})
|
||||
97
web/__tests__/plugins/plugin-marketplace-to-install.test.tsx
Normal file
@ -0,0 +1,97 @@
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
import { pluginInstallLimit } from '@/app/components/plugins/install-plugin/hooks/use-install-plugin-limit'
|
||||
import { InstallationScope } from '@/types/feature'
|
||||
|
||||
vi.mock('@/context/global-public-context', () => ({
|
||||
useGlobalPublicStore: () => ({
|
||||
plugin_installation_permission: {
|
||||
restrict_to_marketplace_only: false,
|
||||
plugin_installation_scope: InstallationScope.ALL,
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
describe('Plugin Marketplace to Install Flow', () => {
|
||||
describe('install permission validation pipeline', () => {
|
||||
const systemFeaturesAll = {
|
||||
plugin_installation_permission: {
|
||||
restrict_to_marketplace_only: false,
|
||||
plugin_installation_scope: InstallationScope.ALL,
|
||||
},
|
||||
}
|
||||
|
||||
const systemFeaturesMarketplaceOnly = {
|
||||
plugin_installation_permission: {
|
||||
restrict_to_marketplace_only: true,
|
||||
plugin_installation_scope: InstallationScope.ALL,
|
||||
},
|
||||
}
|
||||
|
||||
const systemFeaturesOfficialOnly = {
|
||||
plugin_installation_permission: {
|
||||
restrict_to_marketplace_only: false,
|
||||
plugin_installation_scope: InstallationScope.OFFICIAL_ONLY,
|
||||
},
|
||||
}
|
||||
|
||||
it('should allow marketplace plugin when all sources allowed', () => {
|
||||
const plugin = { from: 'marketplace' as const, verification: { authorized_category: 'langgenius' } }
|
||||
const result = pluginInstallLimit(plugin as never, systemFeaturesAll as never)
|
||||
expect(result.canInstall).toBe(true)
|
||||
})
|
||||
|
||||
it('should allow github plugin when all sources allowed', () => {
|
||||
const plugin = { from: 'github' as const, verification: { authorized_category: 'langgenius' } }
|
||||
const result = pluginInstallLimit(plugin as never, systemFeaturesAll as never)
|
||||
expect(result.canInstall).toBe(true)
|
||||
})
|
||||
|
||||
it('should block github plugin when marketplace only', () => {
|
||||
const plugin = { from: 'github' as const, verification: { authorized_category: 'langgenius' } }
|
||||
const result = pluginInstallLimit(plugin as never, systemFeaturesMarketplaceOnly as never)
|
||||
expect(result.canInstall).toBe(false)
|
||||
})
|
||||
|
||||
it('should allow marketplace plugin when marketplace only', () => {
|
||||
const plugin = { from: 'marketplace' as const, verification: { authorized_category: 'partner' } }
|
||||
const result = pluginInstallLimit(plugin as never, systemFeaturesMarketplaceOnly as never)
|
||||
expect(result.canInstall).toBe(true)
|
||||
})
|
||||
|
||||
it('should allow official plugin when official only', () => {
|
||||
const plugin = { from: 'marketplace' as const, verification: { authorized_category: 'langgenius' } }
|
||||
const result = pluginInstallLimit(plugin as never, systemFeaturesOfficialOnly as never)
|
||||
expect(result.canInstall).toBe(true)
|
||||
})
|
||||
|
||||
it('should block community plugin when official only', () => {
|
||||
const plugin = { from: 'marketplace' as const, verification: { authorized_category: 'community' } }
|
||||
const result = pluginInstallLimit(plugin as never, systemFeaturesOfficialOnly as never)
|
||||
expect(result.canInstall).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('plugin source classification', () => {
|
||||
it('should correctly classify plugin install sources', () => {
|
||||
const sources = ['marketplace', 'github', 'package'] as const
|
||||
const features = {
|
||||
plugin_installation_permission: {
|
||||
restrict_to_marketplace_only: true,
|
||||
plugin_installation_scope: InstallationScope.ALL,
|
||||
},
|
||||
}
|
||||
|
||||
const results = sources.map(source => ({
|
||||
source,
|
||||
canInstall: pluginInstallLimit(
|
||||
{ from: source, verification: { authorized_category: 'langgenius' } } as never,
|
||||
features as never,
|
||||
).canInstall,
|
||||
}))
|
||||
|
||||
expect(results.find(r => r.source === 'marketplace')?.canInstall).toBe(true)
|
||||
expect(results.find(r => r.source === 'github')?.canInstall).toBe(false)
|
||||
expect(results.find(r => r.source === 'package')?.canInstall).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
120
web/__tests__/plugins/plugin-page-filter-management.test.tsx
Normal file
@ -0,0 +1,120 @@
|
||||
import { act, renderHook } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it } from 'vitest'
|
||||
import { useStore } from '@/app/components/plugins/plugin-page/filter-management/store'
|
||||
|
||||
describe('Plugin Page Filter Management Integration', () => {
|
||||
beforeEach(() => {
|
||||
const { result } = renderHook(() => useStore())
|
||||
act(() => {
|
||||
result.current.setTagList([])
|
||||
result.current.setCategoryList([])
|
||||
result.current.setShowTagManagementModal(false)
|
||||
result.current.setShowCategoryManagementModal(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('tag and category filter lifecycle', () => {
|
||||
it('should manage full tag lifecycle: add -> update -> clear', () => {
|
||||
const { result } = renderHook(() => useStore())
|
||||
|
||||
const initialTags = [
|
||||
{ name: 'search', label: { en_US: 'Search' } },
|
||||
{ name: 'productivity', label: { en_US: 'Productivity' } },
|
||||
]
|
||||
|
||||
act(() => {
|
||||
result.current.setTagList(initialTags as never[])
|
||||
})
|
||||
expect(result.current.tagList).toHaveLength(2)
|
||||
|
||||
const updatedTags = [
|
||||
...initialTags,
|
||||
{ name: 'image', label: { en_US: 'Image' } },
|
||||
]
|
||||
|
||||
act(() => {
|
||||
result.current.setTagList(updatedTags as never[])
|
||||
})
|
||||
expect(result.current.tagList).toHaveLength(3)
|
||||
|
||||
act(() => {
|
||||
result.current.setTagList([])
|
||||
})
|
||||
expect(result.current.tagList).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should manage full category lifecycle: add -> update -> clear', () => {
|
||||
const { result } = renderHook(() => useStore())
|
||||
|
||||
const categories = [
|
||||
{ name: 'tool', label: { en_US: 'Tool' } },
|
||||
{ name: 'model', label: { en_US: 'Model' } },
|
||||
]
|
||||
|
||||
act(() => {
|
||||
result.current.setCategoryList(categories as never[])
|
||||
})
|
||||
expect(result.current.categoryList).toHaveLength(2)
|
||||
|
||||
act(() => {
|
||||
result.current.setCategoryList([])
|
||||
})
|
||||
expect(result.current.categoryList).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('modal state management', () => {
|
||||
it('should manage tag management modal independently', () => {
|
||||
const { result } = renderHook(() => useStore())
|
||||
|
||||
act(() => {
|
||||
result.current.setShowTagManagementModal(true)
|
||||
})
|
||||
expect(result.current.showTagManagementModal).toBe(true)
|
||||
expect(result.current.showCategoryManagementModal).toBe(false)
|
||||
|
||||
act(() => {
|
||||
result.current.setShowTagManagementModal(false)
|
||||
})
|
||||
expect(result.current.showTagManagementModal).toBe(false)
|
||||
})
|
||||
|
||||
it('should manage category management modal independently', () => {
|
||||
const { result } = renderHook(() => useStore())
|
||||
|
||||
act(() => {
|
||||
result.current.setShowCategoryManagementModal(true)
|
||||
})
|
||||
expect(result.current.showCategoryManagementModal).toBe(true)
|
||||
expect(result.current.showTagManagementModal).toBe(false)
|
||||
})
|
||||
|
||||
it('should support both modals open simultaneously', () => {
|
||||
const { result } = renderHook(() => useStore())
|
||||
|
||||
act(() => {
|
||||
result.current.setShowTagManagementModal(true)
|
||||
result.current.setShowCategoryManagementModal(true)
|
||||
})
|
||||
|
||||
expect(result.current.showTagManagementModal).toBe(true)
|
||||
expect(result.current.showCategoryManagementModal).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('state persistence across renders', () => {
|
||||
it('should maintain filter state when re-rendered', () => {
|
||||
const { result, rerender } = renderHook(() => useStore())
|
||||
|
||||
act(() => {
|
||||
result.current.setTagList([{ name: 'search' }] as never[])
|
||||
result.current.setCategoryList([{ name: 'tool' }] as never[])
|
||||
})
|
||||
|
||||
rerender()
|
||||
|
||||
expect(result.current.tagList).toHaveLength(1)
|
||||
expect(result.current.categoryList).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
210
web/__tests__/rag-pipeline/chunk-preview-formatting.test.ts
Normal file
@ -0,0 +1,210 @@
|
||||
/**
|
||||
* Integration test: Chunk preview formatting pipeline
|
||||
*
|
||||
* Tests the formatPreviewChunks utility across all chunking modes
|
||||
* (text, parentChild, QA) with real data structures.
|
||||
*/
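/*
 * Output shape of formatPreviewChunks as asserted below, keyed by chunk_structure:
 *   'text'          -> Array<{ content, summary? }>
 *   'parent-child'  -> { parent_child_chunks, parent_mode }
 *   'qa'            -> { qa_chunks: Array<{ question, answer }> }
 *   null/undefined or unknown structure -> undefined
 * Each result is capped at RAG_PIPELINE_PREVIEW_CHUNK_NUM entries (mocked to 3 here).
 */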
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
|
||||
vi.mock('@/config', () => ({
|
||||
RAG_PIPELINE_PREVIEW_CHUNK_NUM: 3,
|
||||
}))
|
||||
|
||||
vi.mock('@/models/datasets', () => ({
|
||||
ChunkingMode: {
|
||||
text: 'text',
|
||||
parentChild: 'parent-child',
|
||||
qa: 'qa',
|
||||
},
|
||||
}))
|
||||
|
||||
const { formatPreviewChunks } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/test-run/result/result-preview/utils',
|
||||
)
|
||||
|
||||
describe('Chunk Preview Formatting', () => {
|
||||
describe('general text chunks', () => {
|
||||
it('should format text chunks correctly', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'text',
|
||||
preview: [
|
||||
{ content: 'Chunk 1 content', summary: 'Summary 1' },
|
||||
{ content: 'Chunk 2 content' },
|
||||
],
|
||||
}
|
||||
|
||||
const result = formatPreviewChunks(outputs)
|
||||
|
||||
expect(Array.isArray(result)).toBe(true)
|
||||
const chunks = result as Array<{ content: string, summary?: string }>
|
||||
expect(chunks).toHaveLength(2)
|
||||
expect(chunks[0].content).toBe('Chunk 1 content')
|
||||
expect(chunks[0].summary).toBe('Summary 1')
|
||||
expect(chunks[1].content).toBe('Chunk 2 content')
|
||||
})
|
||||
|
||||
it('should limit chunks to RAG_PIPELINE_PREVIEW_CHUNK_NUM', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'text',
|
||||
preview: Array.from({ length: 10 }, (_, i) => ({
|
||||
content: `Chunk ${i + 1}`,
|
||||
})),
|
||||
}
|
||||
|
||||
const result = formatPreviewChunks(outputs)
|
||||
const chunks = result as Array<{ content: string }>
|
||||
|
||||
expect(chunks).toHaveLength(3) // Mocked limit
|
||||
})
|
||||
})
|
||||
|
||||
describe('parent-child chunks — paragraph mode', () => {
|
||||
it('should format paragraph parent-child chunks', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'parent-child',
|
||||
parent_mode: 'paragraph',
|
||||
preview: [
|
||||
{
|
||||
content: 'Parent paragraph',
|
||||
child_chunks: ['Child 1', 'Child 2'],
|
||||
summary: 'Parent summary',
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const result = formatPreviewChunks(outputs) as {
|
||||
parent_child_chunks: Array<{
|
||||
parent_content: string
|
||||
parent_summary?: string
|
||||
child_contents: string[]
|
||||
parent_mode: string
|
||||
}>
|
||||
parent_mode: string
|
||||
}
|
||||
|
||||
expect(result.parent_mode).toBe('paragraph')
|
||||
expect(result.parent_child_chunks).toHaveLength(1)
|
||||
expect(result.parent_child_chunks[0].parent_content).toBe('Parent paragraph')
|
||||
expect(result.parent_child_chunks[0].parent_summary).toBe('Parent summary')
|
||||
expect(result.parent_child_chunks[0].child_contents).toEqual(['Child 1', 'Child 2'])
|
||||
})
|
||||
|
||||
it('should limit parent chunks in paragraph mode', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'parent-child',
|
||||
parent_mode: 'paragraph',
|
||||
preview: Array.from({ length: 10 }, (_, i) => ({
|
||||
content: `Parent ${i + 1}`,
|
||||
child_chunks: [`Child of ${i + 1}`],
|
||||
})),
|
||||
}
|
||||
|
||||
const result = formatPreviewChunks(outputs) as {
|
||||
parent_child_chunks: unknown[]
|
||||
}
|
||||
|
||||
expect(result.parent_child_chunks).toHaveLength(3) // Mocked limit
|
||||
})
|
||||
})
|
||||
|
||||
describe('parent-child chunks — full-doc mode', () => {
|
||||
it('should format full-doc parent-child chunks', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'parent-child',
|
||||
parent_mode: 'full-doc',
|
||||
preview: [
|
||||
{
|
||||
content: 'Full document content',
|
||||
child_chunks: ['Section 1', 'Section 2', 'Section 3'],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const result = formatPreviewChunks(outputs) as {
|
||||
parent_child_chunks: Array<{
|
||||
parent_content: string
|
||||
child_contents: string[]
|
||||
parent_mode: string
|
||||
}>
|
||||
}
|
||||
|
||||
expect(result.parent_child_chunks).toHaveLength(1)
|
||||
expect(result.parent_child_chunks[0].parent_content).toBe('Full document content')
|
||||
expect(result.parent_child_chunks[0].parent_mode).toBe('full-doc')
|
||||
})
|
||||
|
||||
it('should limit child chunks in full-doc mode', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'parent-child',
|
||||
parent_mode: 'full-doc',
|
||||
preview: [
|
||||
{
|
||||
content: 'Document',
|
||||
child_chunks: Array.from({ length: 20 }, (_, i) => `Section ${i + 1}`),
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const result = formatPreviewChunks(outputs) as {
|
||||
parent_child_chunks: Array<{ child_contents: string[] }>
|
||||
}
|
||||
|
||||
expect(result.parent_child_chunks[0].child_contents).toHaveLength(3) // Mocked limit
|
||||
})
|
||||
})
|
||||
|
||||
describe('QA chunks', () => {
|
||||
it('should format QA chunks correctly', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'qa',
|
||||
qa_preview: [
|
||||
{ question: 'What is AI?', answer: 'Artificial Intelligence is...' },
|
||||
{ question: 'What is ML?', answer: 'Machine Learning is...' },
|
||||
],
|
||||
}
|
||||
|
||||
const result = formatPreviewChunks(outputs) as {
|
||||
qa_chunks: Array<{ question: string, answer: string }>
|
||||
}
|
||||
|
||||
expect(result.qa_chunks).toHaveLength(2)
|
||||
expect(result.qa_chunks[0].question).toBe('What is AI?')
|
||||
expect(result.qa_chunks[0].answer).toBe('Artificial Intelligence is...')
|
||||
})
|
||||
|
||||
it('should limit QA chunks', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'qa',
|
||||
qa_preview: Array.from({ length: 10 }, (_, i) => ({
|
||||
question: `Q${i + 1}`,
|
||||
answer: `A${i + 1}`,
|
||||
})),
|
||||
}
|
||||
|
||||
const result = formatPreviewChunks(outputs) as {
|
||||
qa_chunks: unknown[]
|
||||
}
|
||||
|
||||
expect(result.qa_chunks).toHaveLength(3) // Mocked limit
|
||||
})
|
||||
})
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should return undefined for null outputs', () => {
|
||||
expect(formatPreviewChunks(null)).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should return undefined for undefined outputs', () => {
|
||||
expect(formatPreviewChunks(undefined)).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should return undefined for unknown chunk_structure', () => {
|
||||
const outputs = {
|
||||
chunk_structure: 'unknown-type',
|
||||
preview: [],
|
||||
}
|
||||
|
||||
expect(formatPreviewChunks(outputs)).toBeUndefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
179
web/__tests__/rag-pipeline/dsl-export-import-flow.test.ts
Normal file
@ -0,0 +1,179 @@
|
||||
/**
|
||||
* Integration test: DSL export/import flow
|
||||
*
|
||||
* Validates DSL export logic (sync draft → check secrets → download)
|
||||
* and DSL import modal state management.
|
||||
*/
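/*
 * Flow sketch for useDSL as mocked below: handleExportDSL() syncs the draft,
 * fetches the DSL via useExportPipelineDSL, and downloads '<knowledgeName>.pipeline';
 * exportCheck() emits DSL_EXPORT_CHECK instead when the draft contains secret
 * environment variables.
 *
 *   const { handleExportDSL, exportCheck } = useDSL()
 *   await handleExportDSL()      // include defaults to false
 *   await handleExportDSL(true)  // include secret values in the export
 *   await exportCheck()          // may emit DSL_EXPORT_CHECK before exporting
 */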
|
||||
import { act, renderHook } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const mockDoSyncWorkflowDraft = vi.fn().mockResolvedValue(undefined)
|
||||
const mockExportPipelineConfig = vi.fn().mockResolvedValue({ data: 'yaml-content' })
|
||||
const mockNotify = vi.fn()
|
||||
const mockEventEmitter = { emit: vi.fn() }
|
||||
const mockDownloadBlob = vi.fn()
|
||||
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/toast', () => ({
|
||||
useToastContext: () => ({ notify: mockNotify }),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/workflow/constants', () => ({
|
||||
DSL_EXPORT_CHECK: 'DSL_EXPORT_CHECK',
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/workflow/store', () => ({
|
||||
useWorkflowStore: () => ({
|
||||
getState: () => ({
|
||||
pipelineId: 'pipeline-abc',
|
||||
knowledgeName: 'My Pipeline',
|
||||
}),
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/event-emitter', () => ({
|
||||
useEventEmitterContextContext: () => ({
|
||||
eventEmitter: mockEventEmitter,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-pipeline', () => ({
|
||||
useExportPipelineDSL: () => ({
|
||||
mutateAsync: mockExportPipelineConfig,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/workflow', () => ({
|
||||
fetchWorkflowDraft: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/download', () => ({
|
||||
downloadBlob: (...args: unknown[]) => mockDownloadBlob(...args),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/rag-pipeline/hooks/use-nodes-sync-draft', () => ({
|
||||
useNodesSyncDraft: () => ({
|
||||
doSyncWorkflowDraft: mockDoSyncWorkflowDraft,
|
||||
}),
|
||||
}))
|
||||
|
||||
describe('DSL Export/Import Flow', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Export Flow', () => {
|
||||
it('should sync draft then export then download', async () => {
|
||||
const { useDSL } = await import('@/app/components/rag-pipeline/hooks/use-DSL')
|
||||
const { result } = renderHook(() => useDSL())
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleExportDSL()
|
||||
})
|
||||
|
||||
expect(mockDoSyncWorkflowDraft).toHaveBeenCalled()
|
||||
expect(mockExportPipelineConfig).toHaveBeenCalledWith({
|
||||
pipelineId: 'pipeline-abc',
|
||||
include: false,
|
||||
})
|
||||
expect(mockDownloadBlob).toHaveBeenCalledWith(expect.objectContaining({
|
||||
fileName: 'My Pipeline.pipeline',
|
||||
}))
|
||||
})
|
||||
|
||||
it('should export with include flag when specified', async () => {
|
||||
const { useDSL } = await import('@/app/components/rag-pipeline/hooks/use-DSL')
|
||||
const { result } = renderHook(() => useDSL())
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleExportDSL(true)
|
||||
})
|
||||
|
||||
expect(mockExportPipelineConfig).toHaveBeenCalledWith({
|
||||
pipelineId: 'pipeline-abc',
|
||||
include: true,
|
||||
})
|
||||
})
|
||||
|
||||
it('should notify on export error', async () => {
|
||||
mockDoSyncWorkflowDraft.mockRejectedValueOnce(new Error('sync failed'))
|
||||
const { useDSL } = await import('@/app/components/rag-pipeline/hooks/use-DSL')
|
||||
const { result } = renderHook(() => useDSL())
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleExportDSL()
|
||||
})
|
||||
|
||||
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'error',
|
||||
}))
|
||||
})
|
||||
})
|
||||
|
||||
describe('Export Check Flow', () => {
|
||||
it('should export directly when no secret environment variables', async () => {
|
||||
const { fetchWorkflowDraft } = await import('@/service/workflow')
|
||||
vi.mocked(fetchWorkflowDraft).mockResolvedValueOnce({
|
||||
environment_variables: [
|
||||
{ value_type: 'string', key: 'API_URL', value: 'https://api.example.com' },
|
||||
],
|
||||
} as unknown as Awaited<ReturnType<typeof fetchWorkflowDraft>>)
|
||||
|
||||
const { useDSL } = await import('@/app/components/rag-pipeline/hooks/use-DSL')
|
||||
const { result } = renderHook(() => useDSL())
|
||||
|
||||
await act(async () => {
|
||||
await result.current.exportCheck()
|
||||
})
|
||||
|
||||
// Should proceed to export directly (no secret vars)
|
||||
expect(mockDoSyncWorkflowDraft).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should emit DSL_EXPORT_CHECK event when secret variables exist', async () => {
|
||||
const { fetchWorkflowDraft } = await import('@/service/workflow')
|
||||
vi.mocked(fetchWorkflowDraft).mockResolvedValueOnce({
|
||||
environment_variables: [
|
||||
{ value_type: 'secret', key: 'API_KEY', value: '***' },
|
||||
],
|
||||
} as unknown as Awaited<ReturnType<typeof fetchWorkflowDraft>>)
|
||||
|
||||
const { useDSL } = await import('@/app/components/rag-pipeline/hooks/use-DSL')
|
||||
const { result } = renderHook(() => useDSL())
|
||||
|
||||
await act(async () => {
|
||||
await result.current.exportCheck()
|
||||
})
|
||||
|
||||
expect(mockEventEmitter.emit).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'DSL_EXPORT_CHECK',
|
||||
payload: expect.objectContaining({
|
||||
data: expect.arrayContaining([
|
||||
expect.objectContaining({ value_type: 'secret' }),
|
||||
]),
|
||||
}),
|
||||
}))
|
||||
})
|
||||
|
||||
it('should notify on export check error', async () => {
|
||||
const { fetchWorkflowDraft } = await import('@/service/workflow')
|
||||
vi.mocked(fetchWorkflowDraft).mockRejectedValueOnce(new Error('fetch failed'))
|
||||
|
||||
const { useDSL } = await import('@/app/components/rag-pipeline/hooks/use-DSL')
|
||||
const { result } = renderHook(() => useDSL())
|
||||
|
||||
await act(async () => {
|
||||
await result.current.exportCheck()
|
||||
})
|
||||
|
||||
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
|
||||
type: 'error',
|
||||
}))
|
||||
})
|
||||
})
|
||||
})
|
||||
278
web/__tests__/rag-pipeline/input-field-crud-flow.test.ts
Normal file
@ -0,0 +1,278 @@
|
||||
/**
|
||||
* Integration test: Input field CRUD complete flow
|
||||
*
|
||||
* Validates the full lifecycle of input fields:
|
||||
* creation, editing, renaming, removal, and data conversion round-trip.
|
||||
*/
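/*
 * Round-trip sketch of the converters under test (field mappings as asserted
 * below; not the full InputVar/FormData types):
 *
 *   const formData = convertToInputFieldFormData(inputVar)
 *   //   max_length -> maxLength, default_value -> default
 *   const restored = convertFormDataToINputField(formData)
 *   //   maxLength -> max_length, default -> default_value
 */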
|
||||
import type { FormData } from '@/app/components/rag-pipeline/components/panel/input-field/editor/form/types'
|
||||
import type { InputVar } from '@/models/pipeline'
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
import { SupportUploadFileTypes } from '@/app/components/workflow/types'
|
||||
import { PipelineInputVarType } from '@/models/pipeline'
|
||||
import { TransferMethod } from '@/types/app'
|
||||
|
||||
vi.mock('@/config', () => ({
|
||||
VAR_ITEM_TEMPLATE_IN_PIPELINE: {
|
||||
type: 'text-input',
|
||||
label: '',
|
||||
variable: '',
|
||||
max_length: 48,
|
||||
default_value: undefined,
|
||||
required: true,
|
||||
tooltips: undefined,
|
||||
options: [],
|
||||
placeholder: undefined,
|
||||
unit: undefined,
|
||||
allowed_file_upload_methods: undefined,
|
||||
allowed_file_types: undefined,
|
||||
allowed_file_extensions: undefined,
|
||||
},
|
||||
}))
|
||||
|
||||
describe('Input Field CRUD Flow', () => {
|
||||
describe('Create → Edit → Convert Round-trip', () => {
|
||||
it('should create a text field and roundtrip through form data', async () => {
|
||||
const { convertToInputFieldFormData, convertFormDataToINputField } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/input-field/editor/utils',
|
||||
)
|
||||
|
||||
// Create new field from template (no data passed)
|
||||
const newFormData = convertToInputFieldFormData()
|
||||
expect(newFormData.type).toBe('text-input')
|
||||
expect(newFormData.variable).toBe('')
|
||||
expect(newFormData.label).toBe('')
|
||||
expect(newFormData.required).toBe(true)
|
||||
|
||||
// Simulate user editing form data
|
||||
const editedFormData: FormData = {
|
||||
...newFormData,
|
||||
variable: 'user_name',
|
||||
label: 'User Name',
|
||||
maxLength: 100,
|
||||
default: 'John',
|
||||
tooltips: 'Enter your name',
|
||||
placeholder: 'Type here...',
|
||||
allowedTypesAndExtensions: {},
|
||||
}
|
||||
|
||||
// Convert back to InputVar
|
||||
const inputVar = convertFormDataToINputField(editedFormData)
|
||||
|
||||
expect(inputVar.variable).toBe('user_name')
|
||||
expect(inputVar.label).toBe('User Name')
|
||||
expect(inputVar.max_length).toBe(100)
|
||||
expect(inputVar.default_value).toBe('John')
|
||||
expect(inputVar.tooltips).toBe('Enter your name')
|
||||
expect(inputVar.placeholder).toBe('Type here...')
|
||||
expect(inputVar.required).toBe(true)
|
||||
})
|
||||
|
||||
it('should handle file field with upload settings', async () => {
|
||||
const { convertToInputFieldFormData, convertFormDataToINputField } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/input-field/editor/utils',
|
||||
)
|
||||
|
||||
const fileInputVar: InputVar = {
|
||||
type: PipelineInputVarType.singleFile,
|
||||
label: 'Upload Document',
|
||||
variable: 'doc_file',
|
||||
max_length: 1,
|
||||
default_value: undefined,
|
||||
required: true,
|
||||
tooltips: 'Upload a PDF',
|
||||
options: [],
|
||||
placeholder: undefined,
|
||||
unit: undefined,
|
||||
allowed_file_upload_methods: [TransferMethod.local_file, TransferMethod.remote_url],
|
||||
allowed_file_types: [SupportUploadFileTypes.document],
|
||||
allowed_file_extensions: ['.pdf', '.docx'],
|
||||
}
|
||||
|
||||
// Convert to form data
|
||||
const formData = convertToInputFieldFormData(fileInputVar)
|
||||
expect(formData.allowedFileUploadMethods).toEqual([TransferMethod.local_file, TransferMethod.remote_url])
|
||||
expect(formData.allowedTypesAndExtensions).toEqual({
|
||||
allowedFileTypes: [SupportUploadFileTypes.document],
|
||||
allowedFileExtensions: ['.pdf', '.docx'],
|
||||
})
|
||||
|
||||
// Round-trip back
|
||||
const restored = convertFormDataToINputField(formData)
|
||||
expect(restored.allowed_file_upload_methods).toEqual([TransferMethod.local_file, TransferMethod.remote_url])
|
||||
expect(restored.allowed_file_types).toEqual([SupportUploadFileTypes.document])
|
||||
expect(restored.allowed_file_extensions).toEqual(['.pdf', '.docx'])
|
||||
})
|
||||
|
||||
it('should handle select field with options', async () => {
|
||||
const { convertToInputFieldFormData, convertFormDataToINputField } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/input-field/editor/utils',
|
||||
)
|
||||
|
||||
const selectVar: InputVar = {
|
||||
type: PipelineInputVarType.select,
|
||||
label: 'Priority',
|
||||
variable: 'priority',
|
||||
max_length: 0,
|
||||
default_value: 'medium',
|
||||
required: false,
|
||||
tooltips: 'Select priority level',
|
||||
options: ['low', 'medium', 'high'],
|
||||
placeholder: 'Choose...',
|
||||
unit: undefined,
|
||||
allowed_file_upload_methods: undefined,
|
||||
allowed_file_types: undefined,
|
||||
allowed_file_extensions: undefined,
|
||||
}
|
||||
|
||||
const formData = convertToInputFieldFormData(selectVar)
|
||||
expect(formData.options).toEqual(['low', 'medium', 'high'])
|
||||
expect(formData.default).toBe('medium')
|
||||
|
||||
const restored = convertFormDataToINputField(formData)
|
||||
expect(restored.options).toEqual(['low', 'medium', 'high'])
|
||||
expect(restored.default_value).toBe('medium')
|
||||
})
|
||||
|
||||
it('should handle number field with unit', async () => {
|
||||
const { convertToInputFieldFormData, convertFormDataToINputField } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/input-field/editor/utils',
|
||||
)
|
||||
|
||||
const numberVar: InputVar = {
|
||||
type: PipelineInputVarType.number,
|
||||
label: 'Max Tokens',
|
||||
variable: 'max_tokens',
|
||||
max_length: 0,
|
||||
default_value: '1024',
|
||||
required: true,
|
||||
tooltips: undefined,
|
||||
options: [],
|
||||
placeholder: undefined,
|
||||
unit: 'tokens',
|
||||
allowed_file_upload_methods: undefined,
|
||||
allowed_file_types: undefined,
|
||||
allowed_file_extensions: undefined,
|
||||
}
|
||||
|
||||
const formData = convertToInputFieldFormData(numberVar)
|
||||
expect(formData.unit).toBe('tokens')
|
||||
expect(formData.default).toBe('1024')
|
||||
|
||||
const restored = convertFormDataToINputField(formData)
|
||||
expect(restored.unit).toBe('tokens')
|
||||
expect(restored.default_value).toBe('1024')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Omit optional fields', () => {
|
||||
it('should not include tooltips when undefined', async () => {
|
||||
const { convertToInputFieldFormData } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/input-field/editor/utils',
|
||||
)
|
||||
|
||||
const inputVar: InputVar = {
|
||||
type: PipelineInputVarType.textInput,
|
||||
label: 'Test',
|
||||
variable: 'test',
|
||||
max_length: 48,
|
||||
default_value: undefined,
|
||||
required: true,
|
||||
tooltips: undefined,
|
||||
options: [],
|
||||
placeholder: undefined,
|
||||
unit: undefined,
|
||||
allowed_file_upload_methods: undefined,
|
||||
allowed_file_types: undefined,
|
||||
allowed_file_extensions: undefined,
|
||||
}
|
||||
|
||||
const formData = convertToInputFieldFormData(inputVar)
|
||||
|
||||
// Optional fields should not be present
|
||||
expect('tooltips' in formData).toBe(false)
|
||||
expect('placeholder' in formData).toBe(false)
|
||||
expect('unit' in formData).toBe(false)
|
||||
expect('default' in formData).toBe(false)
|
||||
})
|
||||
|
||||
it('should include optional fields when explicitly set to empty string', async () => {
|
||||
const { convertToInputFieldFormData } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/input-field/editor/utils',
|
||||
)
|
||||
|
||||
const inputVar: InputVar = {
|
||||
type: PipelineInputVarType.textInput,
|
||||
label: 'Test',
|
||||
variable: 'test',
|
||||
max_length: 48,
|
||||
default_value: '',
|
||||
required: true,
|
||||
tooltips: '',
|
||||
options: [],
|
||||
placeholder: '',
|
||||
unit: '',
|
||||
allowed_file_upload_methods: undefined,
|
||||
allowed_file_types: undefined,
|
||||
allowed_file_extensions: undefined,
|
||||
}
|
||||
|
||||
const formData = convertToInputFieldFormData(inputVar)
|
||||
|
||||
expect(formData.default).toBe('')
|
||||
expect(formData.tooltips).toBe('')
|
||||
expect(formData.placeholder).toBe('')
|
||||
expect(formData.unit).toBe('')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Multiple fields workflow', () => {
|
||||
it('should process multiple fields independently', async () => {
|
||||
const { convertToInputFieldFormData, convertFormDataToINputField } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/input-field/editor/utils',
|
||||
)
|
||||
|
||||
const fields: InputVar[] = [
|
||||
{
|
||||
type: PipelineInputVarType.textInput,
|
||||
label: 'Name',
|
||||
variable: 'name',
|
||||
max_length: 48,
|
||||
default_value: 'Alice',
|
||||
required: true,
|
||||
tooltips: undefined,
|
||||
options: [],
|
||||
placeholder: undefined,
|
||||
unit: undefined,
|
||||
allowed_file_upload_methods: undefined,
|
||||
allowed_file_types: undefined,
|
||||
allowed_file_extensions: undefined,
|
||||
},
|
||||
{
|
||||
type: PipelineInputVarType.number,
|
||||
label: 'Count',
|
||||
variable: 'count',
|
||||
max_length: 0,
|
||||
default_value: '10',
|
||||
required: false,
|
||||
tooltips: undefined,
|
||||
options: [],
|
||||
placeholder: undefined,
|
||||
unit: 'items',
|
||||
allowed_file_upload_methods: undefined,
|
||||
allowed_file_types: undefined,
|
||||
allowed_file_extensions: undefined,
|
||||
},
|
||||
]
|
||||
|
||||
const formDataList = fields.map(f => convertToInputFieldFormData(f))
|
||||
const restoredFields = formDataList.map(fd => convertFormDataToINputField(fd))
|
||||
|
||||
expect(restoredFields).toHaveLength(2)
|
||||
expect(restoredFields[0].variable).toBe('name')
|
||||
expect(restoredFields[0].default_value).toBe('Alice')
|
||||
expect(restoredFields[1].variable).toBe('count')
|
||||
expect(restoredFields[1].default_value).toBe('10')
|
||||
expect(restoredFields[1].unit).toBe('items')
|
||||
})
|
||||
})
|
||||
})
|
||||
199
web/__tests__/rag-pipeline/input-field-editor-flow.test.ts
Normal file
@ -0,0 +1,199 @@
|
||||
/**
|
||||
* Integration test: Input field editor data conversion flow
|
||||
*
|
||||
* Tests the full pipeline: InputVar -> FormData -> InputVar roundtrip
|
||||
* and schema validation for various input types.
|
||||
*/
|
||||
import type { InputVar } from '@/models/pipeline'
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
import { PipelineInputVarType } from '@/models/pipeline'
|
||||
|
||||
// Mock the config module for VAR_ITEM_TEMPLATE_IN_PIPELINE
|
||||
vi.mock('@/config', () => ({
|
||||
VAR_ITEM_TEMPLATE_IN_PIPELINE: {
|
||||
type: 'text-input',
|
||||
label: '',
|
||||
variable: '',
|
||||
max_length: 48,
|
||||
required: false,
|
||||
options: [],
|
||||
allowed_file_upload_methods: [],
|
||||
allowed_file_types: [],
|
||||
allowed_file_extensions: [],
|
||||
},
|
||||
MAX_VAR_KEY_LENGTH: 30,
|
||||
RAG_PIPELINE_PREVIEW_CHUNK_NUM: 10,
|
||||
}))
|
||||
|
||||
// Import real functions (not mocked)
|
||||
const { convertToInputFieldFormData, convertFormDataToINputField } = await import(
|
||||
'@/app/components/rag-pipeline/components/panel/input-field/editor/utils',
|
||||
)
|
||||
|
||||
describe('Input Field Editor Data Flow', () => {
|
||||
describe('convertToInputFieldFormData', () => {
|
||||
it('should convert a text input InputVar to FormData', () => {
|
||||
const inputVar: InputVar = {
|
||||
type: 'text-input',
|
||||
label: 'Name',
|
||||
variable: 'user_name',
|
||||
max_length: 100,
|
||||
required: true,
|
||||
default_value: 'John',
|
||||
tooltips: 'Enter your name',
|
||||
placeholder: 'Type here...',
|
||||
options: [],
|
||||
} as InputVar
|
||||
|
||||
const formData = convertToInputFieldFormData(inputVar)
|
||||
|
||||
expect(formData.type).toBe('text-input')
|
||||
expect(formData.label).toBe('Name')
|
||||
expect(formData.variable).toBe('user_name')
|
||||
expect(formData.maxLength).toBe(100)
|
||||
expect(formData.required).toBe(true)
|
||||
expect(formData.default).toBe('John')
|
||||
expect(formData.tooltips).toBe('Enter your name')
|
||||
expect(formData.placeholder).toBe('Type here...')
|
||||
})
|
||||
|
||||
it('should handle file input with upload settings', () => {
|
||||
const inputVar: InputVar = {
|
||||
type: 'file',
|
||||
label: 'Document',
|
||||
variable: 'doc',
|
||||
required: false,
|
||||
allowed_file_upload_methods: ['local_file', 'remote_url'],
|
||||
allowed_file_types: ['document', 'image'],
|
||||
allowed_file_extensions: ['.pdf', '.jpg'],
|
||||
options: [],
|
||||
} as InputVar
|
||||
|
||||
const formData = convertToInputFieldFormData(inputVar)
|
||||
|
||||
expect(formData.allowedFileUploadMethods).toEqual(['local_file', 'remote_url'])
|
||||
expect(formData.allowedTypesAndExtensions).toEqual({
|
||||
allowedFileTypes: ['document', 'image'],
|
||||
allowedFileExtensions: ['.pdf', '.jpg'],
|
||||
})
|
||||
})
|
||||
|
||||
it('should use template defaults when no data provided', () => {
|
||||
const formData = convertToInputFieldFormData(undefined)
|
||||
|
||||
expect(formData.type).toBe('text-input')
|
||||
expect(formData.maxLength).toBe(48)
|
||||
expect(formData.required).toBe(false)
|
||||
})
|
||||
|
||||
it('should omit undefined/null optional fields', () => {
|
||||
const inputVar: InputVar = {
|
||||
type: 'text-input',
|
||||
label: 'Simple',
|
||||
variable: 'simple_var',
|
||||
max_length: 50,
|
||||
required: false,
|
||||
options: [],
|
||||
} as InputVar
|
||||
|
||||
const formData = convertToInputFieldFormData(inputVar)
|
||||
|
||||
expect(formData.default).toBeUndefined()
|
||||
expect(formData.tooltips).toBeUndefined()
|
||||
expect(formData.placeholder).toBeUndefined()
|
||||
expect(formData.unit).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('convertFormDataToINputField', () => {
|
||||
it('should convert FormData back to InputVar', () => {
|
||||
const formData = {
|
||||
type: PipelineInputVarType.textInput,
|
||||
label: 'Name',
|
||||
variable: 'user_name',
|
||||
maxLength: 100,
|
||||
required: true,
|
||||
default: 'John',
|
||||
tooltips: 'Enter your name',
|
||||
options: [],
|
||||
placeholder: 'Type here...',
|
||||
allowedTypesAndExtensions: {
|
||||
allowedFileTypes: undefined,
|
||||
allowedFileExtensions: undefined,
|
||||
},
|
||||
}
|
||||
|
||||
const inputVar = convertFormDataToINputField(formData)
|
||||
|
||||
expect(inputVar.type).toBe('text-input')
|
||||
expect(inputVar.label).toBe('Name')
|
||||
expect(inputVar.variable).toBe('user_name')
|
||||
expect(inputVar.max_length).toBe(100)
|
||||
expect(inputVar.required).toBe(true)
|
||||
expect(inputVar.default_value).toBe('John')
|
||||
expect(inputVar.tooltips).toBe('Enter your name')
|
||||
})
|
||||
})
|
||||
|
||||
describe('roundtrip conversion', () => {
|
||||
it('should preserve text input data through roundtrip', () => {
|
||||
const original: InputVar = {
|
||||
type: 'text-input',
|
||||
label: 'Question',
|
||||
variable: 'question',
|
||||
max_length: 200,
|
||||
required: true,
|
||||
default_value: 'What is AI?',
|
||||
tooltips: 'Enter your question',
|
||||
placeholder: 'Ask something...',
|
||||
options: [],
|
||||
} as InputVar
|
||||
|
||||
const formData = convertToInputFieldFormData(original)
|
||||
const restored = convertFormDataToINputField(formData)
|
||||
|
||||
expect(restored.type).toBe(original.type)
|
||||
expect(restored.label).toBe(original.label)
|
||||
expect(restored.variable).toBe(original.variable)
|
||||
expect(restored.max_length).toBe(original.max_length)
|
||||
expect(restored.required).toBe(original.required)
|
||||
expect(restored.default_value).toBe(original.default_value)
|
||||
expect(restored.tooltips).toBe(original.tooltips)
|
||||
expect(restored.placeholder).toBe(original.placeholder)
|
||||
})
|
||||
|
||||
it('should preserve number input data through roundtrip', () => {
|
||||
const original = {
|
||||
type: 'number',
|
||||
label: 'Temperature',
|
||||
variable: 'temp',
|
||||
required: false,
|
||||
default_value: '0.7',
|
||||
unit: '°C',
|
||||
options: [],
|
||||
} as InputVar
|
||||
|
||||
const formData = convertToInputFieldFormData(original)
|
||||
const restored = convertFormDataToINputField(formData)
|
||||
|
||||
expect(restored.type).toBe('number')
|
||||
expect(restored.unit).toBe('°C')
|
||||
expect(restored.default_value).toBe('0.7')
|
||||
})
|
||||
|
||||
it('should preserve select options through roundtrip', () => {
|
||||
const original: InputVar = {
|
||||
type: 'select',
|
||||
label: 'Mode',
|
||||
variable: 'mode',
|
||||
required: true,
|
||||
options: ['fast', 'balanced', 'quality'],
|
||||
} as InputVar
|
||||
|
||||
const formData = convertToInputFieldFormData(original)
|
||||
const restored = convertFormDataToINputField(formData)
|
||||
|
||||
expect(restored.options).toEqual(['fast', 'balanced', 'quality'])
|
||||
})
|
||||
})
|
||||
})
|
||||
277
web/__tests__/rag-pipeline/test-run-flow.test.ts
Normal file
@ -0,0 +1,277 @@
|
||||
/**
|
||||
* Integration test: Test run end-to-end flow
|
||||
*
|
||||
* Validates the data flow through test-run preparation hooks:
|
||||
* step navigation, datasource filtering, and data clearing.
|
||||
*/
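/*
 * Navigation sketch for useTestRunSteps as exercised below:
 *
 *   const { currentStep, steps, handleNextStep, handleBackStep } = useTestRunSteps()
 *   // currentStep starts at 1; steps map to 'dataSource' and 'documentProcessing'
 *   handleNextStep()  // -> 2
 *   handleBackStep()  // -> 1
 */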
|
||||
import { act, renderHook } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { BlockEnum } from '@/app/components/workflow/types'
|
||||
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => key,
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mutable holder so mock data can reference BlockEnum after imports
|
||||
const mockNodesHolder = vi.hoisted(() => ({ value: [] as Record<string, unknown>[] }))
|
||||
|
||||
vi.mock('reactflow', () => ({
|
||||
useNodes: () => mockNodesHolder.value,
|
||||
}))
|
||||
|
||||
mockNodesHolder.value = [
|
||||
{
|
||||
id: 'ds-1',
|
||||
data: {
|
||||
type: BlockEnum.DataSource,
|
||||
title: 'Local Files',
|
||||
datasource_type: 'upload_file',
|
||||
datasource_configurations: { datasource_label: 'Upload', upload_file_config: {} },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'ds-2',
|
||||
data: {
|
||||
type: BlockEnum.DataSource,
|
||||
title: 'Web Crawl',
|
||||
datasource_type: 'website_crawl',
|
||||
datasource_configurations: { datasource_label: 'Crawl' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'kb-1',
|
||||
data: {
|
||||
type: BlockEnum.KnowledgeBase,
|
||||
title: 'Knowledge Base',
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
// Mock the Zustand store used by the hooks
|
||||
const mockSetDocumentsData = vi.fn()
const mockSetSearchValue = vi.fn()
const mockSetSelectedPagesId = vi.fn()
const mockSetOnlineDocuments = vi.fn()
const mockSetCurrentDocument = vi.fn()
const mockSetStep = vi.fn()
const mockSetCrawlResult = vi.fn()
const mockSetWebsitePages = vi.fn()
const mockSetPreviewIndex = vi.fn()
const mockSetCurrentWebsite = vi.fn()
const mockSetOnlineDriveFileList = vi.fn()
const mockSetBucket = vi.fn()
const mockSetPrefix = vi.fn()
const mockSetKeywords = vi.fn()
const mockSetSelectedFileIds = vi.fn()

vi.mock('@/app/components/datasets/documents/create-from-pipeline/data-source/store', () => ({
  useDataSourceStore: () => ({
    getState: () => ({
      setDocumentsData: mockSetDocumentsData,
      setSearchValue: mockSetSearchValue,
      setSelectedPagesId: mockSetSelectedPagesId,
      setOnlineDocuments: mockSetOnlineDocuments,
      setCurrentDocument: mockSetCurrentDocument,
      setStep: mockSetStep,
      setCrawlResult: mockSetCrawlResult,
      setWebsitePages: mockSetWebsitePages,
      setPreviewIndex: mockSetPreviewIndex,
      setCurrentWebsite: mockSetCurrentWebsite,
      setOnlineDriveFileList: mockSetOnlineDriveFileList,
      setBucket: mockSetBucket,
      setPrefix: mockSetPrefix,
      setKeywords: mockSetKeywords,
      setSelectedFileIds: mockSetSelectedFileIds,
    }),
  }),
}))

vi.mock('@/models/datasets', () => ({
  CrawlStep: {
    init: 'init',
  },
}))

describe('Test Run Flow Integration', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  describe('Step Navigation', () => {
    it('should start at step 1 and navigate forward', async () => {
      const { useTestRunSteps } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useTestRunSteps())

      expect(result.current.currentStep).toBe(1)

      act(() => {
        result.current.handleNextStep()
      })

      expect(result.current.currentStep).toBe(2)
    })

    it('should navigate back from step 2 to step 1', async () => {
      const { useTestRunSteps } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useTestRunSteps())

      act(() => {
        result.current.handleNextStep()
      })
      expect(result.current.currentStep).toBe(2)

      act(() => {
        result.current.handleBackStep()
      })
      expect(result.current.currentStep).toBe(1)
    })

    it('should provide labeled steps', async () => {
      const { useTestRunSteps } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useTestRunSteps())

      expect(result.current.steps).toHaveLength(2)
      expect(result.current.steps[0].value).toBe('dataSource')
      expect(result.current.steps[1].value).toBe('documentProcessing')
    })
  })

  describe('Datasource Options', () => {
    it('should filter nodes to only DataSource type', async () => {
      const { useDatasourceOptions } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useDatasourceOptions())

      // Should only include DataSource nodes, not KnowledgeBase
      expect(result.current).toHaveLength(2)
      expect(result.current[0].value).toBe('ds-1')
      expect(result.current[1].value).toBe('ds-2')
    })

    it('should include node data in options', async () => {
      const { useDatasourceOptions } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useDatasourceOptions())

      expect(result.current[0].label).toBe('Local Files')
      expect(result.current[0].data.type).toBe(BlockEnum.DataSource)
    })
  })

  describe('Data Clearing Flow', () => {
    it('should clear online document data', async () => {
      const { useOnlineDocument } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useOnlineDocument())

      act(() => {
        result.current.clearOnlineDocumentData()
      })

      expect(mockSetDocumentsData).toHaveBeenCalledWith([])
      expect(mockSetSearchValue).toHaveBeenCalledWith('')
      expect(mockSetSelectedPagesId).toHaveBeenCalledWith(expect.any(Set))
      expect(mockSetOnlineDocuments).toHaveBeenCalledWith([])
      expect(mockSetCurrentDocument).toHaveBeenCalledWith(undefined)
    })

    it('should clear website crawl data', async () => {
      const { useWebsiteCrawl } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useWebsiteCrawl())

      act(() => {
        result.current.clearWebsiteCrawlData()
      })

      expect(mockSetStep).toHaveBeenCalledWith('init')
      expect(mockSetCrawlResult).toHaveBeenCalledWith(undefined)
      expect(mockSetCurrentWebsite).toHaveBeenCalledWith(undefined)
      expect(mockSetWebsitePages).toHaveBeenCalledWith([])
      expect(mockSetPreviewIndex).toHaveBeenCalledWith(-1)
    })

    it('should clear online drive data', async () => {
      const { useOnlineDrive } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useOnlineDrive())

      act(() => {
        result.current.clearOnlineDriveData()
      })

      expect(mockSetOnlineDriveFileList).toHaveBeenCalledWith([])
      expect(mockSetBucket).toHaveBeenCalledWith('')
      expect(mockSetPrefix).toHaveBeenCalledWith([])
      expect(mockSetKeywords).toHaveBeenCalledWith('')
      expect(mockSetSelectedFileIds).toHaveBeenCalledWith([])
    })
  })

  describe('Full Flow Simulation', () => {
    it('should support complete step navigation cycle', async () => {
      const { useTestRunSteps } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result } = renderHook(() => useTestRunSteps())

      // Start at step 1
      expect(result.current.currentStep).toBe(1)

      // Move to step 2
      act(() => {
        result.current.handleNextStep()
      })
      expect(result.current.currentStep).toBe(2)

      // Go back to step 1
      act(() => {
        result.current.handleBackStep()
      })
      expect(result.current.currentStep).toBe(1)

      // Move forward again
      act(() => {
        result.current.handleNextStep()
      })
      expect(result.current.currentStep).toBe(2)
    })

    it('should not regress when clearing all data sources in sequence', async () => {
      const {
        useOnlineDocument,
        useWebsiteCrawl,
        useOnlineDrive,
      } = await import(
        '@/app/components/rag-pipeline/components/panel/test-run/preparation/hooks',
      )
      const { result: docResult } = renderHook(() => useOnlineDocument())
      const { result: crawlResult } = renderHook(() => useWebsiteCrawl())
      const { result: driveResult } = renderHook(() => useOnlineDrive())

      // Clear all data sources
      act(() => {
        docResult.current.clearOnlineDocumentData()
        crawlResult.current.clearWebsiteCrawlData()
        driveResult.current.clearOnlineDriveData()
      })

      expect(mockSetDocumentsData).toHaveBeenCalledWith([])
      expect(mockSetStep).toHaveBeenCalledWith('init')
      expect(mockSetOnlineDriveFileList).toHaveBeenCalledWith([])
    })
  })
})
121
web/__tests__/share/text-generation-run-batch-flow.test.tsx
Normal file
121
web/__tests__/share/text-generation-run-batch-flow.test.tsx
Normal file
@ -0,0 +1,121 @@
/**
 * Integration test: RunBatch CSV upload → Run flow
 *
 * Tests the complete user journey:
 * Upload CSV → parse → enable run → click run → results finish → run again
 */
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import RunBatch from '@/app/components/share/text-generation/run-batch'

vi.mock('@/hooks/use-breakpoints', () => ({
  default: vi.fn(() => 'pc'),
  MediaType: { pc: 'pc', pad: 'pad', mobile: 'mobile' },
}))

// Capture the onParsed callback from CSVReader to simulate CSV uploads
let capturedOnParsed: ((data: string[][]) => void) | undefined

vi.mock('@/app/components/share/text-generation/run-batch/csv-reader', () => ({
  default: ({ onParsed }: { onParsed: (data: string[][]) => void }) => {
    capturedOnParsed = onParsed
    return <div data-testid="csv-reader">CSV Reader</div>
  },
}))

vi.mock('@/app/components/share/text-generation/run-batch/csv-download', () => ({
  default: ({ vars }: { vars: { name: string }[] }) => (
    <div data-testid="csv-download">
      {vars.map(v => v.name).join(', ')}
    </div>
  ),
}))

describe('RunBatch – integration flow', () => {
  const vars = [{ name: 'prompt' }, { name: 'context' }]

  beforeEach(() => {
    capturedOnParsed = undefined
    vi.clearAllMocks()
  })

  it('full lifecycle: upload CSV → run → finish → run again', async () => {
    const onSend = vi.fn()

    const { rerender } = render(
      <RunBatch vars={vars} onSend={onSend} isAllFinished />,
    )

    // Phase 1 – verify child components rendered
    expect(screen.getByTestId('csv-reader')).toBeInTheDocument()
    expect(screen.getByTestId('csv-download')).toHaveTextContent('prompt, context')

    // Run button should be disabled before CSV is parsed
    const runButton = screen.getByRole('button', { name: 'share.generation.run' })
    expect(runButton).toBeDisabled()

    // Phase 2 – simulate CSV upload
    const csvData = [
      ['prompt', 'context'],
      ['Hello', 'World'],
      ['Goodbye', 'Moon'],
    ]
    await act(async () => {
      capturedOnParsed?.(csvData)
    })

    // Run button should now be enabled
    await waitFor(() => {
      expect(runButton).not.toBeDisabled()
    })

    // Phase 3 – click run
    fireEvent.click(runButton)
    expect(onSend).toHaveBeenCalledTimes(1)
    expect(onSend).toHaveBeenCalledWith(csvData)

    // Phase 4 – simulate results still running
    rerender(<RunBatch vars={vars} onSend={onSend} isAllFinished={false} />)
    expect(runButton).toBeDisabled()

    // Phase 5 – results finish → can run again
    rerender(<RunBatch vars={vars} onSend={onSend} isAllFinished />)
    await waitFor(() => {
      expect(runButton).not.toBeDisabled()
    })

    onSend.mockClear()
    fireEvent.click(runButton)
    expect(onSend).toHaveBeenCalledTimes(1)
  })

  it('should remain disabled when CSV not uploaded even if all finished', () => {
    const onSend = vi.fn()
    render(<RunBatch vars={vars} onSend={onSend} isAllFinished />)

    const runButton = screen.getByRole('button', { name: 'share.generation.run' })
    expect(runButton).toBeDisabled()

    fireEvent.click(runButton)
    expect(onSend).not.toHaveBeenCalled()
  })

  it('should show spinner icon when results are still running', async () => {
    const onSend = vi.fn()
    const { container } = render(
      <RunBatch vars={vars} onSend={onSend} isAllFinished={false} />,
    )

    // Upload CSV first
    await act(async () => {
      capturedOnParsed?.([['data']])
    })

    // Button disabled + spinning icon
    const runButton = screen.getByRole('button', { name: 'share.generation.run' })
    expect(runButton).toBeDisabled()

    const icon = container.querySelector('svg')
    expect(icon).toHaveClass('animate-spin')
  })
})
218
web/__tests__/share/text-generation-run-once-flow.test.tsx
Normal file
218
web/__tests__/share/text-generation-run-once-flow.test.tsx
Normal file
@ -0,0 +1,218 @@
/**
 * Integration test: RunOnce form lifecycle
 *
 * Tests the complete user journey:
 * Init defaults → edit fields → submit → running state → stop
 */
import type { InputValueTypes } from '@/app/components/share/text-generation/types'
import type { PromptConfig, PromptVariable } from '@/models/debug'
import type { SiteInfo } from '@/models/share'
import type { VisionSettings } from '@/types/app'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import { useRef, useState } from 'react'
import RunOnce from '@/app/components/share/text-generation/run-once'
import { Resolution, TransferMethod } from '@/types/app'

vi.mock('@/hooks/use-breakpoints', () => ({
  default: vi.fn(() => 'pc'),
  MediaType: { pc: 'pc', pad: 'pad', mobile: 'mobile' },
}))

vi.mock('@/app/components/workflow/nodes/_base/components/editor/code-editor', () => ({
  default: ({ value, onChange }: { value?: string, onChange?: (val: string) => void }) => (
    <textarea data-testid="code-editor" value={value ?? ''} onChange={e => onChange?.(e.target.value)} />
  ),
}))

vi.mock('@/app/components/base/image-uploader/text-generation-image-uploader', () => ({
  default: () => <div data-testid="vision-uploader" />,
}))

vi.mock('@/app/components/base/file-uploader', () => ({
  FileUploaderInAttachmentWrapper: () => <div data-testid="file-uploader" />,
}))

// ----- helpers -----

const variable = (overrides: Partial<PromptVariable>): PromptVariable => ({
  key: 'k',
  name: 'Name',
  type: 'string',
  required: true,
  ...overrides,
})

const visionOff: VisionSettings = {
  enabled: false,
  number_limits: 0,
  detail: Resolution.low,
  transfer_methods: [TransferMethod.local_file],
  image_file_size_limit: 5,
}

const siteInfo: SiteInfo = { title: 'Test' }

/**
 * Stateful wrapper that mirrors what text-generation/index.tsx does:
 * owns `inputs` state and passes an `inputsRef`.
 */
function Harness({
  promptConfig,
  visionConfig = visionOff,
  onSendSpy,
  runControl = null,
}: {
  promptConfig: PromptConfig
  visionConfig?: VisionSettings
  onSendSpy: () => void
  runControl?: React.ComponentProps<typeof RunOnce>['runControl']
}) {
  const [inputs, setInputs] = useState<Record<string, InputValueTypes>>({})
  const inputsRef = useRef<Record<string, InputValueTypes>>({})

  return (
    <RunOnce
      siteInfo={siteInfo}
      promptConfig={promptConfig}
      inputs={inputs}
      inputsRef={inputsRef}
      onInputsChange={(updated) => {
        inputsRef.current = updated
        setInputs(updated)
      }}
      onSend={onSendSpy}
      visionConfig={visionConfig}
      onVisionFilesChange={vi.fn()}
      runControl={runControl}
    />
  )
}

// ----- tests -----

describe('RunOnce – integration flow', () => {
  it('full lifecycle: init → edit → submit → running → stop', async () => {
    const onSend = vi.fn()

    const config: PromptConfig = {
      prompt_template: 'tpl',
      prompt_variables: [
        variable({ key: 'name', name: 'Name', type: 'string', default: '' }),
        variable({ key: 'age', name: 'Age', type: 'number', default: '' }),
        variable({ key: 'bio', name: 'Bio', type: 'paragraph', default: '' }),
      ],
    }

    // Phase 1 – render, wait for initialisation
    const { rerender } = render(
      <Harness promptConfig={config} onSendSpy={onSend} />,
    )

    await waitFor(() => {
      expect(screen.getByPlaceholderText('Name')).toBeInTheDocument()
    })

    // Phase 2 – fill fields
    fireEvent.change(screen.getByPlaceholderText('Name'), { target: { value: 'Alice' } })
    fireEvent.change(screen.getByPlaceholderText('Age'), { target: { value: '30' } })
    fireEvent.change(screen.getByPlaceholderText('Bio'), { target: { value: 'Hello' } })

    // Phase 3 – submit
    fireEvent.click(screen.getByTestId('run-button'))
    expect(onSend).toHaveBeenCalledTimes(1)

    // Phase 4 – simulate "running" state
    const onStop = vi.fn()
    rerender(
      <Harness
        promptConfig={config}
        onSendSpy={onSend}
        runControl={{ onStop, isStopping: false }}
      />,
    )

    const stopBtn = screen.getByTestId('stop-button')
    expect(stopBtn).toBeInTheDocument()
    fireEvent.click(stopBtn)
    expect(onStop).toHaveBeenCalledTimes(1)

    // Phase 5 – simulate "stopping" state
    rerender(
      <Harness
        promptConfig={config}
        onSendSpy={onSend}
        runControl={{ onStop, isStopping: true }}
      />,
    )
    expect(screen.getByTestId('stop-button')).toBeDisabled()
  })

  it('clear resets all field types and allows re-submit', async () => {
    const onSend = vi.fn()

    const config: PromptConfig = {
      prompt_template: 'tpl',
      prompt_variables: [
        variable({ key: 'q', name: 'Question', type: 'string', default: 'Hi' }),
        variable({ key: 'flag', name: 'Flag', type: 'checkbox' }),
      ],
    }

    render(<Harness promptConfig={config} onSendSpy={onSend} />)

    await waitFor(() => {
      expect(screen.getByPlaceholderText('Question')).toHaveValue('Hi')
    })

    // Clear all
    fireEvent.click(screen.getByRole('button', { name: 'common.operation.clear' }))

    await waitFor(() => {
      expect(screen.getByPlaceholderText('Question')).toHaveValue('')
    })

    // Re-fill and submit
    fireEvent.change(screen.getByPlaceholderText('Question'), { target: { value: 'New' } })
    fireEvent.click(screen.getByTestId('run-button'))
    expect(onSend).toHaveBeenCalledTimes(1)
  })

  it('mixed input types: string + select + json_object', async () => {
    const onSend = vi.fn()

    const config: PromptConfig = {
      prompt_template: 'tpl',
      prompt_variables: [
        variable({ key: 'txt', name: 'Text', type: 'string', default: '' }),
        variable({
          key: 'sel',
          name: 'Dropdown',
          type: 'select',
          options: ['A', 'B'],
          default: 'A',
        }),
        variable({
          key: 'json',
          name: 'JSON',
          type: 'json_object' as PromptVariable['type'],
        }),
      ],
    }

    render(<Harness promptConfig={config} onSendSpy={onSend} />)

    await waitFor(() => {
      expect(screen.getByText('Text')).toBeInTheDocument()
      expect(screen.getByText('Dropdown')).toBeInTheDocument()
      expect(screen.getByText('JSON')).toBeInTheDocument()
    })

    // Edit text & json
    fireEvent.change(screen.getByPlaceholderText('Text'), { target: { value: 'hello' } })
    fireEvent.change(screen.getByTestId('code-editor'), { target: { value: '{"a":1}' } })

    fireEvent.click(screen.getByTestId('run-button'))
    expect(onSend).toHaveBeenCalledTimes(1)
  })
})
369
web/__tests__/tools/tool-browsing-and-filtering.test.tsx
Normal file
369
web/__tests__/tools/tool-browsing-and-filtering.test.tsx
Normal file
@ -0,0 +1,369 @@
import type { Collection } from '@/app/components/tools/types'
/**
 * Integration Test: Tool Browsing & Filtering Flow
 *
 * Tests the integration between ProviderList, TabSliderNew, LabelFilter,
 * Input (search), and card rendering. Verifies that tab switching, keyword
 * filtering, and label filtering work together correctly.
 */
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'

import { beforeEach, describe, expect, it, vi } from 'vitest'
import { CollectionType } from '@/app/components/tools/types'

// ---- Mocks ----

vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => {
      const map: Record<string, string> = {
        'type.builtIn': 'Built-in',
        'type.custom': 'Custom',
        'type.workflow': 'Workflow',
        'noTools': 'No tools found',
      }
      return map[key] ?? key
    },
  }),
}))

vi.mock('nuqs', () => ({
  useQueryState: () => ['builtin', vi.fn()],
}))

vi.mock('@/context/global-public-context', () => ({
  useGlobalPublicStore: () => ({ enable_marketplace: false }),
}))

vi.mock('@/app/components/plugins/hooks', () => ({
  useTags: () => ({
    getTagLabel: (key: string) => key,
    tags: [],
  }),
}))

vi.mock('@/service/use-plugins', () => ({
  useCheckInstalled: () => ({ data: null }),
  useInvalidateInstalledPluginList: () => vi.fn(),
}))

const mockCollections: Collection[] = [
  {
    id: 'google-search',
    name: 'google_search',
    author: 'Dify',
    description: { en_US: 'Google Search Tool', zh_Hans: 'Google搜索工具' },
    icon: 'https://example.com/google.png',
    label: { en_US: 'Google Search', zh_Hans: 'Google搜索' },
    type: CollectionType.builtIn,
    team_credentials: {},
    is_team_authorization: true,
    allow_delete: false,
    labels: ['search'],
  },
  {
    id: 'weather-api',
    name: 'weather_api',
    author: 'Dify',
    description: { en_US: 'Weather API Tool', zh_Hans: '天气API工具' },
    icon: 'https://example.com/weather.png',
    label: { en_US: 'Weather API', zh_Hans: '天气API' },
    type: CollectionType.builtIn,
    team_credentials: {},
    is_team_authorization: false,
    allow_delete: false,
    labels: ['utility'],
  },
  {
    id: 'my-custom-tool',
    name: 'my_custom_tool',
    author: 'User',
    description: { en_US: 'My Custom Tool', zh_Hans: '我的自定义工具' },
    icon: 'https://example.com/custom.png',
    label: { en_US: 'My Custom Tool', zh_Hans: '我的自定义工具' },
    type: CollectionType.custom,
    team_credentials: {},
    is_team_authorization: false,
    allow_delete: true,
    labels: [],
  },
  {
    id: 'workflow-tool-1',
    name: 'workflow_tool_1',
    author: 'User',
    description: { en_US: 'Workflow Tool', zh_Hans: '工作流工具' },
    icon: 'https://example.com/workflow.png',
    label: { en_US: 'Workflow Tool', zh_Hans: '工作流工具' },
    type: CollectionType.workflow,
    team_credentials: {},
    is_team_authorization: false,
    allow_delete: true,
    labels: [],
  },
]

const mockRefetch = vi.fn()
vi.mock('@/service/use-tools', () => ({
  useAllToolProviders: () => ({
    data: mockCollections,
    refetch: mockRefetch,
    isSuccess: true,
  }),
}))

vi.mock('@/app/components/base/tab-slider-new', () => ({
  default: ({ value, onChange, options }: { value: string, onChange: (v: string) => void, options: Array<{ value: string, text: string }> }) => (
    <div data-testid="tab-slider">
      {options.map((opt: { value: string, text: string }) => (
        <button
          key={opt.value}
          data-testid={`tab-${opt.value}`}
          data-active={value === opt.value ? 'true' : 'false'}
          onClick={() => onChange(opt.value)}
        >
          {opt.text}
        </button>
      ))}
    </div>
  ),
}))

vi.mock('@/app/components/base/input', () => ({
  default: ({ value, onChange, onClear, showLeftIcon, showClearIcon, wrapperClassName }: {
    value: string
    onChange: (e: { target: { value: string } }) => void
    onClear: () => void
    showLeftIcon?: boolean
    showClearIcon?: boolean
    wrapperClassName?: string
  }) => (
    <div data-testid="search-input-wrapper" className={wrapperClassName}>
      <input
        data-testid="search-input"
        value={value}
        onChange={onChange}
        data-left-icon={showLeftIcon ? 'true' : 'false'}
        data-clear-icon={showClearIcon ? 'true' : 'false'}
      />
      {showClearIcon && value && (
        <button data-testid="clear-search" onClick={onClear}>Clear</button>
      )}
    </div>
  ),
}))

vi.mock('@/app/components/plugins/card', () => ({
  default: ({ payload, className }: { payload: { brief: Record<string, string> | string, name: string }, className?: string }) => {
    const briefText = typeof payload.brief === 'object' ? payload.brief?.en_US || '' : payload.brief
    return (
      <div data-testid={`card-${payload.name}`} className={className}>
        <span>{payload.name}</span>
        <span>{briefText}</span>
      </div>
    )
  },
}))

vi.mock('@/app/components/plugins/card/card-more-info', () => ({
  default: ({ tags }: { tags: string[] }) => (
    <div data-testid="card-more-info">{tags.join(', ')}</div>
  ),
}))

vi.mock('@/app/components/tools/labels/filter', () => ({
  default: ({ value: _value, onChange }: { value: string[], onChange: (v: string[]) => void }) => (
    <div data-testid="label-filter">
      <button data-testid="filter-search" onClick={() => onChange(['search'])}>Filter: search</button>
      <button data-testid="filter-utility" onClick={() => onChange(['utility'])}>Filter: utility</button>
      <button data-testid="filter-clear" onClick={() => onChange([])}>Clear filter</button>
    </div>
  ),
}))

vi.mock('@/app/components/tools/provider/custom-create-card', () => ({
  default: () => <div data-testid="custom-create-card">Create Custom Tool</div>,
}))

vi.mock('@/app/components/tools/provider/detail', () => ({
  default: ({ collection, onHide }: { collection: Collection, onHide: () => void }) => (
    <div data-testid="provider-detail">
      <span data-testid="detail-name">{collection.name}</span>
      <button data-testid="detail-close" onClick={onHide}>Close</button>
    </div>
  ),
}))

vi.mock('@/app/components/tools/provider/empty', () => ({
  default: () => <div data-testid="workflow-empty">No workflow tools</div>,
}))

vi.mock('@/app/components/plugins/plugin-detail-panel', () => ({
  default: ({ detail, onHide }: { detail: unknown, onHide: () => void }) => (
    detail ? <div data-testid="plugin-detail-panel"><button onClick={onHide}>Close</button></div> : null
  ),
}))

vi.mock('@/app/components/plugins/marketplace/empty', () => ({
  default: ({ text }: { text: string }) => <div data-testid="empty-state">{text}</div>,
}))

vi.mock('@/app/components/tools/marketplace', () => ({
  default: () => null,
}))

vi.mock('@/app/components/tools/mcp', () => ({
  default: () => <div data-testid="mcp-list">MCP List</div>,
}))

vi.mock('@/utils/classnames', () => ({
  cn: (...args: unknown[]) => args.filter(Boolean).join(' '),
}))

vi.mock('@/app/components/workflow/block-selector/types', () => ({
  ToolTypeEnum: { BuiltIn: 'builtin', Custom: 'api', Workflow: 'workflow', MCP: 'mcp' },
}))

const { default: ProviderList } = await import('@/app/components/tools/provider-list')

const createWrapper = () => {
  const queryClient = new QueryClient({
    defaultOptions: { queries: { retry: false } },
  })
  return ({ children }: { children: React.ReactNode }) => (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  )
}

describe('Tool Browsing & Filtering Integration', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    cleanup()
  })

  it('renders tab options and built-in tools by default', () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    expect(screen.getByTestId('tab-slider')).toBeInTheDocument()
    expect(screen.getByTestId('tab-builtin')).toBeInTheDocument()
    expect(screen.getByTestId('tab-api')).toBeInTheDocument()
    expect(screen.getByTestId('tab-workflow')).toBeInTheDocument()
    expect(screen.getByTestId('tab-mcp')).toBeInTheDocument()

    expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
    expect(screen.getByTestId('card-weather_api')).toBeInTheDocument()
    expect(screen.queryByTestId('card-my_custom_tool')).not.toBeInTheDocument()
    expect(screen.queryByTestId('card-workflow_tool_1')).not.toBeInTheDocument()
  })

  it('filters tools by keyword search', async () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    const searchInput = screen.getByTestId('search-input')
    fireEvent.change(searchInput, { target: { value: 'Google' } })

    await waitFor(() => {
      expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
      expect(screen.queryByTestId('card-weather_api')).not.toBeInTheDocument()
    })
  })

  it('clears search keyword and shows all tools again', async () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    const searchInput = screen.getByTestId('search-input')
    fireEvent.change(searchInput, { target: { value: 'Google' } })
    await waitFor(() => {
      expect(screen.queryByTestId('card-weather_api')).not.toBeInTheDocument()
    })

    fireEvent.change(searchInput, { target: { value: '' } })
    await waitFor(() => {
      expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
      expect(screen.getByTestId('card-weather_api')).toBeInTheDocument()
    })
  })

  it('filters tools by label tags', async () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    fireEvent.click(screen.getByTestId('filter-search'))

    await waitFor(() => {
      expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
      expect(screen.queryByTestId('card-weather_api')).not.toBeInTheDocument()
    })
  })

  it('clears label filter and shows all tools', async () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    fireEvent.click(screen.getByTestId('filter-utility'))
    await waitFor(() => {
      expect(screen.queryByTestId('card-google_search')).not.toBeInTheDocument()
      expect(screen.getByTestId('card-weather_api')).toBeInTheDocument()
    })

    fireEvent.click(screen.getByTestId('filter-clear'))
    await waitFor(() => {
      expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
      expect(screen.getByTestId('card-weather_api')).toBeInTheDocument()
    })
  })

  it('combines keyword search and label filter', async () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    fireEvent.click(screen.getByTestId('filter-search'))
    await waitFor(() => {
      expect(screen.getByTestId('card-google_search')).toBeInTheDocument()
    })

    const searchInput = screen.getByTestId('search-input')
    fireEvent.change(searchInput, { target: { value: 'Weather' } })
    await waitFor(() => {
      expect(screen.queryByTestId('card-google_search')).not.toBeInTheDocument()
      expect(screen.queryByTestId('card-weather_api')).not.toBeInTheDocument()
    })
  })

  it('opens provider detail when clicking a non-plugin collection card', async () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    const card = screen.getByTestId('card-google_search')
    fireEvent.click(card.parentElement!)

    await waitFor(() => {
      expect(screen.getByTestId('provider-detail')).toBeInTheDocument()
      expect(screen.getByTestId('detail-name')).toHaveTextContent('google_search')
    })
  })

  it('closes provider detail and deselects current provider', async () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    const card = screen.getByTestId('card-google_search')
    fireEvent.click(card.parentElement!)

    await waitFor(() => {
      expect(screen.getByTestId('provider-detail')).toBeInTheDocument()
    })

    fireEvent.click(screen.getByTestId('detail-close'))
    await waitFor(() => {
      expect(screen.queryByTestId('provider-detail')).not.toBeInTheDocument()
    })
  })

  it('shows label filter for non-MCP tabs', () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    expect(screen.getByTestId('label-filter')).toBeInTheDocument()
  })

  it('shows search input on all tabs', () => {
    render(<ProviderList />, { wrapper: createWrapper() })

    expect(screen.getByTestId('search-input')).toBeInTheDocument()
  })
})
239
web/__tests__/tools/tool-data-processing.test.ts
Normal file
239
web/__tests__/tools/tool-data-processing.test.ts
Normal file
@ -0,0 +1,239 @@
/**
 * Integration Test: Tool Data Processing Pipeline
 *
 * Tests the integration between tool utility functions and type conversions.
 * Verifies that data flows correctly through the processing pipeline:
 * raw API data → form schemas → form values → configured values.
 */
import { describe, expect, it } from 'vitest'

import { addFileInfos, sortAgentSorts } from '@/app/components/tools/utils/index'
import {
  addDefaultValue,
  generateFormValue,
  getConfiguredValue,
  getPlainValue,
  getStructureValue,
  toolCredentialToFormSchemas,
  toolParametersToFormSchemas,
  toType,
  triggerEventParametersToFormSchemas,
} from '@/app/components/tools/utils/to-form-schema'

describe('Tool Data Processing Pipeline Integration', () => {
  describe('End-to-end: API schema → form schema → form value', () => {
    it('processes tool parameters through the full pipeline', () => {
      const rawParameters = [
        {
          name: 'query',
          label: { en_US: 'Search Query', zh_Hans: '搜索查询' },
          type: 'string',
          required: true,
          default: 'hello',
          form: 'llm',
          human_description: { en_US: 'Enter your search query', zh_Hans: '输入搜索查询' },
          llm_description: 'The search query string',
          options: [],
        },
        {
          name: 'limit',
          label: { en_US: 'Result Limit', zh_Hans: '结果限制' },
          type: 'number',
          required: false,
          default: '10',
          form: 'form',
          human_description: { en_US: 'Maximum results', zh_Hans: '最大结果数' },
          llm_description: 'Limit for results',
          options: [],
        },
      ]

      const formSchemas = toolParametersToFormSchemas(rawParameters as unknown as Parameters<typeof toolParametersToFormSchemas>[0])
      expect(formSchemas).toHaveLength(2)
      expect(formSchemas[0].variable).toBe('query')
      expect(formSchemas[0].required).toBe(true)
      expect(formSchemas[0].type).toBe('text-input')
      expect(formSchemas[1].variable).toBe('limit')
      expect(formSchemas[1].type).toBe('number-input')

      const withDefaults = addDefaultValue({}, formSchemas)
      expect(withDefaults.query).toBe('hello')
      expect(withDefaults.limit).toBe('10')

      const formValues = generateFormValue({}, formSchemas, false)
      expect(formValues).toBeDefined()
      expect(formValues.query).toBeDefined()
      expect(formValues.limit).toBeDefined()
    })

    it('processes tool credentials through the pipeline', () => {
      const rawCredentials = [
        {
          name: 'api_key',
          label: { en_US: 'API Key', zh_Hans: 'API 密钥' },
          type: 'secret-input',
          required: true,
          default: '',
          placeholder: { en_US: 'Enter API key', zh_Hans: '输入 API 密钥' },
          help: { en_US: 'Your API key', zh_Hans: '你的 API 密钥' },
          url: 'https://example.com/get-key',
          options: [],
        },
      ]

      const credentialSchemas = toolCredentialToFormSchemas(rawCredentials as Parameters<typeof toolCredentialToFormSchemas>[0])
      expect(credentialSchemas).toHaveLength(1)
      expect(credentialSchemas[0].variable).toBe('api_key')
      expect(credentialSchemas[0].required).toBe(true)
      expect(credentialSchemas[0].type).toBe('secret-input')
    })

    it('processes trigger event parameters through the pipeline', () => {
      const rawParams = [
        {
          name: 'event_type',
          label: { en_US: 'Event Type', zh_Hans: '事件类型' },
          type: 'select',
          required: true,
          default: 'push',
          form: 'form',
          description: { en_US: 'Type of event', zh_Hans: '事件类型' },
          options: [
            { value: 'push', label: { en_US: 'Push', zh_Hans: '推送' } },
            { value: 'pull', label: { en_US: 'Pull', zh_Hans: '拉取' } },
          ],
        },
      ]

      const schemas = triggerEventParametersToFormSchemas(rawParams as unknown as Parameters<typeof triggerEventParametersToFormSchemas>[0])
      expect(schemas).toHaveLength(1)
      expect(schemas[0].name).toBe('event_type')
      expect(schemas[0].type).toBe('select')
      expect(schemas[0].options).toHaveLength(2)
    })
  })

  describe('Type conversion integration', () => {
    it('converts all supported types correctly', () => {
      const typeConversions = [
        { input: 'string', expected: 'text-input' },
        { input: 'number', expected: 'number-input' },
        { input: 'boolean', expected: 'checkbox' },
        { input: 'select', expected: 'select' },
        { input: 'secret-input', expected: 'secret-input' },
        { input: 'file', expected: 'file' },
        { input: 'files', expected: 'files' },
      ]

      typeConversions.forEach(({ input, expected }) => {
        expect(toType(input)).toBe(expected)
      })
    })

    it('returns the original type for unrecognized types', () => {
      expect(toType('unknown-type')).toBe('unknown-type')
      expect(toType('app-selector')).toBe('app-selector')
    })
  })

  describe('Value extraction integration', () => {
    it('wraps values with getStructureValue and extracts inner value with getPlainValue', () => {
      const plainInput = { query: 'test', limit: 10 }
      const structured = getStructureValue(plainInput)

      expect(structured.query).toEqual({ value: 'test' })
      expect(structured.limit).toEqual({ value: 10 })

      const objectStructured = {
        query: { value: { type: 'constant', content: 'test search' } },
        limit: { value: { type: 'constant', content: 10 } },
      }
      const extracted = getPlainValue(objectStructured)
      expect(extracted.query).toEqual({ type: 'constant', content: 'test search' })
      expect(extracted.limit).toEqual({ type: 'constant', content: 10 })
    })

    it('handles getConfiguredValue for workflow tool configurations', () => {
      const formSchemas = [
        { variable: 'query', type: 'text-input', default: 'default-query' },
        { variable: 'format', type: 'select', default: 'json' },
      ]

      const configured = getConfiguredValue({}, formSchemas)
      expect(configured).toBeDefined()
      expect(configured.query).toBeDefined()
      expect(configured.format).toBeDefined()
    })

    it('preserves existing values in getConfiguredValue', () => {
      const formSchemas = [
        { variable: 'query', type: 'text-input', default: 'default-query' },
      ]

      const configured = getConfiguredValue({ query: 'my-existing-query' }, formSchemas)
      expect(configured.query).toBe('my-existing-query')
    })
  })

  describe('Agent utilities integration', () => {
    it('sorts agent thoughts and enriches with file infos end-to-end', () => {
      const thoughts = [
        { id: 't3', position: 3, tool: 'search', files: ['f1'] },
        { id: 't1', position: 1, tool: 'analyze', files: [] },
        { id: 't2', position: 2, tool: 'summarize', files: ['f2'] },
      ] as Parameters<typeof sortAgentSorts>[0]

      const messageFiles = [
        { id: 'f1', name: 'result.txt', type: 'document' },
        { id: 'f2', name: 'summary.pdf', type: 'document' },
      ] as Parameters<typeof addFileInfos>[1]

      const sorted = sortAgentSorts(thoughts)
      expect(sorted[0].id).toBe('t1')
      expect(sorted[1].id).toBe('t2')
      expect(sorted[2].id).toBe('t3')

      const enriched = addFileInfos(sorted, messageFiles)
      expect(enriched[0].message_files).toBeUndefined()
      expect(enriched[1].message_files).toHaveLength(1)
      expect(enriched[1].message_files![0].id).toBe('f2')
      expect(enriched[2].message_files).toHaveLength(1)
      expect(enriched[2].message_files![0].id).toBe('f1')
    })

    it('handles null inputs gracefully in the pipeline', () => {
      const sortedNull = sortAgentSorts(null as never)
      expect(sortedNull).toBeNull()

      const enrichedNull = addFileInfos(null as never, [])
      expect(enrichedNull).toBeNull()

      // addFileInfos with empty list and null files returns the mapped (empty) list
      const enrichedEmptyList = addFileInfos([], null as never)
      expect(enrichedEmptyList).toEqual([])
    })
  })

  describe('Default value application', () => {
    it('applies defaults only to empty fields, preserving user values', () => {
      const userValues = { api_key: 'user-provided-key' }
      const schemas = [
        { variable: 'api_key', type: 'text-input', default: 'default-key', name: 'api_key' },
        { variable: 'secret', type: 'secret-input', default: 'default-secret', name: 'secret' },
      ]

      const result = addDefaultValue(userValues, schemas)
      expect(result.api_key).toBe('user-provided-key')
      expect(result.secret).toBe('default-secret')
    })

    it('handles boolean type conversion in defaults', () => {
      const schemas = [
        { variable: 'enabled', type: 'boolean', default: 'true', name: 'enabled' },
      ]

      const result = addDefaultValue({ enabled: 'true' }, schemas)
      expect(result.enabled).toBe(true)
    })
  })
})
548
web/__tests__/tools/tool-provider-detail-flow.test.tsx
Normal file
548
web/__tests__/tools/tool-provider-detail-flow.test.tsx
Normal file
@ -0,0 +1,548 @@
import type { Collection } from '@/app/components/tools/types'
/**
 * Integration Test: Tool Provider Detail Flow
 *
 * Tests the integration between ProviderDetail, ConfigCredential,
 * EditCustomToolModal, WorkflowToolModal, and service APIs.
 * Verifies that different provider types render correctly and
 * handle auth/edit/delete flows.
 */
import { cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'

import { beforeEach, describe, expect, it, vi } from 'vitest'
import { CollectionType } from '@/app/components/tools/types'

vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string, opts?: Record<string, unknown>) => {
      const map: Record<string, string> = {
        'auth.authorized': 'Authorized',
        'auth.unauthorized': 'Set up credentials',
        'auth.setup': 'NEEDS SETUP',
        'createTool.editAction': 'Edit',
        'createTool.deleteToolConfirmTitle': 'Delete Tool',
        'createTool.deleteToolConfirmContent': 'Are you sure?',
        'createTool.toolInput.title': 'Tool Input',
        'createTool.toolInput.required': 'Required',
        'openInStudio': 'Open in Studio',
        'api.actionSuccess': 'Action succeeded',
      }
      if (key === 'detailPanel.actionNum')
        return `${opts?.num ?? 0} actions`
      if (key === 'includeToolNum')
        return `${opts?.num ?? 0} actions`
      return map[key] ?? key
    },
  }),
}))

vi.mock('@/context/i18n', () => ({
  useLocale: () => 'en',
}))

vi.mock('@/i18n-config/language', () => ({
  getLanguage: () => 'en_US',
}))

vi.mock('@/context/app-context', () => ({
  useAppContext: () => ({
    isCurrentWorkspaceManager: true,
  }),
}))

const mockSetShowModelModal = vi.fn()
vi.mock('@/context/modal-context', () => ({
  useModalContext: () => ({
    setShowModelModal: mockSetShowModelModal,
  }),
}))

vi.mock('@/context/provider-context', () => ({
  useProviderContext: () => ({
    modelProviders: [
      { provider: 'model-provider-1', name: 'Model Provider 1' },
    ],
  }),
}))

const mockFetchBuiltInToolList = vi.fn().mockResolvedValue([
  { name: 'tool-1', description: { en_US: 'Tool 1' }, parameters: [] },
  { name: 'tool-2', description: { en_US: 'Tool 2' }, parameters: [] },
])
const mockFetchModelToolList = vi.fn().mockResolvedValue([])
const mockFetchCustomToolList = vi.fn().mockResolvedValue([])
const mockFetchCustomCollection = vi.fn().mockResolvedValue({
  credentials: { auth_type: 'none' },
  schema: '',
  schema_type: 'openapi',
})
const mockFetchWorkflowToolDetail = vi.fn().mockResolvedValue({
  workflow_app_id: 'app-123',
  tool: {
    parameters: [
      { name: 'query', llm_description: 'Search query', form: 'text', required: true, type: 'string' },
    ],
    labels: ['search'],
  },
})
const mockUpdateBuiltInToolCredential = vi.fn().mockResolvedValue({})
const mockRemoveBuiltInToolCredential = vi.fn().mockResolvedValue({})
const mockUpdateCustomCollection = vi.fn().mockResolvedValue({})
const mockRemoveCustomCollection = vi.fn().mockResolvedValue({})
const mockDeleteWorkflowTool = vi.fn().mockResolvedValue({})
const mockSaveWorkflowToolProvider = vi.fn().mockResolvedValue({})

vi.mock('@/service/tools', () => ({
  fetchBuiltInToolList: (...args: unknown[]) => mockFetchBuiltInToolList(...args),
  fetchModelToolList: (...args: unknown[]) => mockFetchModelToolList(...args),
  fetchCustomToolList: (...args: unknown[]) => mockFetchCustomToolList(...args),
  fetchCustomCollection: (...args: unknown[]) => mockFetchCustomCollection(...args),
  fetchWorkflowToolDetail: (...args: unknown[]) => mockFetchWorkflowToolDetail(...args),
  updateBuiltInToolCredential: (...args: unknown[]) => mockUpdateBuiltInToolCredential(...args),
  removeBuiltInToolCredential: (...args: unknown[]) => mockRemoveBuiltInToolCredential(...args),
  updateCustomCollection: (...args: unknown[]) => mockUpdateCustomCollection(...args),
  removeCustomCollection: (...args: unknown[]) => mockRemoveCustomCollection(...args),
  deleteWorkflowTool: (...args: unknown[]) => mockDeleteWorkflowTool(...args),
  saveWorkflowToolProvider: (...args: unknown[]) => mockSaveWorkflowToolProvider(...args),
  fetchBuiltInToolCredential: vi.fn().mockResolvedValue({}),
  fetchBuiltInToolCredentialSchema: vi.fn().mockResolvedValue([]),
}))

vi.mock('@/service/use-tools', () => ({
  useInvalidateAllWorkflowTools: () => vi.fn(),
}))

vi.mock('@/utils/classnames', () => ({
  cn: (...args: unknown[]) => args.filter(Boolean).join(' '),
}))

vi.mock('@/utils/var', () => ({
  basePath: '',
}))

vi.mock('@/app/components/base/drawer', () => ({
  default: ({ isOpen, children, onClose }: { isOpen: boolean, children: React.ReactNode, onClose: () => void }) => (
    isOpen
      ? (
        <div data-testid="drawer">
          {children}
          <button data-testid="drawer-close" onClick={onClose}>Close Drawer</button>
        </div>
      )
      : null
  ),
}))

vi.mock('@/app/components/base/confirm', () => ({
  default: ({ title, isShow, onConfirm, onCancel }: {
    title: string
    content: string
    isShow: boolean
    onConfirm: () => void
    onCancel: () => void
  }) => (
    isShow
      ? (
        <div data-testid="confirm-dialog">
          <span>{title}</span>
          <button data-testid="confirm-ok" onClick={onConfirm}>Confirm</button>
          <button data-testid="confirm-cancel" onClick={onCancel}>Cancel</button>
        </div>
      )
      : null
  ),
}))

vi.mock('@/app/components/base/toast', () => ({
  default: { notify: vi.fn() },
}))

vi.mock('@/app/components/base/icons/src/vender/line/general', () => ({
  LinkExternal02: () => <span data-testid="link-icon" />,
  Settings01: () => <span data-testid="settings-icon" />,
}))

vi.mock('@remixicon/react', () => ({
  RiCloseLine: () => <span data-testid="close-icon" />,
}))

vi.mock('@/app/components/header/account-setting/model-provider-page/declarations', () => ({
  ConfigurationMethodEnum: { predefinedModel: 'predefined-model' },
}))

vi.mock('@/app/components/header/indicator', () => ({
  default: ({ color }: { color: string }) => <span data-testid={`indicator-${color}`} />,
}))

vi.mock('@/app/components/plugins/card/base/card-icon', () => ({
  default: ({ src }: { src: string }) => <div data-testid="card-icon" data-src={typeof src === 'string' ? src : 'emoji'} />,
}))

vi.mock('@/app/components/plugins/card/base/description', () => ({
  default: ({ text }: { text: string }) => <div data-testid="description">{text}</div>,
}))

vi.mock('@/app/components/plugins/card/base/org-info', () => ({
  default: ({ orgName, packageName }: { orgName: string, packageName: string }) => (
    <div data-testid="org-info">
      {orgName}
      {' '}
      /
      {' '}
      {packageName}
    </div>
  ),
}))

vi.mock('@/app/components/plugins/card/base/title', () => ({
  default: ({ title }: { title: string }) => <div data-testid="title">{title}</div>,
}))

vi.mock('@/app/components/tools/edit-custom-collection-modal', () => ({
  default: ({ onHide, onEdit, onRemove }: { onHide: () => void, onEdit: (data: unknown) => void, onRemove: () => void, payload: unknown }) => (
    <div data-testid="edit-custom-modal">
      <button data-testid="custom-modal-hide" onClick={onHide}>Hide</button>
      <button data-testid="custom-modal-save" onClick={() => onEdit({ name: 'updated', labels: [] })}>Save</button>
      <button data-testid="custom-modal-remove" onClick={onRemove}>Remove</button>
    </div>
  ),
}))

vi.mock('@/app/components/tools/setting/build-in/config-credentials', () => ({
  default: ({ onCancel, onSaved, onRemove }: { collection: Collection, onCancel: () => void, onSaved: (v: Record<string, unknown>) => void, onRemove: () => void }) => (
    <div data-testid="config-credential">
      <button data-testid="cred-cancel" onClick={onCancel}>Cancel</button>
      <button data-testid="cred-save" onClick={() => onSaved({ api_key: 'test-key' })}>Save</button>
      <button data-testid="cred-remove" onClick={onRemove}>Remove</button>
    </div>
  ),
}))

vi.mock('@/app/components/tools/workflow-tool', () => ({
  default: ({ onHide, onSave, onRemove }: { payload: unknown, onHide: () => void, onSave: (d: unknown) => void, onRemove: () => void }) => (
    <div data-testid="workflow-tool-modal">
      <button data-testid="wf-modal-hide" onClick={onHide}>Hide</button>
      <button data-testid="wf-modal-save" onClick={() => onSave({ name: 'updated-wf' })}>Save</button>
      <button data-testid="wf-modal-remove" onClick={onRemove}>Remove</button>
    </div>
  ),
}))

vi.mock('@/app/components/tools/provider/tool-item', () => ({
  default: ({ tool }: { tool: { name: string } }) => (
    <div data-testid={`tool-item-${tool.name}`}>{tool.name}</div>
  ),
}))

const { default: ProviderDetail } = await import('@/app/components/tools/provider/detail')

const makeCollection = (overrides: Partial<Collection> = {}): Collection => ({
  id: 'test-collection',
  name: 'test_collection',
  author: 'Dify',
  description: { en_US: 'Test collection description', zh_Hans: '测试集合描述' },
  icon: 'https://example.com/icon.png',
  label: { en_US: 'Test Collection', zh_Hans: '测试集合' },
  type: CollectionType.builtIn,
  team_credentials: {},
  is_team_authorization: false,
  allow_delete: false,
  labels: [],
  ...overrides,
})

const mockOnHide = vi.fn()
const mockOnRefreshData = vi.fn()

describe('Tool Provider Detail Flow Integration', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    cleanup()
  })

  describe('Built-in Provider', () => {
    it('renders provider detail with title, author, and description', async () => {
      const collection = makeCollection()
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByTestId('title')).toHaveTextContent('Test Collection')
        expect(screen.getByTestId('org-info')).toHaveTextContent('Dify')
        expect(screen.getByTestId('description')).toHaveTextContent('Test collection description')
      })
    })

    it('loads tool list from API on mount', async () => {
      const collection = makeCollection()
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(mockFetchBuiltInToolList).toHaveBeenCalledWith('test_collection')
      })

      await waitFor(() => {
        expect(screen.getByTestId('tool-item-tool-1')).toBeInTheDocument()
        expect(screen.getByTestId('tool-item-tool-2')).toBeInTheDocument()
      })
    })

    it('shows "Set up credentials" button when not authorized and needs auth', async () => {
      const collection = makeCollection({
        allow_delete: true,
        is_team_authorization: false,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByText('Set up credentials')).toBeInTheDocument()
      })
    })

    it('shows "Authorized" button when authorized', async () => {
      const collection = makeCollection({
        allow_delete: true,
        is_team_authorization: true,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByText('Authorized')).toBeInTheDocument()
        expect(screen.getByTestId('indicator-green')).toBeInTheDocument()
      })
    })

    it('opens ConfigCredential when clicking auth button (built-in type)', async () => {
      const collection = makeCollection({
        allow_delete: true,
        is_team_authorization: false,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByText('Set up credentials')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByText('Set up credentials'))
      await waitFor(() => {
        expect(screen.getByTestId('config-credential')).toBeInTheDocument()
      })
    })

    it('saves credential and refreshes data', async () => {
      const collection = makeCollection({
        allow_delete: true,
        is_team_authorization: false,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByText('Set up credentials')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByText('Set up credentials'))
      await waitFor(() => {
        expect(screen.getByTestId('config-credential')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByTestId('cred-save'))
      await waitFor(() => {
        expect(mockUpdateBuiltInToolCredential).toHaveBeenCalledWith('test_collection', { api_key: 'test-key' })
        expect(mockOnRefreshData).toHaveBeenCalled()
      })
    })

    it('removes credential and refreshes data', async () => {
      const collection = makeCollection({
        allow_delete: true,
        is_team_authorization: false,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        fireEvent.click(screen.getByText('Set up credentials'))
      })

      await waitFor(() => {
        expect(screen.getByTestId('config-credential')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByTestId('cred-remove'))
      await waitFor(() => {
        expect(mockRemoveBuiltInToolCredential).toHaveBeenCalledWith('test_collection')
        expect(mockOnRefreshData).toHaveBeenCalled()
      })
    })
  })

  describe('Model Provider', () => {
    it('opens model modal when clicking auth button for model type', async () => {
      const collection = makeCollection({
        id: 'model-provider-1',
        type: CollectionType.model,
        allow_delete: true,
        is_team_authorization: false,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByText('Set up credentials')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByText('Set up credentials'))
      await waitFor(() => {
        expect(mockSetShowModelModal).toHaveBeenCalledWith(
          expect.objectContaining({
            payload: expect.objectContaining({
              currentProvider: expect.objectContaining({ provider: 'model-provider-1' }),
            }),
          }),
        )
      })
    })
  })

  describe('Custom Provider', () => {
    it('fetches custom collection details and shows edit button', async () => {
      const collection = makeCollection({
        type: CollectionType.custom,
        allow_delete: true,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(mockFetchCustomCollection).toHaveBeenCalledWith('test_collection')
      })

      await waitFor(() => {
        expect(screen.getByText('Edit')).toBeInTheDocument()
      })
    })

    it('opens edit modal and saves changes', async () => {
      const collection = makeCollection({
        type: CollectionType.custom,
        allow_delete: true,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByText('Edit')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByText('Edit'))
      await waitFor(() => {
        expect(screen.getByTestId('edit-custom-modal')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByTestId('custom-modal-save'))
      await waitFor(() => {
        expect(mockUpdateCustomCollection).toHaveBeenCalled()
        expect(mockOnRefreshData).toHaveBeenCalled()
      })
    })

    it('shows delete confirmation and removes collection', async () => {
      const collection = makeCollection({
        type: CollectionType.custom,
        allow_delete: true,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByText('Edit')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByText('Edit'))
      await waitFor(() => {
        expect(screen.getByTestId('edit-custom-modal')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByTestId('custom-modal-remove'))
      await waitFor(() => {
        expect(screen.getByTestId('confirm-dialog')).toBeInTheDocument()
        expect(screen.getByText('Delete Tool')).toBeInTheDocument()
      })

      fireEvent.click(screen.getByTestId('confirm-ok'))
      await waitFor(() => {
        expect(mockRemoveCustomCollection).toHaveBeenCalledWith('test_collection')
        expect(mockOnRefreshData).toHaveBeenCalled()
      })
    })
  })

  describe('Workflow Provider', () => {
    it('fetches workflow tool detail and shows "Open in Studio" and "Edit" buttons', async () => {
      const collection = makeCollection({
        type: CollectionType.workflow,
        allow_delete: true,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(mockFetchWorkflowToolDetail).toHaveBeenCalledWith('test-collection')
      })

      await waitFor(() => {
        expect(screen.getByText('Open in Studio')).toBeInTheDocument()
        expect(screen.getByText('Edit')).toBeInTheDocument()
      })
    })

    it('shows workflow tool parameters', async () => {
      const collection = makeCollection({
        type: CollectionType.workflow,
        allow_delete: true,
      })
      render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)

      await waitFor(() => {
        expect(screen.getByText('query')).toBeInTheDocument()
        expect(screen.getByText('string')).toBeInTheDocument()
        expect(screen.getByText('Search query')).toBeInTheDocument()
})
|
||||
})
|
||||
|
||||
it('deletes workflow tool through confirmation dialog', async () => {
|
||||
const collection = makeCollection({
|
||||
type: CollectionType.workflow,
|
||||
allow_delete: true,
|
||||
})
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Edit')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByText('Edit'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('workflow-tool-modal')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('wf-modal-remove'))
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('confirm-dialog')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('confirm-ok'))
|
||||
await waitFor(() => {
|
||||
expect(mockDeleteWorkflowTool).toHaveBeenCalledWith('test-collection')
|
||||
expect(mockOnRefreshData).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('Drawer Interaction', () => {
|
||||
it('calls onHide when closing the drawer', async () => {
|
||||
const collection = makeCollection()
|
||||
render(<ProviderDetail collection={collection} onHide={mockOnHide} onRefreshData={mockOnRefreshData} />)
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByTestId('drawer')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
fireEvent.click(screen.getByTestId('drawer-close'))
|
||||
expect(mockOnHide).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
@ -1,261 +0,0 @@
|
||||
/**
|
||||
* MAX_PARALLEL_LIMIT Configuration Bug Test
|
||||
*
|
||||
* This test reproduces and verifies the fix for issue #23083:
|
||||
* MAX_PARALLEL_LIMIT environment variable does not take effect in iteration panel
|
||||
*/
|
||||
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
|
||||
// Mock environment variables before importing constants
|
||||
const originalEnv = process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
|
||||
|
||||
// Test with different environment values
|
||||
function setupEnvironment(value?: string) {
|
||||
if (value)
|
||||
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = value
|
||||
else
|
||||
delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
|
||||
|
||||
// Clear module cache to force re-evaluation
|
||||
vi.resetModules()
|
||||
}
|
||||
|
||||
function restoreEnvironment() {
|
||||
if (originalEnv)
|
||||
process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT = originalEnv
|
||||
else
|
||||
delete process.env.NEXT_PUBLIC_MAX_PARALLEL_LIMIT
|
||||
|
||||
vi.resetModules()
|
||||
}
|
||||
|
||||
// Mock i18next with proper implementation
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => {
|
||||
if (key.includes('MaxParallelismTitle'))
|
||||
return 'Max Parallelism'
|
||||
if (key.includes('MaxParallelismDesc'))
|
||||
return 'Maximum number of parallel executions'
|
||||
if (key.includes('parallelMode'))
|
||||
return 'Parallel Mode'
|
||||
if (key.includes('parallelPanelDesc'))
|
||||
return 'Enable parallel execution'
|
||||
if (key.includes('errorResponseMethod'))
|
||||
return 'Error Response Method'
|
||||
return key
|
||||
},
|
||||
}),
|
||||
initReactI18next: {
|
||||
type: '3rdParty',
|
||||
init: vi.fn(),
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock i18next module completely to prevent initialization issues
|
||||
vi.mock('i18next', () => ({
|
||||
use: vi.fn().mockReturnThis(),
|
||||
init: vi.fn().mockReturnThis(),
|
||||
t: vi.fn(key => key),
|
||||
isInitialized: true,
|
||||
}))
|
||||
|
||||
// Mock the useConfig hook
|
||||
vi.mock('@/app/components/workflow/nodes/iteration/use-config', () => ({
|
||||
default: () => ({
|
||||
inputs: {
|
||||
is_parallel: true,
|
||||
parallel_nums: 5,
|
||||
error_handle_mode: 'terminated',
|
||||
},
|
||||
changeParallel: vi.fn(),
|
||||
changeParallelNums: vi.fn(),
|
||||
changeErrorHandleMode: vi.fn(),
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock other components
|
||||
vi.mock('@/app/components/workflow/nodes/_base/components/variable/var-reference-picker', () => ({
|
||||
default: function MockVarReferencePicker() {
|
||||
return <div data-testid="var-reference-picker">VarReferencePicker</div>
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/workflow/nodes/_base/components/split', () => ({
|
||||
default: function MockSplit() {
|
||||
return <div data-testid="split">Split</div>
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/workflow/nodes/_base/components/field', () => ({
|
||||
default: function MockField({ title, children }: { title: string, children: React.ReactNode }) {
|
||||
return (
|
||||
<div data-testid="field">
|
||||
<label>{title}</label>
|
||||
{children}
|
||||
</div>
|
||||
)
|
||||
},
|
||||
}))
|
||||
|
||||
const getParallelControls = () => ({
|
||||
numberInput: screen.getByRole('spinbutton'),
|
||||
slider: screen.getByRole('slider'),
|
||||
})
|
||||
|
||||
describe('MAX_PARALLEL_LIMIT Configuration Bug', () => {
|
||||
const mockNodeData = {
|
||||
id: 'test-iteration-node',
|
||||
type: 'iteration' as const,
|
||||
data: {
|
||||
title: 'Test Iteration',
|
||||
desc: 'Test iteration node',
|
||||
iterator_selector: ['test'],
|
||||
output_selector: ['output'],
|
||||
is_parallel: true,
|
||||
parallel_nums: 5,
|
||||
error_handle_mode: 'terminated' as const,
|
||||
},
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
restoreEnvironment()
|
||||
})
|
||||
|
||||
afterAll(() => {
|
||||
restoreEnvironment()
|
||||
})
|
||||
|
||||
describe('Environment Variable Parsing', () => {
|
||||
it('should parse MAX_PARALLEL_LIMIT from NEXT_PUBLIC_MAX_PARALLEL_LIMIT environment variable', async () => {
|
||||
setupEnvironment('25')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(25)
|
||||
})
|
||||
|
||||
it('should fallback to default when environment variable is not set', async () => {
|
||||
setupEnvironment() // No environment variable
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10)
|
||||
})
|
||||
|
||||
it('should handle invalid environment variable values', async () => {
|
||||
setupEnvironment('invalid')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
|
||||
// Should fall back to default when parsing fails
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10)
|
||||
})
|
||||
|
||||
it('should handle empty environment variable', async () => {
|
||||
setupEnvironment('')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
|
||||
// Should fall back to default when empty
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10)
|
||||
})
|
||||
|
||||
// Edge cases for boundary values
|
||||
it('should clamp MAX_PARALLEL_LIMIT to MIN when env is 0 or negative', async () => {
|
||||
setupEnvironment('0')
|
||||
let { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
|
||||
|
||||
setupEnvironment('-5')
|
||||
;({ MAX_PARALLEL_LIMIT } = await import('@/config'))
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(10) // Falls back to default
|
||||
})
|
||||
|
||||
it('should handle float numbers by parseInt behavior', async () => {
|
||||
setupEnvironment('12.7')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
// parseInt truncates to integer
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(12)
|
||||
})
|
||||
})
|
||||
|
||||
describe('UI Component Integration (Main Fix Verification)', () => {
|
||||
it('should render iteration panel with environment-configured max value', async () => {
|
||||
// Set environment variable to a different value
|
||||
setupEnvironment('30')
|
||||
|
||||
// Import Panel after setting environment
|
||||
const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
|
||||
render(
|
||||
<Panel
|
||||
id="test-node"
|
||||
// @ts-expect-error key type mismatch
|
||||
data={mockNodeData.data}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Behavior-focused assertion: UI max should equal MAX_PARALLEL_LIMIT
|
||||
const { numberInput, slider } = getParallelControls()
|
||||
expect(numberInput).toHaveAttribute('max', String(MAX_PARALLEL_LIMIT))
|
||||
expect(slider).toHaveAttribute('aria-valuemax', String(MAX_PARALLEL_LIMIT))
|
||||
|
||||
// Verify the actual values
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(30)
|
||||
expect(numberInput.getAttribute('max')).toBe('30')
|
||||
expect(slider.getAttribute('aria-valuemax')).toBe('30')
|
||||
})
|
||||
|
||||
it('should maintain UI consistency with different environment values', async () => {
|
||||
setupEnvironment('15')
|
||||
const Panel = await import('@/app/components/workflow/nodes/iteration/panel').then(mod => mod.default)
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
|
||||
render(
|
||||
<Panel
|
||||
id="test-node"
|
||||
// @ts-expect-error key type mismatch
|
||||
data={mockNodeData.data}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Both input and slider should use the same max value from MAX_PARALLEL_LIMIT
|
||||
const { numberInput, slider } = getParallelControls()
|
||||
|
||||
expect(numberInput.getAttribute('max')).toBe(slider.getAttribute('aria-valuemax'))
|
||||
expect(numberInput.getAttribute('max')).toBe(String(MAX_PARALLEL_LIMIT))
|
||||
})
|
||||
})
|
||||
|
||||
describe('Legacy Constant Verification (For Transition Period)', () => {
|
||||
// Marked as transition/deprecation tests
|
||||
it('should maintain MAX_ITERATION_PARALLEL_NUM for backward compatibility', async () => {
|
||||
const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
|
||||
expect(typeof MAX_ITERATION_PARALLEL_NUM).toBe('number')
|
||||
expect(MAX_ITERATION_PARALLEL_NUM).toBe(10) // Hardcoded legacy value
|
||||
})
|
||||
|
||||
it('should demonstrate MAX_PARALLEL_LIMIT vs legacy constant difference', async () => {
|
||||
setupEnvironment('50')
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
const { MAX_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
|
||||
|
||||
// MAX_PARALLEL_LIMIT is configurable, MAX_ITERATION_PARALLEL_NUM is not
|
||||
expect(MAX_PARALLEL_LIMIT).toBe(50)
|
||||
expect(MAX_ITERATION_PARALLEL_NUM).toBe(10)
|
||||
expect(MAX_PARALLEL_LIMIT).not.toBe(MAX_ITERATION_PARALLEL_NUM)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Constants Validation', () => {
|
||||
it('should validate that required constants exist and have correct types', async () => {
|
||||
const { MAX_PARALLEL_LIMIT } = await import('@/config')
|
||||
const { MIN_ITERATION_PARALLEL_NUM } = await import('@/app/components/workflow/constants')
|
||||
expect(typeof MAX_PARALLEL_LIMIT).toBe('number')
|
||||
expect(typeof MIN_ITERATION_PARALLEL_NUM).toBe('number')
|
||||
expect(MAX_PARALLEL_LIMIT).toBeGreaterThanOrEqual(MIN_ITERATION_PARALLEL_NUM)
|
||||
})
|
||||
})
|
||||
})
|
||||
34	web/app/components/base/answer-icon/index.spec.tsx	Normal file
@ -0,0 +1,34 @@
import { render, screen } from '@testing-library/react'
import AnswerIcon from '.'

describe('AnswerIcon', () => {
  it('renders default emoji when no icon or image is provided', () => {
    const { container } = render(<AnswerIcon />)
    const emojiElement = container.querySelector('em-emoji')
    expect(emojiElement).toBeInTheDocument()
    expect(emojiElement).toHaveAttribute('id', '🤖')
  })

  it('renders with custom emoji when icon is provided', () => {
    const { container } = render(<AnswerIcon icon="smile" />)
    const emojiElement = container.querySelector('em-emoji')
    expect(emojiElement).toBeInTheDocument()
    expect(emojiElement).toHaveAttribute('id', 'smile')
  })
  it('renders image when iconType is image and imageUrl is provided', () => {
    render(<AnswerIcon iconType="image" imageUrl="test-image.jpg" />)
    const imgElement = screen.getByAltText('answer icon')
    expect(imgElement).toBeInTheDocument()
    expect(imgElement).toHaveAttribute('src', 'test-image.jpg')
  })

  it('applies custom background color', () => {
    const { container } = render(<AnswerIcon background="#FF5500" />)
    expect(container.firstChild).toHaveStyle('background: #FF5500')
  })

  it('uses default background color when no background is provided for non-image icons', () => {
    const { container } = render(<AnswerIcon />)
    expect(container.firstChild).toHaveStyle('background: #D5F5F6')
  })
})
@ -3,7 +3,6 @@ import type { CSSProperties, ReactNode } from 'react'
import { cva } from 'class-variance-authority'
import * as React from 'react'
import { cn } from '@/utils/classnames'
import './index.css'

enum BadgeState {
  Warning = 'warning',
@ -8,6 +8,7 @@ import { UserActionButtonType } from '@/app/components/workflow/nodes/human-inpu
import 'dayjs/locale/en'
import 'dayjs/locale/zh-cn'
import 'dayjs/locale/ja'
import 'dayjs/locale/nl'

dayjs.extend(utc)
dayjs.extend(relativeTime)
@ -45,6 +46,7 @@ const localeMap: Record<string, string> = {
  'en-US': 'en',
  'zh-Hans': 'zh-cn',
  'ja-JP': 'ja',
  'nl-NL': 'nl',
}

export const getRelativeTime = (
@ -162,8 +162,10 @@ describe('useEmbeddedChatbot', () => {
    await waitFor(() => {
      expect(mockFetchChatList).toHaveBeenCalledWith('conversation-1', AppSourceType.webApp, 'app-1')
    })
    expect(result.current.pinnedConversationList).toEqual(pinnedData.data)
    expect(result.current.conversationList).toEqual(listData.data)
    await waitFor(() => {
      expect(result.current.pinnedConversationList).toEqual(pinnedData.data)
      expect(result.current.conversationList).toEqual(listData.data)
    })
  })
})
54	web/app/components/base/copy-icon/index.spec.tsx	Normal file
@ -0,0 +1,54 @@
import { fireEvent, render } from '@testing-library/react'
import CopyIcon from '.'

const copy = vi.fn()
const reset = vi.fn()
let copied = false

vi.mock('foxact/use-clipboard', () => ({
  useClipboard: () => ({
    copy,
    reset,
    copied,
  }),
}))

describe('copy icon component', () => {
  beforeEach(() => {
    vi.resetAllMocks()
    copied = false
  })

  it('renders normally', () => {
    const { container } = render(<CopyIcon content="this is some test content for the copy icon component" />)
    expect(container.querySelector('svg')).not.toBeNull()
  })

  it('shows copy icon initially', () => {
    const { container } = render(<CopyIcon content="this is some test content for the copy icon component" />)
    const icon = container.querySelector('[data-icon="Copy"]')
    expect(icon).toBeInTheDocument()
  })

  it('shows copy check icon when copied', () => {
    copied = true
    const { container } = render(<CopyIcon content="this is some test content for the copy icon component" />)
    const icon = container.querySelector('[data-icon="CopyCheck"]')
    expect(icon).toBeInTheDocument()
  })

  it('handles copy when clicked', () => {
    const { container } = render(<CopyIcon content="this is some test content for the copy icon component" />)
    const icon = container.querySelector('[data-icon="Copy"]')
    fireEvent.click(icon as Element)
    expect(copy).toBeCalledTimes(1)
  })

  it('resets on mouse leave', () => {
    const { container } = render(<CopyIcon content="this is some test content for the copy icon component" />)
    const icon = container.querySelector('[data-icon="Copy"]')
    const div = icon?.parentElement as HTMLElement
    fireEvent.mouseLeave(div)
    expect(reset).toBeCalledTimes(1)
  })
})
16	web/app/components/base/corner-label/index.spec.tsx	Normal file
@ -0,0 +1,16 @@
import { render, screen } from '@testing-library/react'
import CornerLabel from '.'

describe('CornerLabel', () => {
  it('renders the label correctly', () => {
    render(<CornerLabel label="Test Label" />)
    expect(screen.getByText('Test Label')).toBeInTheDocument()
  })

  it('applies custom class names', () => {
    const { container } = render(<CornerLabel label="Test Label" className="custom-class" labelClassName="custom-label-class" />)
    expect(container.querySelector('.custom-class')).toBeInTheDocument()
    expect(container.querySelector('.custom-label-class')).toBeInTheDocument()
    expect(screen.getByText('Test Label')).toBeInTheDocument()
  })
})
447	web/app/components/base/drawer-plus/index.spec.tsx	Normal file
@ -0,0 +1,447 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
import DrawerPlus from '.'
|
||||
|
||||
vi.mock('@/hooks/use-breakpoints', () => ({
|
||||
default: () => 'desktop',
|
||||
MediaType: { mobile: 'mobile', desktop: 'desktop', tablet: 'tablet' },
|
||||
}))
|
||||
|
||||
describe('DrawerPlus', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should not render when isShow is false', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={false}
|
||||
onHide={() => {}}
|
||||
title="Test Drawer"
|
||||
body={<div>Content</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.queryByRole('dialog')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render when isShow is true', () => {
|
||||
const bodyContent = <div>Body Content</div>
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test Drawer"
|
||||
body={bodyContent}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument()
|
||||
expect(screen.getByText('Test Drawer')).toBeInTheDocument()
|
||||
expect(screen.getByText('Body Content')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render footer when provided', () => {
|
||||
const footerContent = <div>Footer Content</div>
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test Drawer"
|
||||
body={<div>Body</div>}
|
||||
foot={footerContent}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('Footer Content')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render JSX element as title', () => {
|
||||
const titleElement = <h1 data-testid="custom-title">Custom Title</h1>
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title={titleElement}
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('custom-title')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render titleDescription when provided', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test Drawer"
|
||||
titleDescription="Description text"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('Description text')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render titleDescription when not provided', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test Drawer"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.queryByText(/Description/)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render JSX element as titleDescription', () => {
|
||||
const descElement = <span data-testid="custom-desc">Custom Description</span>
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
titleDescription={descElement}
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('custom-desc')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Props - Display Options', () => {
|
||||
it('should apply default maxWidthClassName', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
const innerPanel = screen.getByText('Test').closest('.bg-components-panel-bg')
|
||||
const outerPanel = innerPanel?.parentElement
|
||||
expect(outerPanel?.className).toContain('!max-w-[640px]')
|
||||
})
|
||||
|
||||
it('should apply custom maxWidthClassName', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
maxWidthClassName="!max-w-[800px]"
|
||||
/>,
|
||||
)
|
||||
|
||||
const innerPanel = screen.getByText('Test').closest('.bg-components-panel-bg')
|
||||
const outerPanel = innerPanel?.parentElement
|
||||
expect(outerPanel?.className).toContain('!max-w-[800px]')
|
||||
})
|
||||
|
||||
it('should apply custom panelClassName', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
panelClassName="custom-panel"
|
||||
/>,
|
||||
)
|
||||
|
||||
const innerPanel = screen.getByText('Test').closest('.bg-components-panel-bg')
|
||||
const outerPanel = innerPanel?.parentElement
|
||||
expect(outerPanel?.className).toContain('custom-panel')
|
||||
})
|
||||
|
||||
it('should apply custom dialogClassName', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
dialogClassName="custom-dialog"
|
||||
/>,
|
||||
)
|
||||
|
||||
const dialog = screen.getByRole('dialog')
|
||||
expect(dialog.className).toContain('custom-dialog')
|
||||
})
|
||||
|
||||
it('should apply custom contentClassName', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
contentClassName="custom-content"
|
||||
/>,
|
||||
)
|
||||
const title = screen.getByText('Test')
|
||||
const header = title.closest('.shrink-0.border-b.border-divider-subtle')
|
||||
const content = header?.parentElement
|
||||
expect(content?.className).toContain('custom-content')
|
||||
})
|
||||
|
||||
it('should apply custom headerClassName', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
headerClassName="custom-header"
|
||||
/>,
|
||||
)
|
||||
|
||||
const title = screen.getByText('Test')
|
||||
const header = title.closest('.shrink-0.border-b.border-divider-subtle')
|
||||
expect(header?.className).toContain('custom-header')
|
||||
})
|
||||
|
||||
it('should apply custom height', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
height="500px"
|
||||
/>,
|
||||
)
|
||||
|
||||
const title = screen.getByText('Test')
|
||||
const header = title.closest('.shrink-0.border-b.border-divider-subtle')
|
||||
const content = header?.parentElement
|
||||
expect(content?.getAttribute('style')).toContain('height: 500px')
|
||||
})
|
||||
|
||||
it('should use default height', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
const title = screen.getByText('Test')
|
||||
const header = title.closest('.shrink-0.border-b.border-divider-subtle')
|
||||
const content = header?.parentElement
|
||||
expect(content?.getAttribute('style')).toContain('calc(100vh - 72px)')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Event Handlers', () => {
|
||||
it('should call onHide when close button is clicked', () => {
|
||||
const handleHide = vi.fn()
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={handleHide}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
const title = screen.getByText('Test')
|
||||
const headerRight = title.nextElementSibling // .flex items-center
|
||||
const closeDiv = headerRight?.querySelector('.cursor-pointer') as HTMLElement
|
||||
|
||||
fireEvent.click(closeDiv)
|
||||
expect(handleHide).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Complex Content', () => {
|
||||
it('should render complex JSX elements in body', () => {
|
||||
const complexBody = (
|
||||
<div>
|
||||
<h2>Header</h2>
|
||||
<p>Paragraph</p>
|
||||
<button>Action Button</button>
|
||||
</div>
|
||||
)
|
||||
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={complexBody}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText('Header')).toBeInTheDocument()
|
||||
expect(screen.getByText('Paragraph')).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'Action Button' })).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render complex footer', () => {
|
||||
const complexFooter = (
|
||||
<div className="footer-actions">
|
||||
<button>Cancel</button>
|
||||
<button>Save</button>
|
||||
</div>
|
||||
)
|
||||
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
foot={complexFooter}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByRole('button', { name: 'Cancel' })).toBeInTheDocument()
|
||||
expect(screen.getByRole('button', { name: 'Save' })).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle empty title', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title=""
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle undefined titleDescription', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
titleDescription={undefined}
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle rapid isShow toggle', () => {
|
||||
const { rerender } = render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument()
|
||||
|
||||
rerender(
|
||||
<DrawerPlus
|
||||
isShow={false}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.queryByRole('dialog')).not.toBeInTheDocument()
|
||||
|
||||
rerender(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle special characters in title', () => {
|
||||
const specialTitle = 'Test <> & " \' | Drawer'
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title={specialTitle}
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByText(specialTitle)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle empty body content', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div></div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByRole('dialog')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should apply both custom maxWidth and panel classNames', () => {
|
||||
render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
maxWidthClassName="!max-w-[500px]"
|
||||
panelClassName="custom-style"
|
||||
/>,
|
||||
)
|
||||
|
||||
const innerPanel = screen.getByText('Test').closest('.bg-components-panel-bg')
|
||||
const outerPanel = innerPanel?.parentElement
|
||||
expect(outerPanel?.className).toContain('!max-w-[500px]')
|
||||
expect(outerPanel?.className).toContain('custom-style')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Memoization', () => {
|
||||
it('should be memoized and not re-render on parent changes', () => {
|
||||
const { rerender } = render(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
const dialog = screen.getByRole('dialog')
|
||||
|
||||
rerender(
|
||||
<DrawerPlus
|
||||
isShow={true}
|
||||
onHide={() => {}}
|
||||
title="Test"
|
||||
body={<div>Body</div>}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(dialog).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
225	web/app/components/base/dropdown/index.spec.tsx	Normal file
@ -0,0 +1,225 @@
|
||||
import { act, cleanup, fireEvent, render, screen } from '@testing-library/react'
|
||||
import Dropdown from './index'
|
||||
|
||||
describe('Dropdown Component', () => {
|
||||
const mockItems = [
|
||||
{ value: 'option1', text: 'Option 1' },
|
||||
{ value: 'option2', text: 'Option 2' },
|
||||
]
|
||||
const mockSecondItems = [
|
||||
{ value: 'option3', text: 'Option 3' },
|
||||
]
|
||||
const onSelect = vi.fn()
|
||||
|
||||
afterEach(() => {
|
||||
cleanup()
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('renders default trigger properly', () => {
|
||||
const { container } = render(
|
||||
<Dropdown items={mockItems} onSelect={onSelect} />,
|
||||
)
|
||||
const trigger = container.querySelector('button')
|
||||
expect(trigger).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('renders custom trigger when provided', () => {
|
||||
render(
|
||||
<Dropdown
|
||||
items={mockItems}
|
||||
onSelect={onSelect}
|
||||
renderTrigger={open => <button data-testid="custom-trigger">{open ? 'Open' : 'Closed'}</button>}
|
||||
/>,
|
||||
)
|
||||
const trigger = screen.getByTestId('custom-trigger')
|
||||
expect(trigger).toBeInTheDocument()
|
||||
expect(trigger).toHaveTextContent('Closed')
|
||||
})
|
||||
|
||||
it('opens dropdown menu on trigger click and shows items', async () => {
|
||||
render(
|
||||
<Dropdown items={mockItems} onSelect={onSelect} />,
|
||||
)
|
||||
const trigger = screen.getByRole('button')
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(trigger)
|
||||
})
|
||||
|
||||
// Dropdown items are rendered in a portal (document.body)
|
||||
expect(screen.getByText('Option 1')).toBeInTheDocument()
|
||||
expect(screen.getByText('Option 2')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('calls onSelect and closes dropdown when an item is clicked', async () => {
|
||||
render(
|
||||
<Dropdown items={mockItems} onSelect={onSelect} />,
|
||||
)
|
||||
const trigger = screen.getByRole('button')
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(trigger)
|
||||
})
|
||||
|
||||
const option1 = screen.getByText('Option 1')
|
||||
await act(async () => {
|
||||
fireEvent.click(option1)
|
||||
})
|
||||
|
||||
expect(onSelect).toHaveBeenCalledWith(mockItems[0])
|
||||
expect(screen.queryByText('Option 1')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('calls onSelect and closes dropdown when a second item is clicked', async () => {
|
||||
render(
|
||||
<Dropdown items={mockItems} secondItems={mockSecondItems} onSelect={onSelect} />,
|
||||
)
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button'))
|
||||
})
|
||||
|
||||
const option3 = screen.getByText('Option 3')
|
||||
await act(async () => {
|
||||
fireEvent.click(option3)
|
||||
})
|
||||
expect(onSelect).toHaveBeenCalledWith(mockSecondItems[0])
|
||||
expect(screen.queryByText('Option 3')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('renders second items and divider when provided', async () => {
|
||||
render(
|
||||
<Dropdown
|
||||
items={mockItems}
|
||||
secondItems={mockSecondItems}
|
||||
onSelect={onSelect}
|
||||
/>,
|
||||
)
|
||||
const trigger = screen.getByRole('button')
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(trigger)
|
||||
})
|
||||
|
||||
expect(screen.getByText('Option 1')).toBeInTheDocument()
|
||||
expect(screen.getByText('Option 3')).toBeInTheDocument()
|
||||
|
||||
// Check for divider (h-px bg-divider-regular)
|
||||
const divider = document.body.querySelector('.bg-divider-regular.h-px')
|
||||
expect(divider).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('applies custom classNames', async () => {
|
||||
const popupClass = 'custom-popup'
|
||||
const itemClass = 'custom-item'
|
||||
const secondItemClass = 'custom-second-item'
|
||||
|
||||
render(
|
||||
<Dropdown
|
||||
items={mockItems}
|
||||
secondItems={mockSecondItems}
|
||||
onSelect={onSelect}
|
||||
popupClassName={popupClass}
|
||||
itemClassName={itemClass}
|
||||
secondItemClassName={secondItemClass}
|
||||
/>,
|
||||
)
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button'))
|
||||
})
|
||||
|
||||
const popup = document.body.querySelector(`.${popupClass}`)
|
||||
expect(popup).toBeInTheDocument()
|
||||
|
||||
const items = screen.getAllByText('Option 1')
|
||||
expect(items[0]).toHaveClass(itemClass)
|
||||
|
||||
const secondItems = screen.getAllByText('Option 3')
|
||||
expect(secondItems[0]).toHaveClass(secondItemClass)
|
||||
})
|
||||
|
||||
it('applies open class to trigger when menu is open', async () => {
|
||||
render(<Dropdown items={mockItems} onSelect={onSelect} />)
|
||||
const trigger = screen.getByRole('button')
|
||||
await act(async () => {
|
||||
fireEvent.click(trigger)
|
||||
})
|
||||
expect(trigger).toHaveClass('bg-divider-regular')
|
||||
})
|
||||
|
||||
it('handles JSX elements as item text', async () => {
|
||||
const itemsWithJSX = [
|
||||
{ value: 'jsx', text: <span data-testid="jsx-item">JSX Content</span> },
|
||||
]
|
||||
render(
|
||||
<Dropdown items={itemsWithJSX} onSelect={onSelect} />,
|
||||
)
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button'))
|
||||
})
|
||||
|
||||
expect(screen.getByTestId('jsx-item')).toBeInTheDocument()
|
||||
expect(screen.getByText('JSX Content')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('does not render items section if items list is empty', async () => {
|
||||
render(
|
||||
<Dropdown items={[]} secondItems={mockSecondItems} onSelect={onSelect} />,
|
||||
)
|
||||
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button'))
|
||||
})
|
||||
|
||||
const p1Divs = document.body.querySelectorAll('.p-1')
|
||||
expect(p1Divs.length).toBe(1)
|
||||
expect(screen.queryByText('Option 1')).not.toBeInTheDocument()
|
||||
expect(screen.getByText('Option 3')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('does not render divider if only one section is provided', async () => {
|
||||
const { rerender } = render(
|
||||
<Dropdown items={mockItems} onSelect={onSelect} />,
|
||||
)
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button'))
|
||||
})
|
||||
expect(document.body.querySelector('.bg-divider-regular.h-px')).not.toBeInTheDocument()
|
||||
|
||||
await act(async () => {
|
||||
rerender(
|
||||
<Dropdown items={[]} secondItems={mockSecondItems} onSelect={onSelect} />,
|
||||
)
|
||||
})
|
||||
expect(document.body.querySelector('.bg-divider-regular.h-px')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('renders nothing if both item lists are empty', async () => {
|
||||
render(<Dropdown items={[]} secondItems={[]} onSelect={onSelect} />)
|
||||
await act(async () => {
|
||||
fireEvent.click(screen.getByRole('button'))
|
||||
})
|
||||
const popup = document.body.querySelector('.bg-components-panel-bg')
|
||||
expect(popup?.children.length).toBe(0)
|
||||
})
|
||||
|
||||
it('passes triggerProps to ActionButton and applies custom className', () => {
|
||||
render(
|
||||
<Dropdown
|
||||
items={mockItems}
|
||||
onSelect={onSelect}
|
||||
triggerProps={{
|
||||
'disabled': true,
|
||||
'aria-label': 'dropdown-trigger',
|
||||
'className': 'custom-trigger-class',
|
||||
}}
|
||||
/>,
|
||||
)
|
||||
const trigger = screen.getByLabelText('dropdown-trigger')
|
||||
expect(trigger).toBeDisabled()
|
||||
expect(trigger).toHaveClass('custom-trigger-class')
|
||||
})
|
||||
})
|
||||
9	web/app/components/base/effect/index.spec.tsx	Normal file
@ -0,0 +1,9 @@
import { render } from '@testing-library/react'
import Effect from '.'

describe('Effect', () => {
  it('applies custom class names', () => {
    const { container } = render(<Effect className="custom-class" />)
    expect(container.firstChild).toHaveClass('custom-class')
  })
})
15	web/app/components/base/encrypted-bottom/index.spec.tsx	Normal file
@ -0,0 +1,15 @@
import { render, screen } from '@testing-library/react'
import { EncryptedBottom } from '.'

describe('EncryptedBottom', () => {
  it('applies custom class names', () => {
    const { container } = render(<EncryptedBottom className="custom-class" />)
    expect(container.firstChild).toHaveClass('custom-class')
  })

  it('passes keys', async () => {
    render(<EncryptedBottom frontTextKey="provider.encrypted.front" backTextKey="provider.encrypted.back" />)
    expect(await screen.findByText(/provider.encrypted.front/i)).toBeInTheDocument()
    expect(await screen.findByText(/provider.encrypted.back/i)).toBeInTheDocument()
  })
})
@ -98,7 +98,9 @@ const VoiceParamConfig = ({
  className="h-full w-full cursor-pointer rounded-lg border-0 bg-components-input-bg-normal py-1.5 pl-3 pr-10 focus-visible:bg-state-base-hover focus-visible:outline-none group-hover:bg-state-base-hover sm:text-sm sm:leading-6"
>
  <span className={cn('block truncate text-left text-text-secondary', !languageItem?.name && 'text-text-tertiary')}>
    {languageItem?.name ? t(`voice.language.${replace(languageItem?.value, '-', '')}`, { ns: 'common' }) : localLanguagePlaceholder}
    {languageItem?.name
      ? t(`voice.language.${replace(languageItem?.value ?? '', '-', '')}`, languageItem?.name, { ns: 'common' as const })
      : localLanguagePlaceholder}
  </span>
  <span className="pointer-events-none absolute inset-y-0 right-0 flex items-center pr-2">
    <ChevronDownIcon
@ -129,7 +131,7 @@ const VoiceParamConfig = ({
  <span
    className={cn('block', selected && 'font-normal')}
  >
    {t(`voice.language.${replace((item.value), '-', '')}`, { ns: 'common' })}
    {t(`voice.language.${replace((item.value), '-', '')}`, item.name, { ns: 'common' as const })}
  </span>
  {(selected || item.value === text2speech?.language) && (
    <span
28	web/app/components/base/file-icon/index.spec.tsx	Normal file
@ -0,0 +1,28 @@
import { render } from '@testing-library/react'
import FileIcon from '.'

describe('File icon component', () => {
  const testCases = [
    { type: 'csv', icon: 'Csv' },
    { type: 'doc', icon: 'Doc' },
    { type: 'docx', icon: 'Docx' },
    { type: 'htm', icon: 'Html' },
    { type: 'html', icon: 'Html' },
    { type: 'md', icon: 'Md' },
    { type: 'mdx', icon: 'Md' },
    { type: 'markdown', icon: 'Md' },
    { type: 'pdf', icon: 'Pdf' },
    { type: 'xls', icon: 'Xlsx' },
    { type: 'xlsx', icon: 'Xlsx' },
    { type: 'notion', icon: 'Notion' },
    { type: 'something-else', icon: 'Unknown' },
    { type: 'txt', icon: 'Txt' },
    { type: 'json', icon: 'Json' },
  ]

  it.each(testCases)('renders $icon icon for type $type', ({ type, icon }) => {
    const { container } = render(<FileIcon type={type} />)
    const iconElement = container.querySelector(`[data-icon="${icon}"]`)
    expect(iconElement).toBeInTheDocument()
  })
})
@ -1,5 +1,5 @@
import type { RemixiconComponentType } from '@remixicon/react'
import { z } from 'zod'
import * as z from 'zod'

export const InputTypeEnum = z.enum([
  'text-input',
Some files were not shown because too many files have changed in this diff.