Compare commits


11 Commits

Author SHA1 Message Date
dfe24c83ab fix: use synchronous getByText assertion while in fake timer mode before restoring real timers
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 16:47:38 +00:00
569deaf0a4 fix: use findByText instead of getByText for async modal appearance in fake timer tests
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 15:40:40 +00:00
8c10513d6d fix: add Promise.resolve() after advancing fake timers to flush state updates
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 14:40:37 +00:00
e4aaabb079 fix: restore real timers before waitFor in fake timer tests and fix remaining tests
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 14:09:29 +00:00
d48d8488a6 fix: wrap all remaining tests with act() and increase timeouts for stability
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 14:02:10 +00:00
c257721f10 fix: correct flushFileReader helper to avoid infinite recursion
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 13:25:25 +00:00
34d7f8eceb refactor: extract flushFileReader helper function for better maintainability
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 13:00:06 +00:00
94f691a066 fix: apply act() wrapping to additional failing tests
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 12:52:27 +00:00
af325812e8 fix: wrap file upload and button clicks in act() for async state updates
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 12:50:30 +00:00
bb47a4732a fix: ensure vi.useRealTimers() is always called in afterEach for update-dsl-modal tests
Co-authored-by: hyoban <38493346+hyoban@users.noreply.github.com>
2026-02-04 12:40:39 +00:00
039ae14251 Initial plan 2026-02-04 12:08:36 +00:00
56 changed files with 1548 additions and 3943 deletions
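Taken together, the eleven commits above converge on a small set of fake-timer conventions: always restore real timers in afterEach, wrap timer advancement in act(), flush microtasks after advancing timers, and prefer synchronous queries while fake timers are active. A minimal Vitest + React Testing Library sketch of those conventions follows; the component, its import path, its `show` prop, the 1000 ms delay, and the asserted text are hypothetical stand-ins, not code from this diff.

```tsx
import { act, render, screen } from '@testing-library/react'
import { afterEach, beforeEach, expect, it, vi } from 'vitest'
import UpdateDslModal from './update-dsl-modal' // hypothetical component path

beforeEach(() => {
  vi.useFakeTimers()
})

afterEach(() => {
  // Restore real timers unconditionally so a failing test cannot leak
  // fake timers into the rest of the suite (commit bb47a4732a).
  vi.useRealTimers()
})

it('shows the modal after its open delay', async () => {
  render(<UpdateDslModal show />)

  // Wrap timer advancement in act() so React flushes the resulting state
  // updates (commits af325812e8, 94f691a066, d48d8488a6), then await a
  // resolved promise to drain pending microtasks (commit 8c10513d6d).
  await act(async () => {
    vi.advanceTimersByTime(1000) // hypothetical delay
    await Promise.resolve()
  })

  // While fake timers are still active, assert synchronously with
  // getByText instead of a timer-driven waitFor/findByText
  // (commit dfe24c83ab); if waitFor is unavoidable, restore real
  // timers first (commit e4aaabb079).
  expect(screen.getByText('Update DSL')).toBeTruthy()
})
```

The flushFileReader helper mentioned in commits 34d7f8eceb and c257721f10 presumably iterates a bounded version of the same act-and-flush step so FileReader callbacks settle without infinite recursion.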

View File

@@ -0,0 +1 @@
../../.agents/skills/component-refactoring

View File

@@ -0,0 +1 @@
../../.agents/skills/frontend-code-review

View File

@@ -0,0 +1 @@
../../.agents/skills/frontend-testing

View File

@@ -0,0 +1 @@
../../.agents/skills/orpc-contract-first

View File

@@ -79,6 +79,29 @@ jobs:
find . -name "*.py" -type f -exec sed -i.bak -E 's/"([^"]+)" \| None/Optional["\1"]/g; s/'"'"'([^'"'"']+)'"'"' \| None/Optional['"'"'\1'"'"']/g' {} \;
find . -name "*.py.bak" -type f -delete
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
package_json_file: web/package.json
run_install: false
- name: Setup Node.js
uses: actions/setup-node@v6
with:
node-version: 24
cache: pnpm
cache-dependency-path: ./web/pnpm-lock.yaml
- name: Install web dependencies
run: |
cd web
pnpm install --frozen-lockfile
- name: ESLint autofix
run: |
cd web
pnpm lint:fix || true
# mdformat breaks YAML front matter in markdown files. Add --exclude for directories containing YAML front matter.
- name: mdformat
run: |

View File

@@ -39,7 +39,7 @@ jobs:
run: pnpm install --frozen-lockfile
- name: Run tests
run: pnpm test:ci
run: pnpm test:coverage
- name: Coverage Summary
if: always()

View File

@@ -102,8 +102,6 @@ forbidden_modules =
core.trigger
core.variables
ignore_imports =
core.workflow.nodes.agent.agent_node -> core.db.session_factory
core.workflow.nodes.agent.agent_node -> models.tools
core.workflow.nodes.loop.loop_node -> core.app.workflow.node_factory
core.workflow.graph_engine.command_channels.redis_channel -> extensions.ext_redis
core.workflow.workflow_entry -> core.app.workflow.layers.observability
@@ -138,6 +136,7 @@ ignore_imports =
core.workflow.nodes.llm.llm_utils -> models.provider
core.workflow.nodes.llm.llm_utils -> services.credit_pool_service
core.workflow.nodes.llm.node -> core.tools.signature
core.workflow.nodes.template_transform.template_transform_node -> configs
core.workflow.nodes.tool.tool_node -> core.callback_handler.workflow_tool_callback_handler
core.workflow.nodes.tool.tool_node -> core.tools.tool_engine
core.workflow.nodes.tool.tool_node -> core.tools.tool_manager

View File

@@ -739,10 +739,8 @@ def upgrade_db():
click.echo(click.style("Database migration successful!", fg="green"))
except Exception as e:
except Exception:
logger.exception("Failed to execute database migration")
click.echo(click.style(f"Database migration failed: {e}", fg="red"))
raise SystemExit(1)
finally:
lock.release()
else:

View File

@@ -1,4 +1,3 @@
import logging
import uuid
from datetime import datetime
from typing import Any, Literal, TypeAlias
@@ -55,8 +54,6 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co
register_enum_models(console_ns, IconType)
_logger = logging.getLogger(__name__)
class AppListQuery(BaseModel):
page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -502,7 +499,6 @@ class AppListApi(Resource):
select(Workflow).where(
Workflow.version == Workflow.VERSION_DRAFT,
Workflow.app_id.in_(workflow_capable_app_ids),
Workflow.tenant_id == current_tenant_id,
)
)
.scalars()
@@ -514,14 +510,12 @@
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
node_id = None
try:
for node_id, node_data in workflow.walk_nodes():
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
except Exception:
_logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
continue
for app in app_pagination.items:

View File

@@ -47,7 +47,6 @@ class DifyNodeFactory(NodeFactory):
code_providers: Sequence[type[CodeNodeProvider]] | None = None,
code_limits: CodeNodeLimits | None = None,
template_renderer: Jinja2TemplateRenderer | None = None,
template_transform_max_output_length: int | None = None,
http_request_http_client: HttpClientProtocol | None = None,
http_request_tool_file_manager_factory: Callable[[], ToolFileManager] = ToolFileManager,
http_request_file_manager: FileManagerProtocol | None = None,
@@ -69,9 +68,6 @@
max_object_array_length=dify_config.CODE_MAX_OBJECT_ARRAY_LENGTH,
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
self._template_transform_max_output_length = (
template_transform_max_output_length or dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
)
self._http_request_http_client = http_request_http_client or ssrf_proxy
self._http_request_tool_file_manager_factory = http_request_tool_file_manager_factory
self._http_request_file_manager = http_request_file_manager or file_manager
@@ -126,7 +122,6 @@
graph_init_params=self.graph_init_params,
graph_runtime_state=self.graph_runtime_state,
template_renderer=self._template_renderer,
max_output_length=self._template_transform_max_output_length,
)
if node_type == NodeType.HTTP_REQUEST:

View File

@@ -6,8 +6,7 @@ from yarl import URL
from configs import dify_config
from core.helper.download import download_with_size_limit
from core.plugin.entities.marketplace import MarketplacePluginDeclaration, MarketplacePluginSnapshot
from extensions.ext_redis import redis_client
from core.plugin.entities.marketplace import MarketplacePluginDeclaration
marketplace_api_url = URL(str(dify_config.MARKETPLACE_API_URL))
logger = logging.getLogger(__name__)
@@ -44,37 +43,28 @@ def batch_fetch_plugin_by_ids(plugin_ids: list[str]) -> list[dict]:
return data.get("data", {}).get("plugins", [])
def batch_fetch_plugin_manifests_ignore_deserialization_error(
plugin_ids: list[str],
) -> Sequence[MarketplacePluginDeclaration]:
if len(plugin_ids) == 0:
return []
url = str(marketplace_api_url / "api/v1/plugins/batch")
response = httpx.post(url, json={"plugin_ids": plugin_ids}, headers={"X-Dify-Version": dify_config.project.version})
response.raise_for_status()
result: list[MarketplacePluginDeclaration] = []
for plugin in response.json()["data"]["plugins"]:
try:
result.append(MarketplacePluginDeclaration.model_validate(plugin))
except Exception:
logger.exception(
"Failed to deserialize marketplace plugin manifest for %s", plugin.get("plugin_id", "unknown")
)
return result
def record_install_plugin_event(plugin_unique_identifier: str):
url = str(marketplace_api_url / "api/v1/stats/plugins/install_count")
response = httpx.post(url, json={"unique_identifier": plugin_unique_identifier})
response.raise_for_status()
def fetch_global_plugin_manifest(cache_key_prefix: str, cache_ttl: int) -> None:
"""
Fetch all plugin manifests from marketplace and cache them in Redis.
This should be called once per check cycle to populate the instance-level cache.
Args:
cache_key_prefix: Redis key prefix for caching plugin manifests
cache_ttl: Cache TTL in seconds
Raises:
httpx.HTTPError: If the HTTP request fails
Exception: If any other error occurs during fetching or caching
"""
url = str(marketplace_api_url / "api/v1/dist/plugins/manifest.json")
response = httpx.get(url, headers={"X-Dify-Version": dify_config.project.version}, timeout=30)
response.raise_for_status()
raw_json = response.json()
plugins_data = raw_json.get("plugins", [])
# Parse and cache all plugin snapshots
for plugin_data in plugins_data:
plugin_snapshot = MarketplacePluginSnapshot.model_validate(plugin_data)
redis_client.setex(
name=f"{cache_key_prefix}{plugin_snapshot.plugin_id}",
time=cache_ttl,
value=plugin_snapshot.model_dump_json(),
)

View File

@@ -1,4 +1,4 @@
from pydantic import BaseModel, Field, computed_field, model_validator
from pydantic import BaseModel, Field, model_validator
from core.model_runtime.entities.provider_entities import ProviderEntity
from core.plugin.entities.endpoint import EndpointProviderDeclaration
@@ -48,15 +48,3 @@ class MarketplacePluginDeclaration(BaseModel):
if "tool" in data and not data["tool"]:
del data["tool"]
return data
class MarketplacePluginSnapshot(BaseModel):
org: str
name: str
latest_version: str
latest_package_identifier: str
latest_package_url: str
@computed_field
def plugin_id(self) -> str:
return f"{self.org}/{self.name}"

View File

@@ -112,7 +112,7 @@ class ArrayBooleanVariable(ArrayBooleanSegment, ArrayVariable):
class RAGPipelineVariable(BaseModel):
belong_to_node_id: str = Field(description="belong to which node id, shared means public")
type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
type: str = Field(description="variable type, text-input, paragraph, select, number, file, file-list")
label: str = Field(description="label")
description: str | None = Field(description="description", default="")
variable: str = Field(description="variable key", default="")

View File

@@ -2,7 +2,7 @@ from __future__ import annotations
import json
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Union, cast
from typing import TYPE_CHECKING, Any, cast
from packaging.version import Version
from pydantic import ValidationError
@@ -11,7 +11,6 @@ from sqlalchemy.orm import Session
from core.agent.entities import AgentToolEntity
from core.agent.plugin_entities import AgentStrategyParameter
from core.db.session_factory import session_factory
from core.file import File, FileTransferMethod
from core.memory.token_buffer_memory import TokenBufferMemory
from core.model_manager import ModelInstance, ModelManager
@@ -50,12 +49,6 @@ from factories import file_factory
from factories.agent_factory import get_plugin_agent_strategy
from models import ToolFile
from models.model import Conversation
from models.tools import (
ApiToolProvider,
BuiltinToolProvider,
MCPToolProvider,
WorkflowToolProvider,
)
from services.tools.builtin_tools_manage_service import BuiltinToolManageService
from .exc import (
@@ -266,7 +259,7 @@ class AgentNode(Node[AgentNodeData]):
value = cast(list[dict[str, Any]], value)
tool_value = []
for tool in value:
provider_type = self._infer_tool_provider_type(tool, self.tenant_id)
provider_type = ToolProviderType(tool.get("type", ToolProviderType.BUILT_IN))
setting_params = tool.get("settings", {})
parameters = tool.get("parameters", {})
manual_input_params = [key for key, value in parameters.items() if value is not None]
@@ -755,34 +748,3 @@
llm_usage=llm_usage,
)
)
@staticmethod
def _infer_tool_provider_type(tool_config: dict[str, Any], tenant_id: str) -> ToolProviderType:
provider_type_str = tool_config.get("type")
if provider_type_str:
return ToolProviderType(provider_type_str)
provider_id = tool_config.get("provider_name")
if not provider_id:
return ToolProviderType.BUILT_IN
with session_factory.create_session() as session:
provider_map: dict[
type[Union[WorkflowToolProvider, MCPToolProvider, ApiToolProvider, BuiltinToolProvider]],
ToolProviderType,
] = {
WorkflowToolProvider: ToolProviderType.WORKFLOW,
MCPToolProvider: ToolProviderType.MCP,
ApiToolProvider: ToolProviderType.API,
BuiltinToolProvider: ToolProviderType.BUILT_IN,
}
for provider_model, provider_type in provider_map.items():
stmt = select(provider_model).where(
provider_model.id == provider_id,
provider_model.tenant_id == tenant_id,
)
if session.scalar(stmt):
return provider_type
raise AgentNodeError(f"Tool provider with ID '{provider_id}' not found.")

View File

@@ -1,6 +1,7 @@
from collections.abc import Mapping, Sequence
from typing import TYPE_CHECKING, Any
from configs import dify_config
from core.workflow.enums import NodeType, WorkflowNodeExecutionStatus
from core.workflow.node_events import NodeRunResult
from core.workflow.nodes.base.node import Node
@@ -15,13 +16,12 @@ if TYPE_CHECKING:
from core.workflow.entities import GraphInitParams
from core.workflow.runtime import GraphRuntimeState
DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH = 400_000
MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH = dify_config.TEMPLATE_TRANSFORM_MAX_LENGTH
class TemplateTransformNode(Node[TemplateTransformNodeData]):
node_type = NodeType.TEMPLATE_TRANSFORM
_template_renderer: Jinja2TemplateRenderer
_max_output_length: int
def __init__(
self,
@@ -31,7 +31,6 @@ class TemplateTransformNode(Node[TemplateTransformNodeData]):
graph_runtime_state: "GraphRuntimeState",
*,
template_renderer: Jinja2TemplateRenderer | None = None,
max_output_length: int | None = None,
) -> None:
super().__init__(
id=id,
@@ -41,10 +40,6 @@
)
self._template_renderer = template_renderer or CodeExecutorJinja2TemplateRenderer()
if max_output_length is not None and max_output_length <= 0:
raise ValueError("max_output_length must be a positive integer")
self._max_output_length = max_output_length or DEFAULT_TEMPLATE_TRANSFORM_MAX_OUTPUT_LENGTH
@classmethod
def get_default_config(cls, filters: Mapping[str, object] | None = None) -> Mapping[str, object]:
"""
@@ -74,11 +69,11 @@
except TemplateRenderError as e:
return NodeRunResult(inputs=variables, status=WorkflowNodeExecutionStatus.FAILED, error=str(e))
if len(rendered) > self._max_output_length:
if len(rendered) > MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH:
return NodeRunResult(
inputs=variables,
status=WorkflowNodeExecutionStatus.FAILED,
error=f"Output length exceeds {self._max_output_length} characters",
error=f"Output length exceeds {MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH} characters",
)
return NodeRunResult(

View File

@@ -10,10 +10,6 @@ import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def _is_pg(conn):
return conn.dialect.name == "postgresql"
# revision identifiers, used by Alembic.
revision = '7df29de0f6be'
down_revision = '03ea244985ce'
@@ -23,31 +19,16 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
if _is_pg(conn):
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
else:
# For MySQL and other databases, UUID should be generated at application level
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.func.current_timestamp(), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),
sa.Column('quota_used', sa.BigInteger(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.PrimaryKeyConstraint('id', name='tenant_credit_pool_pkey')
)
with op.batch_alter_table('tenant_credit_pools', schema=None) as batch_op:
batch_op.create_index('tenant_credit_pool_pool_type_idx', ['pool_type'], unique=False)
batch_op.create_index('tenant_credit_pool_tenant_id_idx', ['tenant_id'], unique=False)

View File

@@ -2166,9 +2166,7 @@ class TenantCreditPool(TypeBase):
sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
)
id: Mapped[str] = mapped_column(
StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)

View File

@@ -1,24 +1,16 @@
import logging
import math
import time
import click
import app
from core.helper.marketplace import fetch_global_plugin_manifest
from extensions.ext_database import db
from models.account import TenantPluginAutoUpgradeStrategy
from tasks import process_tenant_plugin_autoupgrade_check_task as check_task
logger = logging.getLogger(__name__)
AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL = 15 * 60 # 15 minutes
MAX_CONCURRENT_CHECK_TASKS = 20
# Import cache constants from the task module
CACHE_REDIS_KEY_PREFIX = check_task.CACHE_REDIS_KEY_PREFIX
CACHE_REDIS_TTL = check_task.CACHE_REDIS_TTL
@app.celery.task(queue="plugin")
def check_upgradable_plugin_task():
@@ -48,22 +40,6 @@ def check_upgradable_plugin_task():
) # make sure all strategies are checked in this interval
batch_interval_time = (AUTO_UPGRADE_MINIMAL_CHECKING_INTERVAL / batch_chunk_count) if batch_chunk_count > 0 else 0
if total_strategies == 0:
click.echo(click.style("no strategies to process, skipping plugin manifest fetch.", fg="green"))
return
# Fetch and cache all plugin manifests before processing tenants
# This reduces load on marketplace from 300k requests to 1 request per check cycle
logger.info("fetching global plugin manifest from marketplace")
try:
fetch_global_plugin_manifest(CACHE_REDIS_KEY_PREFIX, CACHE_REDIS_TTL)
logger.info("successfully fetched and cached global plugin manifest")
except Exception as e:
logger.exception("failed to fetch global plugin manifest")
click.echo(click.style(f"failed to fetch global plugin manifest: {e}", fg="red"))
click.echo(click.style("skipping plugin upgrade check for this cycle", fg="yellow"))
return
for i in range(0, total_strategies, MAX_CONCURRENT_CHECK_TASKS):
batch_strategies = strategies[i : i + MAX_CONCURRENT_CHECK_TASKS]
for strategy in batch_strategies:

View File

@@ -327,17 +327,6 @@ class AccountService:
@staticmethod
def delete_account(account: Account):
"""Delete account. This method only adds a task to the queue for deletion."""
# Queue account deletion sync tasks for all workspaces BEFORE account deletion (enterprise only)
from services.enterprise.account_deletion_sync import sync_account_deletion
sync_success = sync_account_deletion(account_id=account.id, source="account_deleted")
if not sync_success:
logger.warning(
"Enterprise account deletion sync failed for account %s; proceeding with local deletion.",
account.id,
)
# Now proceed with async account deletion
delete_account_task.delay(account.id)
@staticmethod
@@ -1241,19 +1230,6 @@
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(tenant.id)
# Queue account deletion sync task for enterprise backend to reassign resources (enterprise only)
from services.enterprise.account_deletion_sync import sync_workspace_member_removal
sync_success = sync_workspace_member_removal(
workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed"
)
if not sync_success:
logger.warning(
"Enterprise workspace member removal sync failed: workspace_id=%s, member_id=%s",
tenant.id,
account.id,
)
@staticmethod
def update_member_role(tenant: Tenant, member: Account, new_role: str, operator: Account):
"""Update member role"""

View File

@@ -1,115 +0,0 @@
import json
import logging
import uuid
from datetime import UTC, datetime
from redis import RedisError
from configs import dify_config
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import TenantAccountJoin
logger = logging.getLogger(__name__)
ACCOUNT_DELETION_SYNC_QUEUE = "enterprise:member:sync:queue"
ACCOUNT_DELETION_SYNC_TASK_TYPE = "sync_member_deletion_from_workspace"
def _queue_task(workspace_id: str, member_id: str, *, source: str) -> bool:
"""
Queue an account deletion sync task to Redis.
Internal helper function. Do not call directly - use the public functions instead.
Args:
workspace_id: The workspace/tenant ID to sync
member_id: The member/account ID that was removed
source: Source of the sync request (for debugging/tracking)
Returns:
bool: True if task was queued successfully, False otherwise
"""
try:
task = {
"task_id": str(uuid.uuid4()),
"workspace_id": workspace_id,
"member_id": member_id,
"retry_count": 0,
"created_at": datetime.now(UTC).isoformat(),
"source": source,
"type": ACCOUNT_DELETION_SYNC_TASK_TYPE,
}
# Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
redis_client.lpush(ACCOUNT_DELETION_SYNC_QUEUE, json.dumps(task))
logger.info(
"Queued account deletion sync task for workspace %s, member %s, task_id: %s, source: %s",
workspace_id,
member_id,
task["task_id"],
source,
)
return True
except (RedisError, TypeError) as e:
logger.error(
"Failed to queue account deletion sync for workspace %s, member %s: %s",
workspace_id,
member_id,
str(e),
exc_info=True,
)
# Don't raise - we don't want to fail member deletion if queueing fails
return False
def sync_workspace_member_removal(workspace_id: str, member_id: str, *, source: str) -> bool:
"""
Sync a single workspace member removal (enterprise only).
Queues a task for the enterprise backend to reassign resources from the removed member.
Handles enterprise edition check internally. Safe to call in community edition (no-op).
Args:
workspace_id: The workspace/tenant ID
member_id: The member/account ID that was removed
source: Source of the sync request (e.g., "workspace_member_removed")
Returns:
bool: True if task was queued (or skipped in community), False if queueing failed
"""
if not dify_config.ENTERPRISE_ENABLED:
return True
return _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)
def sync_account_deletion(account_id: str, *, source: str) -> bool:
"""
Sync full account deletion across all workspaces (enterprise only).
Fetches all workspace memberships for the account and queues a sync task for each.
Handles enterprise edition check internally. Safe to call in community edition (no-op).
Args:
account_id: The account ID being deleted
source: Source of the sync request (e.g., "account_deleted")
Returns:
bool: True if all tasks were queued (or skipped in community), False if any queueing failed
"""
if not dify_config.ENTERPRISE_ENABLED:
return True
# Fetch all workspaces the account belongs to
workspace_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).all()
# Queue sync task for each workspace
success = True
for join in workspace_joins:
if not _queue_task(workspace_id=join.tenant_id, member_id=account_id, source=source):
success = False
return success

View File

@@ -8,6 +8,7 @@ from sqlalchemy import delete, select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
@ -26,7 +27,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start update document: {document_id}", fg="green"))
start_at = time.perf_counter()
with session_factory.create_session() as session, session.begin():
with session_factory.create_session() as session:
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
@@ -35,6 +36,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
# delete all document segment and index
try:
@@ -54,7 +56,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
db.session.commit()
end_at = time.perf_counter()
logger.info(
click.style(

View File

@@ -6,8 +6,8 @@ import typing
import click
from celery import shared_task
from core.helper.marketplace import record_install_plugin_event
from core.plugin.entities.marketplace import MarketplacePluginSnapshot
from core.helper import marketplace
from core.helper.marketplace import MarketplacePluginDeclaration
from core.plugin.entities.plugin import PluginInstallationSource
from core.plugin.impl.plugin import PluginInstaller
from extensions.ext_redis import redis_client
@@ -16,7 +16,7 @@ from models.account import TenantPluginAutoUpgradeStrategy
logger = logging.getLogger(__name__)
RETRY_TIMES_OF_ONE_PLUGIN_IN_ONE_TENANT = 3
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_snapshot:"
CACHE_REDIS_KEY_PREFIX = "plugin_autoupgrade_check_task:cached_plugin_manifests:"
CACHE_REDIS_TTL = 60 * 60 # 1 hour
@@ -25,11 +25,11 @@
return f"{CACHE_REDIS_KEY_PREFIX}{plugin_id}"
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapshot, None, bool]:
def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginDeclaration, None, bool]:
"""
Get cached plugin manifest from Redis.
Returns:
- MarketplacePluginSnapshot: if found in cache
- MarketplacePluginDeclaration: if found in cache
- None: if cached as not found (marketplace returned no result)
- False: if not in cache at all
"""
@@ -43,31 +43,76 @@ def _get_cached_manifest(plugin_id: str) -> typing.Union[MarketplacePluginSnapsh
if cached_json is None:
return None
return MarketplacePluginSnapshot.model_validate(cached_json)
return MarketplacePluginDeclaration.model_validate(cached_json)
except Exception:
logger.exception("Failed to get cached manifest for plugin %s", plugin_id)
return False
def _set_cached_manifest(plugin_id: str, manifest: typing.Union[MarketplacePluginDeclaration, None]) -> None:
"""
Cache plugin manifest in Redis.
Args:
plugin_id: The plugin ID
manifest: The manifest to cache, or None if not found in marketplace
"""
try:
key = _get_redis_cache_key(plugin_id)
if manifest is None:
# Cache the fact that this plugin was not found
redis_client.setex(key, CACHE_REDIS_TTL, json.dumps(None))
else:
# Cache the manifest data
redis_client.setex(key, CACHE_REDIS_TTL, manifest.model_dump_json())
except Exception:
# If Redis fails, continue without caching
# traceback.print_exc()
logger.exception("Failed to set cached manifest for plugin %s", plugin_id)
def marketplace_batch_fetch_plugin_manifests(
plugin_ids_plain_list: list[str],
) -> list[MarketplacePluginSnapshot]:
"""
Fetch plugin manifests from Redis cache only.
This function assumes fetch_global_plugin_manifest() has been called
to pre-populate the cache with all marketplace plugins.
"""
result: list[MarketplacePluginSnapshot] = []
) -> list[MarketplacePluginDeclaration]:
"""Fetch plugin manifests with Redis caching support."""
cached_manifests: dict[str, typing.Union[MarketplacePluginDeclaration, None]] = {}
not_cached_plugin_ids: list[str] = []
# Check Redis cache for each plugin
for plugin_id in plugin_ids_plain_list:
cached_result = _get_cached_manifest(plugin_id)
if not isinstance(cached_result, MarketplacePluginSnapshot):
# cached_result is False (not in cache) or None (cached as not found)
logger.warning("plugin %s not found in cache, skipping", plugin_id)
continue
if cached_result is False:
# Not in cache, need to fetch
not_cached_plugin_ids.append(plugin_id)
else:
# Either found manifest or cached as None (not found in marketplace)
# At this point, cached_result is either MarketplacePluginDeclaration or None
if isinstance(cached_result, bool):
# This should never happen due to the if condition above, but for type safety
continue
cached_manifests[plugin_id] = cached_result
result.append(cached_result)
# Fetch uncached plugins from marketplace
if not_cached_plugin_ids:
manifests = marketplace.batch_fetch_plugin_manifests_ignore_deserialization_error(not_cached_plugin_ids)
# Cache the fetched manifests
for manifest in manifests:
cached_manifests[manifest.plugin_id] = manifest
_set_cached_manifest(manifest.plugin_id, manifest)
# Cache plugins that were not found in marketplace
fetched_plugin_ids = {manifest.plugin_id for manifest in manifests}
for plugin_id in not_cached_plugin_ids:
if plugin_id not in fetched_plugin_ids:
cached_manifests[plugin_id] = None
_set_cached_manifest(plugin_id, None)
# Build result list from cached manifests
result: list[MarketplacePluginDeclaration] = []
for plugin_id in plugin_ids_plain_list:
cached_manifest: typing.Union[MarketplacePluginDeclaration, None] = cached_manifests.get(plugin_id)
if cached_manifest is not None:
result.append(cached_manifest)
return result
@@ -166,7 +211,7 @@
# execute upgrade
new_unique_identifier = manifest.latest_package_identifier
record_install_plugin_event(new_unique_identifier)
marketplace.record_install_plugin_event(new_unique_identifier)
click.echo(
click.style(
f"Upgrade plugin: {original_unique_identifier} -> {new_unique_identifier}",

View File

@@ -1016,7 +1016,7 @@ class TestAccountService:
def test_delete_account(self, db_session_with_containers, mock_external_service_dependencies):
"""
Test account deletion (should add task to queue and sync to enterprise).
Test account deletion (should add task to queue).
"""
fake = Faker()
email = fake.email()
@@ -1034,18 +1034,10 @@
password=password,
)
with (
patch("services.account_service.delete_account_task") as mock_delete_task,
patch("services.enterprise.account_deletion_sync.sync_account_deletion") as mock_sync,
):
mock_sync.return_value = True
with patch("services.account_service.delete_account_task") as mock_delete_task:
# Delete account
AccountService.delete_account(account)
# Verify sync was called
mock_sync.assert_called_once_with(account_id=account.id, source="account_deleted")
# Verify task was added to queue
mock_delete_task.delay.assert_called_once_with(account.id)
@@ -1724,7 +1716,7 @@
def test_remove_member_from_tenant_success(self, db_session_with_containers, mock_external_service_dependencies):
"""
Test successful member removal from tenant (should sync to enterprise).
Test successful member removal from tenant.
"""
fake = Faker()
tenant_name = fake.company()
@@ -1759,15 +1751,7 @@
TenantService.create_tenant_member(tenant, member_account, role="normal")
# Remove member
with patch("services.enterprise.account_deletion_sync.sync_workspace_member_removal") as mock_sync:
mock_sync.return_value = True
TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
# Verify sync was called
mock_sync.assert_called_once_with(
workspace_id=tenant.id, member_id=member_account.id, source="workspace_member_removed"
)
TenantService.remove_member_from_tenant(tenant, member_account, owner_account)
# Verify member was removed
from extensions.ext_database import db

View File

@@ -1,182 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from faker import Faker
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
from tasks.document_indexing_update_task import document_indexing_update_task
class TestDocumentIndexingUpdateTask:
@pytest.fixture
def mock_external_dependencies(self):
"""Patch external collaborators used by the update task.
- IndexProcessorFactory.init_index_processor().clean(...)
- IndexingRunner.run([...])
"""
with (
patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
):
processor_instance = MagicMock()
mock_factory.return_value.init_index_processor.return_value = processor_instance
runner_instance = MagicMock()
mock_runner.return_value = runner_instance
yield {
"factory": mock_factory,
"processor": processor_instance,
"runner": mock_runner,
"runner_instance": runner_instance,
}
def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
fake = Faker()
# Account and tenant
account = Account(
email=fake.email(),
name=fake.name(),
interface_language="en-US",
status="active",
)
db_session_with_containers.add(account)
db_session_with_containers.commit()
tenant = Tenant(name=fake.company(), status="normal")
db_session_with_containers.add(tenant)
db_session_with_containers.commit()
join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=TenantAccountRole.OWNER,
current=True,
)
db_session_with_containers.add(join)
db_session_with_containers.commit()
# Dataset and document
dataset = Dataset(
tenant_id=tenant.id,
name=fake.company(),
description=fake.text(max_nb_chars=64),
data_source_type="upload_file",
indexing_technique="high_quality",
created_by=account.id,
)
db_session_with_containers.add(dataset)
db_session_with_containers.commit()
document = Document(
tenant_id=tenant.id,
dataset_id=dataset.id,
position=0,
data_source_type="upload_file",
batch="test_batch",
name=fake.file_name(),
created_from="upload_file",
created_by=account.id,
indexing_status="waiting",
enabled=True,
doc_form="text_model",
)
db_session_with_containers.add(document)
db_session_with_containers.commit()
# Segments
node_ids = []
for i in range(segment_count):
node_id = f"node-{i + 1}"
seg = DocumentSegment(
tenant_id=tenant.id,
dataset_id=dataset.id,
document_id=document.id,
position=i,
content=fake.text(max_nb_chars=32),
answer=None,
word_count=10,
tokens=5,
index_node_id=node_id,
status="completed",
created_by=account.id,
)
db_session_with_containers.add(seg)
node_ids.append(node_id)
db_session_with_containers.commit()
# Refresh to ensure ORM state
db_session_with_containers.refresh(dataset)
db_session_with_containers.refresh(document)
return dataset, document, node_ids
def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Act
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Assert document status updated before reindex
updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
assert updated.indexing_status == "parsing"
assert updated.processing_started_at is not None
# Segments should be deleted
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining == 0
# Assert index processor clean was called with expected args
clean_call = mock_external_dependencies["processor"].clean.call_args
assert clean_call is not None
args, kwargs = clean_call
# args[0] is a Dataset instance (from another session) — validate by id
assert getattr(args[0], "id", None) == dataset.id
# args[1] should contain our node_ids
assert set(args[1]) == set(node_ids)
assert kwargs.get("with_keywords") is True
assert kwargs.get("delete_child_chunks") is True
# Assert indexing runner invoked with the updated document
run_call = mock_external_dependencies["runner_instance"].run.call_args
assert run_call is not None
run_docs = run_call[0][0]
assert len(run_docs) == 1
first = run_docs[0]
assert getattr(first, "id", None) == document.id
def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Force clean to raise; task should continue to indexing
mock_external_dependencies["processor"].clean.side_effect = Exception("boom")
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Indexing should still be triggered
mock_external_dependencies["runner_instance"].run.assert_called_once()
# Segments should remain (since clean failed before DB delete)
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining > 0
def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
fake = Faker()
# Act with non-existent document id
document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())
# Neither processor nor runner should be called
mock_external_dependencies["processor"].clean.assert_not_called()
mock_external_dependencies["runner_instance"].run.assert_not_called()

View File

@@ -1,197 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from core.tools.entities.tool_entities import ToolProviderType
from core.workflow.nodes.agent.agent_node import AgentNode
class TestInferToolProviderType:
"""Test cases for AgentNode._infer_tool_provider_type method."""
def test_infer_type_from_config_workflow(self):
"""Test inferring workflow provider type from config."""
tool_config = {
"type": "workflow",
"provider_name": "workflow-provider-id",
}
tenant_id = "test-tenant"
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.WORKFLOW
def test_infer_type_from_config_builtin(self):
"""Test inferring builtin provider type from config."""
tool_config = {
"type": "builtin",
"provider_name": "builtin-provider-id",
}
tenant_id = "test-tenant"
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.BUILT_IN
def test_infer_type_from_config_api(self):
"""Test inferring API provider type from config."""
tool_config = {
"type": "api",
"provider_name": "api-provider-id",
}
tenant_id = "test-tenant"
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.API
def test_infer_type_from_config_mcp(self):
"""Test inferring MCP provider type from config."""
tool_config = {
"type": "mcp",
"provider_name": "mcp-provider-id",
}
tenant_id = "test-tenant"
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.MCP
def test_infer_type_invalid_config_value_raises_error(self):
"""Test that invalid type value in config raises ValueError."""
tool_config = {
"type": "invalid-type",
"provider_name": "workflow-provider-id",
}
tenant_id = "test-tenant"
with pytest.raises(ValueError):
AgentNode._infer_tool_provider_type(tool_config, tenant_id)
def test_infer_workflow_type_from_database(self):
"""Test inferring workflow provider type from database."""
tool_config = {
"provider_name": "workflow-provider-id",
}
tenant_id = "test-tenant"
with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session
# First query (WorkflowToolProvider) returns a result
mock_session.scalar.return_value = True
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.WORKFLOW
# Should only query once (after finding WorkflowToolProvider)
assert mock_session.scalar.call_count == 1
def test_infer_mcp_type_from_database(self):
"""Test inferring MCP provider type from database."""
tool_config = {
"provider_name": "mcp-provider-id",
}
tenant_id = "test-tenant"
with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session
# First query (WorkflowToolProvider) returns None
# Second query (MCPToolProvider) returns a result
mock_session.scalar.side_effect = [None, True]
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.MCP
assert mock_session.scalar.call_count == 2
def test_infer_api_type_from_database(self):
"""Test inferring API provider type from database."""
tool_config = {
"provider_name": "api-provider-id",
}
tenant_id = "test-tenant"
with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session
# First query (WorkflowToolProvider) returns None
# Second query (MCPToolProvider) returns None
# Third query (ApiToolProvider) returns a result
mock_session.scalar.side_effect = [None, None, True]
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.API
assert mock_session.scalar.call_count == 3
def test_infer_builtin_type_from_database(self):
"""Test inferring builtin provider type from database."""
tool_config = {
"provider_name": "builtin-provider-id",
}
tenant_id = "test-tenant"
with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session
# First three queries return None
# Fourth query (BuiltinToolProvider) returns a result
mock_session.scalar.side_effect = [None, None, None, True]
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.BUILT_IN
assert mock_session.scalar.call_count == 4
def test_infer_type_default_when_not_found(self):
"""Test raising AgentNodeError when provider is not found in database."""
tool_config = {
"provider_name": "unknown-provider-id",
}
tenant_id = "test-tenant"
with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session
# All queries return None
mock_session.scalar.return_value = None
# Current implementation raises AgentNodeError when provider not found
from core.workflow.nodes.agent.exc import AgentNodeError
with pytest.raises(AgentNodeError, match="Tool provider with ID 'unknown-provider-id' not found"):
AgentNode._infer_tool_provider_type(tool_config, tenant_id)
def test_infer_type_default_when_no_provider_name(self):
"""Test defaulting to BUILT_IN when provider_name is missing."""
tool_config = {}
tenant_id = "test-tenant"
result = AgentNode._infer_tool_provider_type(tool_config, tenant_id)
assert result == ToolProviderType.BUILT_IN
def test_infer_type_database_exception_propagates(self):
"""Test that database exception propagates (current implementation doesn't catch it)."""
tool_config = {
"provider_name": "provider-id",
}
tenant_id = "test-tenant"
with patch("core.db.session_factory.session_factory.create_session") as mock_create_session:
mock_session = MagicMock()
mock_create_session.return_value.__enter__.return_value = mock_session
# Database query raises exception
mock_session.scalar.side_effect = Exception("Database error")
# Current implementation doesn't catch exceptions, so it propagates
with pytest.raises(Exception, match="Database error"):
AgentNode._infer_tool_provider_type(tool_config, tenant_id)

View File

@@ -217,6 +217,7 @@ class TestTemplateTransformNode:
@patch(
"core.workflow.nodes.template_transform.template_transform_node.CodeExecutorJinja2TemplateRenderer.render_template"
)
@patch("core.workflow.nodes.template_transform.template_transform_node.MAX_TEMPLATE_TRANSFORM_OUTPUT_LENGTH", 10)
def test_run_output_length_exceeds_limit(
self, mock_execute, basic_node_data, mock_graph, mock_graph_runtime_state, graph_init_params
):
@@ -230,7 +231,6 @@
graph_init_params=graph_init_params,
graph=mock_graph,
graph_runtime_state=mock_graph_runtime_state,
max_output_length=10,
)
result = node._run()

View File

@@ -1,276 +0,0 @@
"""Unit tests for account deletion synchronization.
This test module verifies the enterprise account deletion sync functionality,
including Redis queuing, error handling, and community vs enterprise behavior.
"""
from unittest.mock import MagicMock, patch
import pytest
from redis import RedisError
from services.enterprise.account_deletion_sync import (
_queue_task,
sync_account_deletion,
sync_workspace_member_removal,
)
class TestQueueTask:
"""Unit tests for the _queue_task helper function."""
@pytest.fixture
def mock_redis_client(self):
"""Mock redis_client for testing."""
with patch("services.enterprise.account_deletion_sync.redis_client") as mock_redis:
yield mock_redis
@pytest.fixture
def mock_uuid(self):
"""Mock UUID generation for predictable task IDs."""
with patch("services.enterprise.account_deletion_sync.uuid.uuid4") as mock_uuid_gen:
mock_uuid_gen.return_value = MagicMock(hex="test-task-id-1234")
yield mock_uuid_gen
def test_queue_task_success(self, mock_redis_client, mock_uuid):
"""Test successful task queueing to Redis."""
# Arrange
workspace_id = "ws-123"
member_id = "member-456"
source = "test_source"
# Act
result = _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)
# Assert
assert result is True
mock_redis_client.lpush.assert_called_once()
# Verify the task payload structure
call_args = mock_redis_client.lpush.call_args[0]
assert call_args[0] == "enterprise:member:sync:queue"
import json
task_data = json.loads(call_args[1])
assert task_data["workspace_id"] == workspace_id
assert task_data["member_id"] == member_id
assert task_data["source"] == source
assert task_data["type"] == "sync_member_deletion_from_workspace"
assert task_data["retry_count"] == 0
assert "task_id" in task_data
assert "created_at" in task_data
def test_queue_task_redis_error(self, mock_redis_client, caplog):
"""Test handling of Redis connection errors."""
# Arrange
mock_redis_client.lpush.side_effect = RedisError("Connection failed")
# Act
result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")
# Assert
assert result is False
assert "Failed to queue account deletion sync" in caplog.text
def test_queue_task_type_error(self, mock_redis_client, caplog):
"""Test handling of JSON serialization errors."""
# Arrange
mock_redis_client.lpush.side_effect = TypeError("Cannot serialize")
# Act
result = _queue_task(workspace_id="ws-123", member_id="member-456", source="test_source")
# Assert
assert result is False
assert "Failed to queue account deletion sync" in caplog.text
class TestSyncWorkspaceMemberRemoval:
"""Unit tests for sync_workspace_member_removal function."""
@pytest.fixture
def mock_queue_task(self):
"""Mock _queue_task for testing."""
with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
mock_queue.return_value = True
yield mock_queue
def test_sync_workspace_member_removal_enterprise_enabled(self, mock_queue_task):
"""Test sync when ENTERPRISE_ENABLED is True."""
# Arrange
workspace_id = "ws-123"
member_id = "member-456"
source = "workspace_member_removed"
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_workspace_member_removal(workspace_id=workspace_id, member_id=member_id, source=source)
# Assert
assert result is True
mock_queue_task.assert_called_once_with(workspace_id=workspace_id, member_id=member_id, source=source)
def test_sync_workspace_member_removal_enterprise_disabled(self, mock_queue_task):
"""Test sync when ENTERPRISE_ENABLED is False (community edition)."""
# Arrange
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = False
# Act
result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")
# Assert
assert result is True
mock_queue_task.assert_not_called()
def test_sync_workspace_member_removal_queue_failure(self, mock_queue_task):
"""Test handling of queue task failures."""
# Arrange
mock_queue_task.return_value = False
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_workspace_member_removal(workspace_id="ws-123", member_id="member-456", source="test_source")
# Assert
assert result is False
class TestSyncAccountDeletion:
"""Unit tests for sync_account_deletion function."""
@pytest.fixture
def mock_db_session(self):
"""Mock database session for testing."""
with patch("services.enterprise.account_deletion_sync.db.session") as mock_session:
yield mock_session
@pytest.fixture
def mock_queue_task(self):
"""Mock _queue_task for testing."""
with patch("services.enterprise.account_deletion_sync._queue_task") as mock_queue:
mock_queue.return_value = True
yield mock_queue
def test_sync_account_deletion_enterprise_disabled(self, mock_db_session, mock_queue_task):
"""Test sync when ENTERPRISE_ENABLED is False (community edition)."""
# Arrange
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = False
# Act
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
# Assert
assert result is True
mock_db_session.query.assert_not_called()
mock_queue_task.assert_not_called()
def test_sync_account_deletion_multiple_workspaces(self, mock_db_session, mock_queue_task):
"""Test sync for account with multiple workspace memberships."""
# Arrange
account_id = "acc-123"
# Mock workspace joins
mock_join1 = MagicMock()
mock_join1.tenant_id = "tenant-1"
mock_join2 = MagicMock()
mock_join2.tenant_id = "tenant-2"
mock_join3 = MagicMock()
mock_join3.tenant_id = "tenant-3"
mock_query = MagicMock()
mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
mock_db_session.query.return_value = mock_query
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_account_deletion(account_id=account_id, source="account_deleted")
# Assert
assert result is True
assert mock_queue_task.call_count == 3
# Verify each workspace was queued
mock_queue_task.assert_any_call(workspace_id="tenant-1", member_id=account_id, source="account_deleted")
mock_queue_task.assert_any_call(workspace_id="tenant-2", member_id=account_id, source="account_deleted")
mock_queue_task.assert_any_call(workspace_id="tenant-3", member_id=account_id, source="account_deleted")
def test_sync_account_deletion_no_workspaces(self, mock_db_session, mock_queue_task):
"""Test sync for account with no workspace memberships."""
# Arrange
mock_query = MagicMock()
mock_query.filter_by.return_value.all.return_value = []
mock_db_session.query.return_value = mock_query
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
# Assert
assert result is True
mock_queue_task.assert_not_called()
def test_sync_account_deletion_partial_failure(self, mock_db_session, mock_queue_task):
"""Test sync when some tasks fail to queue."""
# Arrange
account_id = "acc-123"
# Mock workspace joins
mock_join1 = MagicMock()
mock_join1.tenant_id = "tenant-1"
mock_join2 = MagicMock()
mock_join2.tenant_id = "tenant-2"
mock_join3 = MagicMock()
mock_join3.tenant_id = "tenant-3"
mock_query = MagicMock()
mock_query.filter_by.return_value.all.return_value = [mock_join1, mock_join2, mock_join3]
mock_db_session.query.return_value = mock_query
# Mock queue_task to fail for second workspace
def queue_side_effect(workspace_id, member_id, source):
return workspace_id != "tenant-2"
mock_queue_task.side_effect = queue_side_effect
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_account_deletion(account_id=account_id, source="account_deleted")
# Assert
assert result is False # Should return False if any task fails
assert mock_queue_task.call_count == 3
def test_sync_account_deletion_all_failures(self, mock_db_session, mock_queue_task):
"""Test sync when all tasks fail to queue."""
# Arrange
mock_join = MagicMock()
mock_join.tenant_id = "tenant-1"
mock_query = MagicMock()
mock_query.filter_by.return_value.all.return_value = [mock_join]
mock_db_session.query.return_value = mock_query
mock_queue_task.return_value = False
with patch("services.enterprise.account_deletion_sync.dify_config") as mock_config:
mock_config.ENTERPRISE_ENABLED = True
# Act
result = sync_account_deletion(account_id="acc-123", source="account_deleted")
# Assert
assert result is False
mock_queue_task.assert_called_once()

View File

@@ -1,3 +1,4 @@
/* eslint-disable tailwindcss/classnames-order */
import type { Meta, StoryObj } from '@storybook/nextjs-vite'
import Effect from '.'
@@ -28,8 +29,8 @@ type Story = StoryObj<typeof meta>
export const Playground: Story = {
render: () => (
<div className="relative h-40 w-72 overflow-hidden rounded-2xl border border-divider-subtle bg-background-default-subtle">
<Effect className="left-8 top-6" />
<Effect className="bg-util-colors-purple-brand-purple-brand-500 right-10 top-14" />
<Effect className="top-6 left-8" />
<Effect className="top-14 right-10 bg-util-colors-purple-brand-purple-brand-500" />
<div className="absolute inset-x-0 bottom-4 flex justify-center text-xs text-text-secondary">
Accent glow
</div>

View File

@@ -4,7 +4,7 @@ import type { FC } from 'react'
import { RiQuestionLine } from '@remixicon/react'
import { useBoolean } from 'ahooks'
import * as React from 'react'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useEffect, useRef, useState } from 'react'
import { PortalToFollowElem, PortalToFollowElemContent, PortalToFollowElemTrigger } from '@/app/components/base/portal-to-follow-elem'
import { cn } from '@/utils/classnames'
import { tooltipManager } from './TooltipManager'
@@ -61,20 +61,6 @@ const Tooltip: FC<TooltipProps> = ({
isHoverTriggerRef.current = isHoverTrigger
}, [isHoverTrigger])
const closeTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)
const clearCloseTimeout = useCallback(() => {
if (closeTimeoutRef.current) {
clearTimeout(closeTimeoutRef.current)
closeTimeoutRef.current = null
}
}, [])
useEffect(() => {
return () => {
clearCloseTimeout()
}
}, [clearCloseTimeout])
const close = () => setOpen(false)
const handleLeave = (isTrigger: boolean) => {
@@ -85,9 +71,7 @@ const Tooltip: FC<TooltipProps> = ({
// give the pointer time to move onto the popup
if (needsDelay) {
clearCloseTimeout()
closeTimeoutRef.current = setTimeout(() => {
closeTimeoutRef.current = null
setTimeout(() => {
if (!isHoverPopupRef.current && !isHoverTriggerRef.current) {
setOpen(false)
tooltipManager.clear(close)
@ -95,7 +79,6 @@ const Tooltip: FC<TooltipProps> = ({
}, 300)
}
else {
clearCloseTimeout()
setOpen(false)
tooltipManager.clear(close)
}
@ -112,7 +95,6 @@ const Tooltip: FC<TooltipProps> = ({
onClick={() => triggerMethod === 'click' && setOpen(v => !v)}
onMouseEnter={() => {
if (triggerMethod === 'hover') {
clearCloseTimeout()
setHoverTrigger()
tooltipManager.register(close)
setOpen(true)
@ -133,12 +115,7 @@ const Tooltip: FC<TooltipProps> = ({
!noDecoration && 'system-xs-regular relative max-w-[300px] break-words rounded-md bg-components-panel-bg px-3 py-2 text-left text-text-tertiary shadow-lg',
popupClassName,
)}
onMouseEnter={() => {
if (triggerMethod === 'hover') {
clearCloseTimeout()
setHoverPopup()
}
}}
onMouseEnter={() => triggerMethod === 'hover' && setHoverPopup()}
onMouseLeave={() => triggerMethod === 'hover' && handleLeave(false)}
>
{popupContent}

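For reference, the timer-tracking pattern removed above reduces to the sketch below — a minimal illustration, not the component's exact code. It assumes the setOpen state setter and the 300ms hover-intent delay shown in the hunk.

// Minimal sketch: keep the pending close timer in a ref so a re-enter
// can cancel it before the 300ms delay fires.
const closeTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null)

const clearCloseTimeout = () => {
  if (closeTimeoutRef.current) {
    clearTimeout(closeTimeoutRef.current)
    closeTimeoutRef.current = null
  }
}

const scheduleClose = () => {
  clearCloseTimeout()
  closeTimeoutRef.current = setTimeout(() => {
    closeTimeoutRef.current = null
    setOpen(false) // assumes the setOpen setter from the component above
  }, 300)
}

// Wiring: onMouseEnter → clearCloseTimeout(); onMouseLeave → scheduleClose();
// a useEffect cleanup calls clearCloseTimeout() on unmount.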
View File

@ -14,6 +14,7 @@ const ErrorMessage = ({
errorMsg,
}: ErrorMessageProps) => {
return (
// eslint-disable-next-line tailwindcss/migration-from-tailwind-2
<div className={cn(
'flex gap-x-0.5 rounded-xl border-[0.5px] border-components-panel-border bg-opacity-40 bg-toast-error-bg p-2 shadow-xs shadow-shadow-shadow-3',
className,

View File

@ -1,129 +0,0 @@
'use client'
import type { FC } from 'react'
import type { DocType } from '@/models/datasets'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Radio from '@/app/components/base/radio'
import Tooltip from '@/app/components/base/tooltip'
import { useMetadataMap } from '@/hooks/use-metadata'
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
import { cn } from '@/utils/classnames'
import s from '../style.module.css'
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
return <div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
}
const IconButton: FC<{ type: DocType, isChecked: boolean }> = ({ type, isChecked = false }) => {
const metadataMap = useMetadataMap()
return (
<Tooltip popupContent={metadataMap[type].text}>
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
<TypeIcon
iconName={metadataMap[type].iconName || ''}
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
/>
</button>
</Tooltip>
)
}
type DocTypeSelectorProps = {
docType: DocType | ''
documentType?: DocType | ''
tempDocType: DocType | ''
onTempDocTypeChange: (type: DocType | '') => void
onConfirm: () => void
onCancel: () => void
}
const DocTypeSelector: FC<DocTypeSelectorProps> = ({
docType,
documentType,
tempDocType,
onTempDocTypeChange,
onConfirm,
onCancel,
}) => {
const { t } = useTranslation()
const isFirstTime = !docType && !documentType
const currValue = tempDocType ?? documentType
return (
<>
{isFirstTime && (
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
)}
<div className={s.operationWrapper}>
{isFirstTime && (
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
)}
{documentType && (
<>
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
</>
)}
<Radio.Group value={currValue ?? ''} onChange={onTempDocTypeChange} className={s.radioGroup}>
{CUSTOMIZABLE_DOC_TYPES.map(type => (
<Radio key={type} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
<IconButton type={type} isChecked={currValue === type} />
</Radio>
))}
</Radio.Group>
{isFirstTime && (
<Button variant="primary" onClick={onConfirm} disabled={!tempDocType}>
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
</Button>
)}
{documentType && (
<div className={s.opBtnWrapper}>
<Button onClick={onConfirm} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">
{t('operation.save', { ns: 'common' })}
</Button>
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>
{t('operation.cancel', { ns: 'common' })}
</Button>
</div>
)}
</div>
</>
)
}
type DocumentTypeDisplayProps = {
displayType: DocType | ''
showChangeLink?: boolean
onChangeClick?: () => void
}
export const DocumentTypeDisplay: FC<DocumentTypeDisplayProps> = ({
displayType,
showChangeLink = false,
onChangeClick,
}) => {
const { t } = useTranslation()
const metadataMap = useMetadataMap()
const effectiveType = displayType || 'book'
return (
<div className={s.documentTypeShow}>
{(displayType || !showChangeLink) && (
<>
<TypeIcon iconName={metadataMap[effectiveType]?.iconName || ''} className={s.iconShow} />
{metadataMap[effectiveType].text}
{showChangeLink && (
<div className="ml-1 inline-flex items-center gap-1">
·
<div onClick={onChangeClick} className="cursor-pointer hover:text-text-accent">
{t('operation.change', { ns: 'common' })}
</div>
</div>
)}
</>
)}
</div>
)
}
export default DocTypeSelector

View File

@ -1,89 +0,0 @@
'use client'
import type { FC, ReactNode } from 'react'
import type { inputType } from '@/hooks/use-metadata'
import { useTranslation } from 'react-i18next'
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
import Input from '@/app/components/base/input'
import { SimpleSelect } from '@/app/components/base/select'
import { getTextWidthWithCanvas } from '@/utils'
import { cn } from '@/utils/classnames'
import s from '../style.module.css'
type FieldInfoProps = {
label: string
value?: string
valueIcon?: ReactNode
displayedValue?: string
defaultValue?: string
showEdit?: boolean
inputType?: inputType
selectOptions?: Array<{ value: string, name: string }>
onUpdate?: (v: string) => void
}
const FieldInfo: FC<FieldInfoProps> = ({
label,
value = '',
valueIcon,
displayedValue = '',
defaultValue,
showEdit = false,
inputType = 'input',
selectOptions = [],
onUpdate,
}) => {
const { t } = useTranslation()
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
const editAlignTop = showEdit && inputType === 'textarea'
const readAlignTop = !showEdit && textNeedWrap
const renderContent = () => {
if (!showEdit)
return displayedValue
if (inputType === 'select') {
return (
<SimpleSelect
onSelect={({ value }) => onUpdate?.(value as string)}
items={selectOptions}
defaultValue={value}
className={s.select}
wrapperClassName={s.selectWrapper}
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
if (inputType === 'textarea') {
return (
<AutoHeightTextarea
onChange={e => onUpdate?.(e.target.value)}
value={value}
className={s.textArea}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<Input
onChange={e => onUpdate?.(e.target.value)}
value={value}
defaultValue={defaultValue}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
<div className="flex grow items-center gap-1 text-text-secondary">
{valueIcon}
{renderContent()}
</div>
</div>
)
}
export default FieldInfo

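For orientation, a minimal usage sketch of the deleted component above in its three input modes; the wrapping Example component and its state are hypothetical.

// Hypothetical parent exercising FieldInfo's input, select, and read-only modes.
const Example: FC = () => {
  const [author, setAuthor] = useState('')
  return (
    <>
      <FieldInfo label="Author" value={author} showEdit onUpdate={setAuthor} />
      <FieldInfo
        label="Language"
        value="en"
        showEdit
        inputType="select"
        selectOptions={[{ value: 'en', name: 'English' }]}
        onUpdate={v => console.log('language ->', v)}
      />
      <FieldInfo label="Title" displayedValue="Read-only value" />
    </>
  )
}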
View File

@ -1,88 +0,0 @@
'use client'
import type { FC } from 'react'
import type { metadataType } from '@/hooks/use-metadata'
import type { FullDocumentDetail } from '@/models/datasets'
import { get } from 'es-toolkit/compat'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import FieldInfo from './field-info'
const map2Options = (map: Record<string, string>) => {
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}
function useCategoryMapResolver(mainField: metadataType | '') {
const languageMap = useLanguages()
const bookCategoryMap = useBookCategories()
const personalDocCategoryMap = usePersonalDocCategories()
const businessDocCategoryMap = useBusinessDocCategories()
return (field: string): Record<string, string> => {
if (field === 'language')
return languageMap
if (field === 'category' && mainField === 'book')
return bookCategoryMap
if (field === 'document_type') {
if (mainField === 'personal_document')
return personalDocCategoryMap
if (mainField === 'business_document')
return businessDocCategoryMap
}
return {}
}
}
type MetadataFieldListProps = {
mainField: metadataType | ''
canEdit?: boolean
metadata?: Record<string, string>
docDetail?: FullDocumentDetail
onFieldUpdate?: (field: string, value: string) => void
}
const MetadataFieldList: FC<MetadataFieldListProps> = ({
mainField,
canEdit = false,
metadata,
docDetail,
onFieldUpdate,
}) => {
const metadataMap = useMetadataMap()
const getCategoryMap = useCategoryMapResolver(mainField)
if (!mainField)
return null
const fieldMap = metadataMap[mainField]?.subFieldsMap
const isFixedField = ['originInfo', 'technicalParameters'].includes(mainField)
const sourceData = isFixedField ? docDetail : metadata
const getDisplayValue = (field: string) => {
const val = get(sourceData, field, '')
if (!val && val !== 0)
return '-'
if (fieldMap[field]?.inputType === 'select')
return getCategoryMap(field)[val]
if (fieldMap[field]?.render)
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
return val
}
return (
<div className="flex flex-col gap-1">
{Object.keys(fieldMap).map(field => (
<FieldInfo
key={fieldMap[field]?.label}
label={fieldMap[field]?.label}
displayedValue={getDisplayValue(field)}
value={get(sourceData, field, '')}
inputType={fieldMap[field]?.inputType || 'input'}
showEdit={canEdit}
onUpdate={val => onFieldUpdate?.(field, val)}
selectOptions={map2Options(getCategoryMap(field))}
/>
))}
</div>
)
}
export default MetadataFieldList

View File

@ -1,137 +0,0 @@
'use client'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe } from '@/utils'
import { useDocumentContext } from '../../context'
type MetadataState = {
documentType?: DocType | ''
metadata: Record<string, string>
}
/**
* Normalize raw doc_type: treat 'others' as empty string.
*/
const normalizeDocType = (rawDocType: string): DocType | '' => {
return rawDocType === 'others' ? '' : rawDocType as DocType | ''
}
type UseMetadataStateOptions = {
docDetail?: FullDocumentDetail
onUpdate?: () => void
}
export function useMetadataState({ docDetail, onUpdate }: UseMetadataStateOptions) {
const { doc_metadata = {} } = docDetail || {}
const rawDocType = docDetail?.doc_type ?? ''
const docType = normalizeDocType(rawDocType)
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)
// If no documentType yet, start in editing + showDocTypes mode
const [editStatus, setEditStatus] = useState(!docType)
const [metadataParams, setMetadataParams] = useState<MetadataState>(
docType
? { documentType: docType, metadata: (doc_metadata || {}) as Record<string, string> }
: { metadata: {} },
)
const [showDocTypes, setShowDocTypes] = useState(!docType)
const [tempDocType, setTempDocType] = useState<DocType | ''>('')
const [saveLoading, setSaveLoading] = useState(false)
// Sync local state when the upstream docDetail changes (e.g. after save or navigation).
// These setters are intentionally called together to batch-reset multiple pieces
// of derived editing state that cannot be expressed as pure derived values.
useEffect(() => {
if (docDetail?.doc_type) {
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setEditStatus(false)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setShowDocTypes(false)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setTempDocType(docType)
// eslint-disable-next-line react-hooks-extra/no-direct-set-state-in-use-effect
setMetadataParams({
documentType: docType,
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
})
}
}, [docDetail?.doc_type, docDetail?.doc_metadata, docType])
const confirmDocType = () => {
if (!tempDocType)
return
setMetadataParams({
documentType: tempDocType,
// Clear metadata when switching to a different doc type
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {},
})
setEditStatus(true)
setShowDocTypes(false)
}
const cancelDocType = () => {
setTempDocType(metadataParams.documentType ?? '')
setEditStatus(true)
setShowDocTypes(false)
}
const enableEdit = () => {
setEditStatus(true)
}
const cancelEdit = () => {
setMetadataParams({ documentType: docType || '', metadata: { ...(docDetail?.doc_metadata || {}) } })
setEditStatus(!docType)
if (!docType)
setShowDocTypes(true)
}
const saveMetadata = async () => {
setSaveLoading(true)
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
datasetId,
documentId,
body: {
doc_type: metadataParams.documentType || docType || '',
doc_metadata: metadataParams.metadata,
},
}) as Promise<CommonResponse>)
if (!e)
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
else
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
onUpdate?.()
setEditStatus(false)
setSaveLoading(false)
}
const updateMetadataField = (field: string, value: string) => {
setMetadataParams(prev => ({ ...prev, metadata: { ...prev.metadata, [field]: value } }))
}
return {
docType,
editStatus,
showDocTypes,
tempDocType,
saveLoading,
metadataParams,
setTempDocType,
setShowDocTypes,
confirmDocType,
cancelDocType,
enableEdit,
cancelEdit,
saveMetadata,
updateMetadataField,
}
}

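A usage sketch of the hook above, assuming a docDetail prop and a refetch callback; the markup is illustrative. Note that asyncRunSafe resolves to an error-first tuple, which is why saveMetadata destructures const [e].

// Sketch: wiring the hook's state and actions to UI handlers.
const MetadataPanel: FC<{ docDetail?: FullDocumentDetail, refetch: () => void }> = ({ docDetail, refetch }) => {
  const {
    editStatus,
    saveLoading,
    enableEdit,
    cancelEdit,
    saveMetadata,
    updateMetadataField,
  } = useMetadataState({ docDetail, onUpdate: refetch })

  if (!editStatus)
    return <button type="button" onClick={enableEdit}>Edit</button>

  return (
    <>
      <input onChange={e => updateMetadataField('author', e.target.value)} />
      <button type="button" onClick={saveMetadata} disabled={saveLoading}>Save</button>
      <button type="button" onClick={cancelEdit}>Cancel</button>
    </>
  )
}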
View File

@ -1,6 +1,7 @@
import type { FullDocumentDetail } from '@/models/datasets'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import Metadata, { FieldInfo } from './index'
// Mock document context
@ -120,6 +121,7 @@ vi.mock('@/hooks/use-metadata', () => ({
}),
}))
// Mock getTextWidthWithCanvas
vi.mock('@/utils', () => ({
asyncRunSafe: async (promise: Promise<unknown>) => {
try {
@ -133,32 +135,33 @@ vi.mock('@/utils', () => ({
getTextWidthWithCanvas: () => 100,
}))
const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
id: 'doc-1',
name: 'Test Document',
doc_type: 'book',
doc_metadata: {
title: 'Test Book',
author: 'Test Author',
language: 'en',
},
data_source_type: 'upload_file',
segment_count: 10,
hit_count: 5,
...overrides,
} as FullDocumentDetail)
describe('Metadata', () => {
beforeEach(() => {
vi.clearAllMocks()
})
const createMockDocDetail = (overrides = {}): FullDocumentDetail => ({
id: 'doc-1',
name: 'Test Document',
doc_type: 'book',
doc_metadata: {
title: 'Test Book',
author: 'Test Author',
language: 'en',
},
data_source_type: 'upload_file',
segment_count: 10,
hit_count: 5,
...overrides,
} as FullDocumentDetail)
const defaultProps = {
docDetail: createMockDocDetail(),
loading: false,
onUpdate: vi.fn(),
}
// Rendering tests
describe('Rendering', () => {
it('should render without crashing', () => {
// Arrange & Act
@ -188,7 +191,7 @@ describe('Metadata', () => {
// Arrange & Act
render(<Metadata {...defaultProps} loading={true} />)
// Assert - Loading component should be rendered, title should not
// Assert - Loading component should be rendered
expect(screen.queryByText(/metadata\.title/i)).not.toBeInTheDocument()
})
@ -201,7 +204,7 @@ describe('Metadata', () => {
})
})
// Edit mode (tests useMetadataState hook integration)
// Edit mode tests
describe('Edit Mode', () => {
it('should enter edit mode when edit button is clicked', () => {
// Arrange
@ -300,7 +303,7 @@ describe('Metadata', () => {
})
})
// Document type selection (tests DocTypeSelector sub-component integration)
// Document type selection
describe('Document Type Selection', () => {
it('should show doc type selection when no doc_type exists', () => {
// Arrange
@ -350,13 +353,13 @@ describe('Metadata', () => {
})
})
// Fixed fields (tests MetadataFieldList sub-component integration)
// Origin info and technical parameters
describe('Fixed Fields', () => {
it('should render origin info fields', () => {
// Arrange & Act
render(<Metadata {...defaultProps} />)
// Assert
// Assert - Origin info fields should be displayed
expect(screen.getByText('Data Source Type')).toBeInTheDocument()
})
@ -379,7 +382,7 @@ describe('Metadata', () => {
// Act
const { container } = render(<Metadata {...defaultProps} docDetail={docDetail} />)
// Assert
// Assert - should render without crashing
expect(container.firstChild).toBeInTheDocument()
})
@ -387,7 +390,7 @@ describe('Metadata', () => {
// Arrange & Act
const { container } = render(<Metadata {...defaultProps} docDetail={undefined} loading={false} />)
// Assert
// Assert - should render without crashing
expect(container.firstChild).toBeInTheDocument()
})
@ -422,6 +425,7 @@ describe('Metadata', () => {
})
})
// FieldInfo component tests
describe('FieldInfo', () => {
beforeEach(() => {
vi.clearAllMocks()
@ -539,149 +543,3 @@ describe('FieldInfo', () => {
})
})
})
// --- useMetadataState hook coverage tests (via component interactions) ---
describe('useMetadataState coverage', () => {
beforeEach(() => {
vi.clearAllMocks()
})
const defaultProps = {
docDetail: createMockDocDetail(),
loading: false,
onUpdate: vi.fn(),
}
describe('cancelDocType', () => {
it('should cancel doc type change and return to edit mode', () => {
// Arrange
render(<Metadata {...defaultProps} />)
// Enter edit mode → click change to open doc type selector
fireEvent.click(screen.getByText(/operation\.edit/i))
fireEvent.click(screen.getByText(/operation\.change/i))
// Now in doc type selector mode — should show cancel button
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
// Act — cancel the doc type change
fireEvent.click(screen.getByText(/operation\.cancel/i))
// Assert — should be back to edit mode (cancel + save buttons visible)
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
})
})
describe('confirmDocType', () => {
it('should confirm same doc type and return to edit mode keeping metadata', () => {
// Arrange — useEffect syncs tempDocType='book' from docDetail
render(<Metadata {...defaultProps} />)
// Enter edit mode → click change to open doc type selector
fireEvent.click(screen.getByText(/operation\.edit/i))
fireEvent.click(screen.getByText(/operation\.change/i))
// DocTypeSelector shows save/cancel buttons
expect(screen.getByText(/metadata\.docTypeChangeTitle/i)).toBeInTheDocument()
// Act — click save to confirm same doc type (tempDocType='book')
fireEvent.click(screen.getByText(/operation\.save/i))
// Assert — should return to edit mode with metadata fields visible
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
expect(screen.getByText(/operation\.save/i)).toBeInTheDocument()
})
})
describe('cancelEdit when no docType', () => {
it('should show doc type selection when cancel is clicked with doc_type others', () => {
// Arrange — doc with 'others' type normalizes to '' internally.
// The useEffect sees doc_type='others' (truthy) and syncs state,
// so the component initially shows view mode. Enter edit → cancel to trigger cancelEdit.
const docDetail = createMockDocDetail({ doc_type: 'others' })
render(<Metadata {...defaultProps} docDetail={docDetail} />)
// 'others' is normalized to '' → useEffect fires (doc_type truthy) → view mode
// The rendered type uses default 'book' fallback for display
expect(screen.getByText(/operation\.edit/i)).toBeInTheDocument()
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
expect(screen.getByText(/operation\.cancel/i)).toBeInTheDocument()
// Act — cancel edit; internally docType is '' so cancelEdit goes to showDocTypes
fireEvent.click(screen.getByText(/operation\.cancel/i))
// Assert — should show doc type selection since normalized docType was ''
expect(screen.getByText(/metadata\.docTypeSelectTitle/i)).toBeInTheDocument()
})
})
describe('updateMetadataField', () => {
it('should update metadata field value via input', () => {
// Arrange
render(<Metadata {...defaultProps} />)
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
// Act — find an input and change its value (Title field)
const inputs = screen.getAllByRole('textbox')
expect(inputs.length).toBeGreaterThan(0)
fireEvent.change(inputs[0], { target: { value: 'Updated Title' } })
// Assert — the input should have the new value
expect(inputs[0]).toHaveValue('Updated Title')
})
})
describe('saveMetadata calls modifyDocMetadata with correct body', () => {
it('should pass doc_type and doc_metadata in save request', async () => {
// Arrange
mockModifyDocMetadata.mockResolvedValueOnce({})
render(<Metadata {...defaultProps} />)
// Enter edit mode
fireEvent.click(screen.getByText(/operation\.edit/i))
// Act — save
fireEvent.click(screen.getByText(/operation\.save/i))
// Assert
await waitFor(() => {
expect(mockModifyDocMetadata).toHaveBeenCalledWith(
expect.objectContaining({
datasetId: 'test-dataset-id',
documentId: 'test-document-id',
body: expect.objectContaining({
doc_type: 'book',
}),
}),
)
})
})
})
describe('useEffect sync', () => {
it('should handle doc_metadata being null in effect sync', () => {
// Arrange — first render with null metadata
const { rerender } = render(
<Metadata
{...defaultProps}
docDetail={createMockDocDetail({ doc_metadata: null })}
/>,
)
// Act — rerender with a different doc_type to trigger useEffect sync
rerender(
<Metadata
{...defaultProps}
docDetail={createMockDocDetail({ doc_type: 'paper', doc_metadata: null })}
/>,
)
// Assert — should render without crashing, showing Paper type
expect(screen.getByText('Paper')).toBeInTheDocument()
})
})
})

View File

@ -1,124 +1,422 @@
'use client'
import type { FC } from 'react'
import type { FullDocumentDetail } from '@/models/datasets'
import type { FC, ReactNode } from 'react'
import type { inputType, metadataType } from '@/hooks/use-metadata'
import type { CommonResponse } from '@/models/common'
import type { DocType, FullDocumentDetail } from '@/models/datasets'
import { PencilIcon } from '@heroicons/react/24/outline'
import { get } from 'es-toolkit/compat'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AutoHeightTextarea from '@/app/components/base/auto-height-textarea'
import Button from '@/app/components/base/button'
import Divider from '@/app/components/base/divider'
import Input from '@/app/components/base/input'
import Loading from '@/app/components/base/loading'
import { useMetadataMap } from '@/hooks/use-metadata'
import DocTypeSelector, { DocumentTypeDisplay } from './components/doc-type-selector'
import MetadataFieldList from './components/metadata-field-list'
import { useMetadataState } from './hooks/use-metadata-state'
import Radio from '@/app/components/base/radio'
import { SimpleSelect } from '@/app/components/base/select'
import { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { useBookCategories, useBusinessDocCategories, useLanguages, useMetadataMap, usePersonalDocCategories } from '@/hooks/use-metadata'
import { CUSTOMIZABLE_DOC_TYPES } from '@/models/datasets'
import { modifyDocMetadata } from '@/service/datasets'
import { asyncRunSafe, getTextWidthWithCanvas } from '@/utils'
import { cn } from '@/utils/classnames'
import { useDocumentContext } from '../context'
import s from './style.module.css'
export { default as FieldInfo } from './components/field-info'
const map2Options = (map: { [key: string]: string }) => {
return Object.keys(map).map(key => ({ value: key, name: map[key] }))
}
type MetadataProps = {
type IFieldInfoProps = {
label: string
value?: string
valueIcon?: ReactNode
displayedValue?: string
defaultValue?: string
showEdit?: boolean
inputType?: inputType
selectOptions?: Array<{ value: string, name: string }>
onUpdate?: (v: any) => void
}
export const FieldInfo: FC<IFieldInfoProps> = ({
label,
value = '',
valueIcon,
displayedValue = '',
defaultValue,
showEdit = false,
inputType = 'input',
selectOptions = [],
onUpdate,
}) => {
const { t } = useTranslation()
const textNeedWrap = getTextWidthWithCanvas(displayedValue) > 190
const editAlignTop = showEdit && inputType === 'textarea'
const readAlignTop = !showEdit && textNeedWrap
const renderContent = () => {
if (!showEdit)
return displayedValue
if (inputType === 'select') {
return (
<SimpleSelect
onSelect={({ value }) => onUpdate?.(value as string)}
items={selectOptions}
defaultValue={value}
className={s.select}
wrapperClassName={s.selectWrapper}
placeholder={`${t('metadata.placeholder.select', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
if (inputType === 'textarea') {
return (
<AutoHeightTextarea
onChange={e => onUpdate?.(e.target.value)}
value={value}
className={s.textArea}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<Input
onChange={e => onUpdate?.(e.target.value)}
value={value}
defaultValue={defaultValue}
placeholder={`${t('metadata.placeholder.add', { ns: 'datasetDocuments' })}${label}`}
/>
)
}
return (
<div className={cn('flex min-h-5 items-center gap-1 py-0.5 text-xs', editAlignTop && '!items-start', readAlignTop && '!items-start pt-1')}>
<div className={cn('w-[200px] shrink-0 overflow-hidden text-ellipsis whitespace-nowrap text-text-tertiary', editAlignTop && 'pt-1')}>{label}</div>
<div className="flex grow items-center gap-1 text-text-secondary">
{valueIcon}
{renderContent()}
</div>
</div>
)
}
const TypeIcon: FC<{ iconName: string, className?: string }> = ({ iconName, className = '' }) => {
return (
<div className={cn(s.commonIcon, s[`${iconName}Icon`], className)} />
)
}
const IconButton: FC<{
type: DocType
isChecked: boolean
}> = ({ type, isChecked = false }) => {
const metadataMap = useMetadataMap()
return (
<Tooltip
popupContent={metadataMap[type].text}
>
<button type="button" className={cn(s.iconWrapper, 'group', isChecked ? s.iconCheck : '')}>
<TypeIcon
iconName={metadataMap[type].iconName || ''}
className={`group-hover:bg-primary-600 ${isChecked ? '!bg-primary-600' : ''}`}
/>
</button>
</Tooltip>
)
}
type IMetadataProps = {
docDetail?: FullDocumentDetail
loading: boolean
onUpdate: () => void
}
const Metadata: FC<MetadataProps> = ({ docDetail, loading, onUpdate }) => {
type MetadataState = {
documentType?: DocType | ''
metadata: Record<string, string>
}
const Metadata: FC<IMetadataProps> = ({ docDetail, loading, onUpdate }) => {
const { doc_metadata = {} } = docDetail || {}
const rawDocType = docDetail?.doc_type ?? ''
const doc_type = rawDocType === 'others' ? '' : rawDocType
const { t } = useTranslation()
const metadataMap = useMetadataMap()
const languageMap = useLanguages()
const bookCategoryMap = useBookCategories()
const personalDocCategoryMap = usePersonalDocCategories()
const businessDocCategoryMap = useBusinessDocCategories()
const [editStatus, setEditStatus] = useState(!doc_type) // if there is no documentType yet, default to editing status
// the initial values depend on the documentType
const [metadataParams, setMetadataParams] = useState<MetadataState>(
doc_type
? {
documentType: doc_type as DocType,
metadata: (doc_metadata || {}) as Record<string, string>,
}
: { metadata: {} },
)
const [showDocTypes, setShowDocTypes] = useState(!doc_type) // whether to show the doc type selector
const [tempDocType, setTempDocType] = useState<DocType | ''>('') // remembers the icon clicked before confirming
const [saveLoading, setSaveLoading] = useState(false)
const {
docType,
editStatus,
showDocTypes,
tempDocType,
saveLoading,
metadataParams,
setTempDocType,
setShowDocTypes,
confirmDocType,
cancelDocType,
enableEdit,
cancelEdit,
saveMetadata,
updateMetadataField,
} = useMetadataState({ docDetail, onUpdate })
const { notify } = useContext(ToastContext)
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)
useEffect(() => {
if (docDetail?.doc_type) {
setEditStatus(false)
setShowDocTypes(false)
setTempDocType(doc_type as DocType | '')
setMetadataParams({
documentType: doc_type as DocType | '',
metadata: (docDetail?.doc_metadata || {}) as Record<string, string>,
})
}
}, [docDetail?.doc_type, docDetail?.doc_metadata, doc_type])
// confirm doc type
const confirmDocType = () => {
if (!tempDocType)
return
setMetadataParams({
documentType: tempDocType,
metadata: tempDocType === metadataParams.documentType ? metadataParams.metadata : {} as Record<string, string>, // changing the doc type clears the metadata
})
setEditStatus(true)
setShowDocTypes(false)
}
// cancel doc type
const cancelDocType = () => {
setTempDocType(metadataParams.documentType ?? '')
setEditStatus(true)
setShowDocTypes(false)
}
// render the doc type selector
const renderSelectDocType = () => {
const { documentType } = metadataParams
if (loading) {
return (
<div className={`${s.main} bg-gray-25`}>
<Loading type="app" />
<>
{!doc_type && !documentType && (
<>
<div className={s.desc}>{t('metadata.desc', { ns: 'datasetDocuments' })}</div>
</>
)}
<div className={s.operationWrapper}>
{!doc_type && !documentType && (
<>
<span className={s.title}>{t('metadata.docTypeSelectTitle', { ns: 'datasetDocuments' })}</span>
</>
)}
{documentType && (
<>
<span className={s.title}>{t('metadata.docTypeChangeTitle', { ns: 'datasetDocuments' })}</span>
<span className={s.changeTip}>{t('metadata.docTypeSelectWarning', { ns: 'datasetDocuments' })}</span>
</>
)}
<Radio.Group value={tempDocType ?? documentType ?? ''} onChange={setTempDocType} className={s.radioGroup}>
{CUSTOMIZABLE_DOC_TYPES.map((type, index) => {
const currValue = tempDocType ?? documentType
return (
<Radio key={index} value={type} className={`${s.radio} ${currValue === type ? 'shadow-none' : ''}`}>
<IconButton
type={type}
isChecked={currValue === type}
/>
</Radio>
)
})}
</Radio.Group>
{!doc_type && !documentType && (
<Button
variant="primary"
onClick={confirmDocType}
disabled={!tempDocType}
>
{t('metadata.firstMetaAction', { ns: 'datasetDocuments' })}
</Button>
)}
{documentType && (
<div className={s.opBtnWrapper}>
<Button onClick={confirmDocType} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary">{t('operation.save', { ns: 'common' })}</Button>
<Button onClick={cancelDocType} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
</div>
)}
</div>
</>
)
}
// render metadata fields, read-only or editable
const renderFieldInfos = ({ mainField = 'book', canEdit }: { mainField?: metadataType | '', canEdit?: boolean }) => {
if (!mainField)
return null
const fieldMap = metadataMap[mainField]?.subFieldsMap
const sourceData = ['originInfo', 'technicalParameters'].includes(mainField) ? docDetail : metadataParams.metadata
const getTargetMap = (field: string) => {
if (field === 'language')
return languageMap
if (field === 'category' && mainField === 'book')
return bookCategoryMap
if (field === 'document_type') {
if (mainField === 'personal_document')
return personalDocCategoryMap
if (mainField === 'business_document')
return businessDocCategoryMap
}
return {} as any
}
const getTargetValue = (field: string) => {
const val = get(sourceData, field, '')
if (!val && val !== 0)
return '-'
if (fieldMap[field]?.inputType === 'select')
return getTargetMap(field)[val]
if (fieldMap[field]?.render)
return fieldMap[field]?.render?.(val, field === 'hit_count' ? get(sourceData, 'segment_count', 0) as number : undefined)
return val
}
return (
<div className="flex flex-col gap-1">
{Object.keys(fieldMap).map((field) => {
return (
<FieldInfo
key={fieldMap[field]?.label}
label={fieldMap[field]?.label}
displayedValue={getTargetValue(field)}
value={get(sourceData, field, '')}
inputType={fieldMap[field]?.inputType || 'input'}
showEdit={canEdit}
onUpdate={(val) => {
setMetadataParams(pre => ({ ...pre, metadata: { ...pre.metadata, [field]: val } }))
}}
selectOptions={map2Options(getTargetMap(field))}
/>
)
})}
</div>
)
}
const enabledEdit = () => {
setEditStatus(true)
}
const onCancel = () => {
setMetadataParams({ documentType: doc_type || '', metadata: { ...docDetail?.doc_metadata } })
setEditStatus(!doc_type)
if (!doc_type)
setShowDocTypes(true)
}
const onSave = async () => {
setSaveLoading(true)
const [e] = await asyncRunSafe<CommonResponse>(modifyDocMetadata({
datasetId,
documentId,
body: {
doc_type: metadataParams.documentType || doc_type || '',
doc_metadata: metadataParams.metadata,
},
}) as Promise<CommonResponse>)
if (!e)
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
else
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
onUpdate?.()
setEditStatus(false)
setSaveLoading(false)
}
return (
<div className={`${s.main} ${editStatus ? 'bg-white' : 'bg-gray-25'}`}>
{/* Header: title + action buttons */}
<div className={s.titleWrapper}>
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
{!editStatus
? (
<Button onClick={enableEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
<PencilIcon className={s.opIcon} />
{t('operation.edit', { ns: 'common' })}
</Button>
)
: !showDocTypes && (
<div className={s.opBtnWrapper}>
<Button onClick={cancelEdit} className={`${s.opBtn} ${s.opCancelBtn}`}>
{t('operation.cancel', { ns: 'common' })}
</Button>
<Button onClick={saveMetadata} className={`${s.opBtn} ${s.opSaveBtn}`} variant="primary" loading={saveLoading}>
{t('operation.save', { ns: 'common' })}
</Button>
</div>
)}
</div>
{/* Document type display / selector */}
{!editStatus
? <DocumentTypeDisplay displayType={docType} />
: showDocTypes
? null
: (
<DocumentTypeDisplay
displayType={metadataParams.documentType || ''}
showChangeLink={editStatus}
onChangeClick={() => setShowDocTypes(true)}
/>
)}
{/* Divider between type display and fields (skip when in first-time selection) */}
{(!docType && showDocTypes) ? null : <Divider />}
{/* Doc type selector or editable metadata fields */}
{showDocTypes
? (
<DocTypeSelector
docType={docType}
documentType={metadataParams.documentType}
tempDocType={tempDocType}
onTempDocTypeChange={setTempDocType}
onConfirm={confirmDocType}
onCancel={cancelDocType}
/>
)
{loading
? (<Loading type="app" />)
: (
<MetadataFieldList
mainField={metadataParams.documentType || ''}
canEdit={editStatus}
metadata={metadataParams.metadata}
docDetail={docDetail}
onFieldUpdate={updateMetadataField}
/>
<>
<div className={s.titleWrapper}>
<span className={s.title}>{t('metadata.title', { ns: 'datasetDocuments' })}</span>
{!editStatus
? (
<Button onClick={enabledEdit} className={`${s.opBtn} ${s.opEditBtn}`}>
<PencilIcon className={s.opIcon} />
{t('operation.edit', { ns: 'common' })}
</Button>
)
: showDocTypes
? null
: (
<div className={s.opBtnWrapper}>
<Button onClick={onCancel} className={`${s.opBtn} ${s.opCancelBtn}`}>{t('operation.cancel', { ns: 'common' })}</Button>
<Button
onClick={onSave}
className={`${s.opBtn} ${s.opSaveBtn}`}
variant="primary"
loading={saveLoading}
>
{t('operation.save', { ns: 'common' })}
</Button>
</div>
)}
</div>
{/* show the selected doc type and the entry for changing it */}
{!editStatus
? (
<div className={s.documentTypeShow}>
<TypeIcon iconName={metadataMap[doc_type || 'book']?.iconName || ''} className={s.iconShow} />
{metadataMap[doc_type || 'book'].text}
</div>
)
: showDocTypes
? null
: (
<div className={s.documentTypeShow}>
{metadataParams.documentType && (
<>
<TypeIcon iconName={metadataMap[metadataParams.documentType || 'book'].iconName || ''} className={s.iconShow} />
{metadataMap[metadataParams.documentType || 'book'].text}
{editStatus && (
<div className="ml-1 inline-flex items-center gap-1">
·
<div
onClick={() => { setShowDocTypes(true) }}
className="cursor-pointer hover:text-text-accent"
>
{t('operation.change', { ns: 'common' })}
</div>
</div>
)}
</>
)}
</div>
)}
{(!doc_type && showDocTypes) ? null : <Divider />}
{showDocTypes ? renderSelectDocType() : renderFieldInfos({ mainField: metadataParams.documentType, canEdit: editStatus })}
{/* show fixed fields */}
<Divider />
{renderFieldInfos({ mainField: 'originInfo', canEdit: false })}
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
<Divider />
{renderFieldInfos({ mainField: 'technicalParameters', canEdit: false })}
</>
)}
{/* Fixed fields: origin info */}
<Divider />
<MetadataFieldList mainField="originInfo" docDetail={docDetail} />
{/* Fixed fields: technical parameters */}
<div className={`${s.title} mt-8`}>{metadataMap.technicalParameters.text}</div>
<Divider />
<MetadataFieldList mainField="technicalParameters" docDetail={docDetail} />
</div>
)
}

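The restored component keeps the same props contract (docDetail, loading, onUpdate), so callers are unchanged; a minimal caller sketch, with the fetch wiring left illustrative:

// Illustrative caller: loading gates the panel, onUpdate triggers a refetch.
const DocumentMetadataPanel: FC<{
  docDetail?: FullDocumentDetail
  isLoading: boolean
  refetch: () => void
}> = ({ docDetail, isLoading, refetch }) => (
  <Metadata docDetail={docDetail} loading={isLoading} onUpdate={refetch} />
)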
View File

@ -159,74 +159,69 @@ const Apps = ({
return (
<div className={cn(
'flex h-full min-h-0 flex-col overflow-hidden border-l-[0.5px] border-divider-regular',
'flex h-full flex-col border-l-[0.5px] border-divider-regular',
)}
>
<div className="flex flex-1 flex-col overflow-y-auto">
{systemFeatures.enable_explore_banner && (
<div className="mt-4 px-12">
<Banner />
</div>
)}
<div className="sticky top-0 z-10 bg-background-body">
<div className={cn(
'flex items-center justify-between px-12 pt-6',
{systemFeatures.enable_explore_banner && (
<div className="mt-4 px-12">
<Banner />
</div>
)}
<div className={cn(
'mt-6 flex items-center justify-between px-12',
)}
>
<div className="flex items-center">
<div className="system-xl-semibold grow truncate text-text-primary">{!hasFilterCondition ? t('apps.title', { ns: 'explore' }) : t('apps.resultNum', { num: searchFilteredList.length, ns: 'explore' })}</div>
{hasFilterCondition && (
<>
<div className="mx-3 h-4 w-px bg-divider-regular"></div>
<Button size="medium" onClick={handleResetFilter}>{t('apps.resetFilter', { ns: 'explore' })}</Button>
</>
)}
>
<div className="flex items-center">
<div className="system-xl-semibold grow truncate text-text-primary">{!hasFilterCondition ? t('apps.title', { ns: 'explore' }) : t('apps.resultNum', { num: searchFilteredList.length, ns: 'explore' })}</div>
{hasFilterCondition && (
<>
<div className="mx-3 h-4 w-px bg-divider-regular"></div>
<Button size="medium" onClick={handleResetFilter}>{t('apps.resetFilter', { ns: 'explore' })}</Button>
</>
)}
</div>
<Input
showLeftIcon
showClearIcon
wrapperClassName="w-[200px] self-start"
value={keywords}
onChange={e => handleKeywordsChange(e.target.value)}
onClear={() => handleKeywordsChange('')}
/>
</div>
<div className="px-12 pb-4 pt-2">
<Category
list={categories}
value={currCategory}
onChange={setCurrCategory}
allCategoriesEn={allCategoriesEn}
/>
</div>
</div>
<Input
showLeftIcon
showClearIcon
wrapperClassName="w-[200px] self-start"
value={keywords}
onChange={e => handleKeywordsChange(e.target.value)}
onClear={() => handleKeywordsChange('')}
/>
</div>
<div className={cn(
'relative flex flex-1 shrink-0 grow flex-col pb-6',
)}
<div className="mt-2 px-12">
<Category
list={categories}
value={currCategory}
onChange={setCurrCategory}
allCategoriesEn={allCategoriesEn}
/>
</div>
<div className={cn(
'relative mt-4 flex flex-1 shrink-0 grow flex-col overflow-auto pb-6',
)}
>
<nav
className={cn(
s.appList,
'grid shrink-0 content-start gap-4 px-6 sm:px-12',
)}
>
<nav
className={cn(
s.appList,
'grid shrink-0 content-start gap-4 px-6 sm:px-12',
)}
>
{searchFilteredList.map(app => (
<AppCard
key={app.app_id}
isExplore
app={app}
canCreate={hasEditPermission}
onCreate={() => {
setCurrApp(app)
setIsShowCreateModal(true)
}}
/>
))}
</nav>
</div>
{searchFilteredList.map(app => (
<AppCard
key={app.app_id}
isExplore
app={app}
canCreate={hasEditPermission}
onCreate={() => {
setCurrApp(app)
setIsShowCreateModal(true)
}}
/>
))}
</nav>
</div>
{isShowCreateModal && (
<CreateAppModal

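The hunk above replaces a single scroll container with a sticky header by a fixed header plus an inner overflow-auto list. For reference, the sticky variant being removed boils down to the pattern below (class names taken from the hunk; Tailwind utilities assumed).

// Sticky-header pattern: one scrollable column; the header sticks to its top edge.
const StickyHeaderList: FC<{ header: React.ReactNode, children: React.ReactNode }> = ({ header, children }) => (
  <div className="flex h-full min-h-0 flex-col overflow-hidden">
    <div className="flex flex-1 flex-col overflow-y-auto">
      <div className="sticky top-0 z-10 bg-background-body">{header}</div>
      {children}
    </div>
  </div>
)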
View File

@ -71,7 +71,7 @@ const Explore: FC<IExploreProps> = ({
}
>
<Sidebar controlUpdateInstalledApps={controlUpdateInstalledApps} />
<div className="h-full min-h-0 w-0 grow">
<div className="w-0 grow">
{children}
</div>
</ExploreContext.Provider>

View File

@ -599,30 +599,20 @@ describe('CommonCreateModal', () => {
},
})
mockUsePluginStore.mockReturnValue(detailWithCredentials)
const existingBuilder = createMockSubscriptionBuilder()
mockVerifyCredentials.mockImplementation((params, { onSuccess }) => {
onSuccess()
})
render(<CommonCreateModal {...defaultProps} builder={existingBuilder} />)
render(<CommonCreateModal {...defaultProps} />)
await waitFor(() => {
expect(mockCreateBuilder).toHaveBeenCalled()
})
fireEvent.click(screen.getByTestId('modal-confirm'))
await waitFor(() => {
expect(mockVerifyCredentials).toHaveBeenCalledWith(
expect.objectContaining({
provider: 'test-provider',
subscriptionBuilderId: existingBuilder.id,
}),
expect.objectContaining({
onSuccess: expect.any(Function),
onError: expect.any(Function),
}),
)
})
await waitFor(() => {
expect(screen.getByTestId('modal-confirm')).toHaveTextContent('pluginTrigger.modal.common.create')
expect(mockVerifyCredentials).toHaveBeenCalled()
})
})
@ -639,12 +629,15 @@ describe('CommonCreateModal', () => {
},
})
mockUsePluginStore.mockReturnValue(detailWithCredentials)
const existingBuilder = createMockSubscriptionBuilder()
mockVerifyCredentials.mockImplementation((params, { onError }) => {
onError(new Error('Verification failed'))
})
render(<CommonCreateModal {...defaultProps} builder={existingBuilder} />)
render(<CommonCreateModal {...defaultProps} />)
await waitFor(() => {
expect(mockCreateBuilder).toHaveBeenCalled()
})
fireEvent.click(screen.getByTestId('modal-confirm'))

View File

@ -1,20 +1,9 @@
import type { PropsWithChildren } from 'react'
import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { act, cleanup, fireEvent, render, screen, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportStatus } from '@/models/app'
import UpdateDSLModal from './update-dsl-modal'
class MockFileReader {
onload: ((this: FileReader, event: ProgressEvent<FileReader>) => void) | null = null
readAsText(_file: Blob) {
const event = { target: { result: 'test content' } } as unknown as ProgressEvent<FileReader>
this.onload?.call(this as unknown as FileReader, event)
}
}
vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader)
// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
@ -145,6 +134,12 @@ vi.mock('@/app/components/workflow/constants', () => ({
WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
}))
afterEach(() => {
cleanup()
vi.clearAllMocks()
vi.useRealTimers()
})
describe('UpdateDSLModal', () => {
const mockOnCancel = vi.fn()
const mockOnBackup = vi.fn()
@ -156,6 +151,11 @@ describe('UpdateDSLModal', () => {
onImport: mockOnImport,
}
// Helper function to flush FileReader microtasks
const flushFileReader = async () => {
await new Promise<void>(resolve => queueMicrotask(resolve))
}
beforeEach(() => {
vi.clearAllMocks()
mockImportDSL.mockResolvedValue({
@ -365,7 +365,11 @@ describe('UpdateDSLModal', () => {
// Select a file
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
// Wait for FileReader to process
await waitFor(() => {
@ -375,12 +379,15 @@ describe('UpdateDSLModal', () => {
// Click import button
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
// Wait for import to be called
await waitFor(() => {
expect(mockImportDSL).toHaveBeenCalled()
})
}, { timeout: 5000 })
})
it('should show success notification on completed import', async () => {
@ -395,7 +402,12 @@ describe('UpdateDSLModal', () => {
// Select a file and click import
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
// Wait for FileReader to process
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -403,13 +415,16 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
type: 'success',
}))
})
}, { timeout: 5000 })
})
it('should call onCancel after successful import', async () => {
@ -423,7 +438,12 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
// Wait for FileReader to process
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -431,7 +451,10 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(mockOnCancel).toHaveBeenCalled()
@ -449,7 +472,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -457,7 +484,10 @@ describe('UpdateDSLModal', () => {
}, { timeout: 1000 })
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(mockOnImport).toHaveBeenCalled()
@ -475,7 +505,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -483,7 +517,10 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
@ -503,7 +540,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -511,7 +552,10 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
@ -531,7 +575,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
// Wait for FileReader to process and button to be enabled
await waitFor(() => {
@ -540,7 +588,10 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
@ -556,7 +607,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
// Wait for FileReader to complete and button to be enabled
await waitFor(() => {
@ -565,7 +620,10 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
@ -594,7 +652,7 @@ describe('UpdateDSLModal', () => {
// Flush the FileReader microtask to ensure fileContent is set
await act(async () => {
await new Promise<void>(resolve => queueMicrotask(resolve))
await flushFileReader()
})
const importButton = screen.getByText('common.overwriteAndImport')
@ -650,7 +708,7 @@ describe('UpdateDSLModal', () => {
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
// Flush microtasks scheduled by the FileReader mock (which uses queueMicrotask)
await new Promise<void>(resolve => queueMicrotask(resolve))
await flushFileReader()
})
const importButton = screen.getByText('common.overwriteAndImport')
@ -662,12 +720,13 @@ describe('UpdateDSLModal', () => {
await Promise.resolve()
// Advance past the 300ms setTimeout in the component
await vi.advanceTimersByTimeAsync(350)
// Flush any pending state updates
await Promise.resolve()
})
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
// Element should be visible immediately after advancing timers
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
vi.useRealTimers()
})
@ -684,7 +743,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -692,7 +755,10 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
// Wait for error modal with version info
await waitFor(() => {
@ -714,7 +780,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -722,7 +792,10 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
// Wait for error modal
await waitFor(() => {
@ -768,7 +841,7 @@ describe('UpdateDSLModal', () => {
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
// Flush microtasks scheduled by the FileReader mock (which uses queueMicrotask)
await new Promise<void>(resolve => queueMicrotask(resolve))
await flushFileReader()
})
const importButton = screen.getByText('common.overwriteAndImport')
@ -780,21 +853,25 @@ describe('UpdateDSLModal', () => {
await Promise.resolve()
// Advance past the 300ms setTimeout in the component
await vi.advanceTimersByTimeAsync(350)
// Flush any pending state updates
await Promise.resolve()
})
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
}, { timeout: 1000 })
// Element should be visible immediately after advancing timers
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
vi.useRealTimers()
// Click confirm button
const confirmButton = screen.getByText('newApp.Confirm')
fireEvent.click(confirmButton)
await act(async () => {
fireEvent.click(confirmButton)
})
await waitFor(() => {
expect(mockImportDSLConfirm).toHaveBeenCalledWith('import-id')
})
vi.useRealTimers()
}, { timeout: 5000 })
})
it('should show success notification after confirm completes', async () => {
@ -857,7 +934,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -865,20 +946,26 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
}, { timeout: 1000 })
}, { timeout: 5000 })
const confirmButton = screen.getByText('newApp.Confirm')
fireEvent.click(confirmButton)
await act(async () => {
fireEvent.click(confirmButton)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
type: 'error',
}))
})
}, { timeout: 5000 })
})
it('should show error notification when confirm throws exception', async () => {
@ -896,7 +983,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -904,20 +995,26 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
}, { timeout: 1000 })
}, { timeout: 5000 })
const confirmButton = screen.getByText('newApp.Confirm')
fireEvent.click(confirmButton)
await act(async () => {
fireEvent.click(confirmButton)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
type: 'error',
}))
})
}, { timeout: 5000 })
})
it('should show error when confirm completes but pipeline_id is missing', async () => {
@ -938,7 +1035,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -946,20 +1047,26 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
}, { timeout: 1000 })
}, { timeout: 5000 })
const confirmButton = screen.getByText('newApp.Confirm')
fireEvent.click(confirmButton)
await act(async () => {
fireEvent.click(confirmButton)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({
type: 'error',
}))
})
}, { timeout: 5000 })
})
it('should call onImport after confirm completes successfully', async () => {
@ -980,7 +1087,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -988,18 +1099,24 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
}, { timeout: 1000 })
}, { timeout: 5000 })
const confirmButton = screen.getByText('newApp.Confirm')
fireEvent.click(confirmButton)
await act(async () => {
fireEvent.click(confirmButton)
})
await waitFor(() => {
expect(mockOnImport).toHaveBeenCalled()
})
}, { timeout: 5000 })
})
it('should call handleCheckPluginDependencies after confirm', async () => {
@ -1026,7 +1143,7 @@ describe('UpdateDSLModal', () => {
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
// Flush microtasks scheduled by the FileReader mock (which uses queueMicrotask)
await new Promise<void>(resolve => queueMicrotask(resolve))
await flushFileReader()
})
const importButton = screen.getByText('common.overwriteAndImport')
@ -1038,20 +1155,24 @@ describe('UpdateDSLModal', () => {
await Promise.resolve()
// Advance past the 300ms setTimeout in the component
await vi.advanceTimersByTimeAsync(350)
// Flush any pending state updates
await Promise.resolve()
})
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
}, { timeout: 1000 })
// Element should be visible immediately after advancing timers
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
vi.useRealTimers()
const confirmButton = screen.getByText('newApp.Confirm')
fireEvent.click(confirmButton)
await act(async () => {
fireEvent.click(confirmButton)
})
await waitFor(() => {
expect(mockHandleCheckPluginDependencies).toHaveBeenCalledWith('test-pipeline-id', true)
})
vi.useRealTimers()
}, { timeout: 5000 })
})
it('should handle undefined imported_dsl_version and current_dsl_version', async () => {
@ -1067,7 +1188,11 @@ describe('UpdateDSLModal', () => {
const fileInput = screen.getByTestId('file-input')
const file = new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
fireEvent.change(fileInput, { target: { files: [file] } })
await act(async () => {
fireEvent.change(fileInput, { target: { files: [file] } })
await flushFileReader()
})
await waitFor(() => {
const importButton = screen.getByText('common.overwriteAndImport')
@ -1075,12 +1200,15 @@ describe('UpdateDSLModal', () => {
})
const importButton = screen.getByText('common.overwriteAndImport')
fireEvent.click(importButton)
await act(async () => {
fireEvent.click(importButton)
})
// Should show error modal even with undefined versions
await waitFor(() => {
expect(screen.getByText('newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
}, { timeout: 1000 })
}, { timeout: 5000 })
})
it('should not call importDSLConfirm when importId is not set', async () => {
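The hunk is truncated mid-test above. The flushFileReader helper these tests rely on is defined elsewhere in the file and not shown here; judging from the inline code it replaced (await new Promise<void>(resolve => queueMicrotask(resolve))), a minimal sketch, assuming the FileReader mock schedules its onload via queueMicrotask, would be:

// Sketch only: drains the microtask queue so the FileReader mock's onload
// callback has fired before the test continues. The real helper lives
// outside this hunk; this is inferred from the inline version it replaced.
const flushFileReader = async (): Promise<void> => {
  await new Promise<void>(resolve => queueMicrotask(resolve))
}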

View File

@ -1,17 +1,40 @@
'use client'
import type { MouseEventHandler } from 'react'
import {
RiAlertFill,
RiCloseLine,
RiFileDownloadLine,
} from '@remixicon/react'
import { memo } from 'react'
import {
memo,
useCallback,
useRef,
useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import Uploader from '@/app/components/app/create-from-dsl-modal/uploader'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
import { useUpdateDSLModal } from '../hooks/use-update-dsl-modal'
import VersionMismatchModal from './version-mismatch-modal'
import { ToastContext } from '@/app/components/base/toast'
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { useWorkflowStore } from '@/app/components/workflow/store'
import {
initialEdges,
initialNodes,
} from '@/app/components/workflow/utils'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import {
useImportPipelineDSL,
useImportPipelineDSLConfirm,
} from '@/service/use-pipeline'
import { fetchWorkflowDraft } from '@/service/workflow'
type UpdateDSLModalProps = {
onCancel: () => void
@ -25,17 +48,146 @@ const UpdateDSLModal = ({
onImport,
}: UpdateDSLModalProps) => {
const { t } = useTranslation()
const {
currentFile,
handleFile,
show,
showErrorModal,
setShowErrorModal,
loading,
versions,
handleImport,
onUpdateDSLConfirm,
} = useUpdateDSLModal({ onCancel, onImport })
const { notify } = useContext(ToastContext)
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
const [loading, setLoading] = useState(false)
const { eventEmitter } = useEventEmitterContextContext()
const [show, setShow] = useState(true)
const [showErrorModal, setShowErrorModal] = useState(false)
const [versions, setVersions] = useState<{ importedVersion: string, systemVersion: string }>()
const [importId, setImportId] = useState<string>()
const { handleCheckPluginDependencies } = usePluginDependencies()
const { mutateAsync: importDSL } = useImportPipelineDSL()
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
const workflowStore = useWorkflowStore()
const readFile = (file: File) => {
const reader = new FileReader()
reader.onload = function (event) {
const content = event.target?.result
setFileContent(content as string)
}
reader.readAsText(file)
}
const handleFile = (file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
else
setFileContent('')
}
const handleWorkflowUpdate = useCallback(async (pipelineId: string) => {
const {
graph,
hash,
rag_pipeline_variables,
} = await fetchWorkflowDraft(`/rag/pipelines/${pipelineId}/workflows/draft`)
const { nodes, edges, viewport } = graph
eventEmitter?.emit({
type: WORKFLOW_DATA_UPDATE,
payload: {
nodes: initialNodes(nodes, edges),
edges: initialEdges(edges, nodes),
viewport,
hash,
rag_pipeline_variables: rag_pipeline_variables || [],
},
} as any)
}, [eventEmitter])
const isCreatingRef = useRef(false)
const handleImport: MouseEventHandler = useCallback(async () => {
const { pipelineId } = workflowStore.getState()
// Check the file before latching the guard ref, so an early return cannot leave it stuck at true
if (isCreatingRef.current || !currentFile)
return
isCreatingRef.current = true
try {
if (pipelineId && fileContent) {
setLoading(true)
const response = await importDSL({ mode: DSLImportMode.YAML_CONTENT, yaml_content: fileContent, pipeline_id: pipelineId })
const { id, status, pipeline_id, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
if (!pipeline_id) {
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
return
}
handleWorkflowUpdate(pipeline_id)
if (onImport)
onImport()
notify({
type: status === DSLImportStatus.COMPLETED ? 'success' : 'warning',
message: t(status === DSLImportStatus.COMPLETED ? 'common.importSuccess' : 'common.importWarning', { ns: 'workflow' }),
children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('common.importWarningDetails', { ns: 'workflow' }),
})
await handleCheckPluginDependencies(pipeline_id, true)
setLoading(false)
onCancel()
}
else if (status === DSLImportStatus.PENDING) {
setShow(false)
setTimeout(() => {
setShowErrorModal(true)
}, 300)
setVersions({
importedVersion: imported_dsl_version ?? '',
systemVersion: current_dsl_version ?? '',
})
setImportId(id)
}
else {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
}
catch {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
isCreatingRef.current = false
}, [currentFile, fileContent, onCancel, notify, t, onImport, handleWorkflowUpdate, handleCheckPluginDependencies, workflowStore, importDSL])
const onUpdateDSLConfirm: MouseEventHandler = async () => {
try {
if (!importId)
return
const response = await importDSLConfirm(importId)
const { status, pipeline_id } = response
if (status === DSLImportStatus.COMPLETED) {
if (!pipeline_id) {
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
return
}
handleWorkflowUpdate(pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
if (onImport)
onImport()
notify({ type: 'success', message: t('common.importSuccess', { ns: 'workflow' }) })
setLoading(false)
onCancel()
}
else if (status === DSLImportStatus.FAILED) {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
catch {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}
}
return (
<>
@ -98,12 +250,32 @@ const UpdateDSLModal = ({
</Button>
</div>
</Modal>
<VersionMismatchModal
<Modal
isShow={showErrorModal}
versions={versions}
onClose={() => setShowErrorModal(false)}
onConfirm={onUpdateDSLConfirm}
/>
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="system-md-regular flex grow flex-col text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />
<div>
{t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
<span className="system-md-medium">{versions?.importedVersion}</span>
</div>
<div>
{t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
<span className="system-md-medium">{versions?.systemVersion}</span>
</div>
</div>
</div>
<div className="flex items-start justify-end gap-2 self-stretch pt-6">
<Button variant="secondary" onClick={() => setShowErrorModal(false)}>{t('newApp.Cancel', { ns: 'app' })}</Button>
<Button variant="primary" destructive onClick={onUpdateDSLConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
</div>
</Modal>
</>
)
}
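For reference, a minimal sketch of how a parent might mount this modal, using only the props declared in UpdateDSLModalProps above; the parent component, its state, and the Button usage are hypothetical:

// Hypothetical parent wiring; only onCancel and onImport come from the real props type.
const PipelineToolbar = () => {
  const [showUpdateModal, setShowUpdateModal] = useState(false)
  return (
    <>
      <Button onClick={() => setShowUpdateModal(true)}>Update DSL</Button>
      {showUpdateModal && (
        <UpdateDSLModal
          onCancel={() => setShowUpdateModal(false)}
          onImport={() => { /* e.g. refetch the pipeline draft */ }}
        />
      )}
    </>
  )
}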

View File

@ -1,117 +0,0 @@
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import VersionMismatchModal from './version-mismatch-modal'
describe('VersionMismatchModal', () => {
const mockOnClose = vi.fn()
const mockOnConfirm = vi.fn()
const defaultVersions = {
importedVersion: '0.8.0',
systemVersion: '1.0.0',
}
const defaultProps = {
isShow: true,
versions: defaultVersions,
onClose: mockOnClose,
onConfirm: mockOnConfirm,
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render dialog when isShow is true', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByRole('dialog')).toBeInTheDocument()
})
it('should not render dialog when isShow is false', () => {
render(<VersionMismatchModal {...defaultProps} isShow={false} />)
expect(screen.queryByRole('dialog')).not.toBeInTheDocument()
})
it('should render error title', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
it('should render all error description parts', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorPart1')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart2')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart3')).toBeInTheDocument()
expect(screen.getByText('app.newApp.appCreateDSLErrorPart4')).toBeInTheDocument()
})
it('should display imported and system version numbers', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByText('0.8.0')).toBeInTheDocument()
expect(screen.getByText('1.0.0')).toBeInTheDocument()
})
it('should render cancel and confirm buttons', () => {
render(<VersionMismatchModal {...defaultProps} />)
expect(screen.getByRole('button', { name: /app\.newApp\.Cancel/ })).toBeInTheDocument()
expect(screen.getByRole('button', { name: /app\.newApp\.Confirm/ })).toBeInTheDocument()
})
})
describe('user interactions', () => {
it('should call onClose when cancel button is clicked', () => {
render(<VersionMismatchModal {...defaultProps} />)
fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Cancel/ }))
expect(mockOnClose).toHaveBeenCalledTimes(1)
})
it('should call onConfirm when confirm button is clicked', () => {
render(<VersionMismatchModal {...defaultProps} />)
fireEvent.click(screen.getByRole('button', { name: /app\.newApp\.Confirm/ }))
expect(mockOnConfirm).toHaveBeenCalledTimes(1)
})
})
describe('button variants', () => {
it('should render cancel button with secondary variant', () => {
render(<VersionMismatchModal {...defaultProps} />)
const cancelBtn = screen.getByRole('button', { name: /app\.newApp\.Cancel/ })
expect(cancelBtn).toHaveClass('btn-secondary')
})
it('should render confirm button with primary destructive variant', () => {
render(<VersionMismatchModal {...defaultProps} />)
const confirmBtn = screen.getByRole('button', { name: /app\.newApp\.Confirm/ })
expect(confirmBtn).toHaveClass('btn-primary')
expect(confirmBtn).toHaveClass('btn-destructive')
})
})
describe('edge cases', () => {
it('should handle undefined versions gracefully', () => {
render(<VersionMismatchModal {...defaultProps} versions={undefined} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
it('should handle empty version strings', () => {
const emptyVersions = { importedVersion: '', systemVersion: '' }
render(<VersionMismatchModal {...defaultProps} versions={emptyVersions} />)
expect(screen.getByText('app.newApp.appCreateDSLErrorTitle')).toBeInTheDocument()
})
})
})

View File

@ -1,54 +0,0 @@
import type { MouseEventHandler } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
type VersionMismatchModalProps = {
isShow: boolean
versions?: {
importedVersion: string
systemVersion: string
}
onClose: () => void
onConfirm: MouseEventHandler
}
const VersionMismatchModal = ({
isShow,
versions,
onClose,
onConfirm,
}: VersionMismatchModalProps) => {
const { t } = useTranslation()
return (
<Modal
isShow={isShow}
onClose={onClose}
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="system-md-regular flex grow flex-col text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />
<div>
{t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
<span className="system-md-medium">{versions?.importedVersion}</span>
</div>
<div>
{t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
<span className="system-md-medium">{versions?.systemVersion}</span>
</div>
</div>
</div>
<div className="flex items-start justify-end gap-2 self-stretch pt-6">
<Button variant="secondary" onClick={onClose}>{t('newApp.Cancel', { ns: 'app' })}</Button>
<Button variant="primary" destructive onClick={onConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
</div>
</Modal>
)
}
export default VersionMismatchModal

View File

@ -1,551 +0,0 @@
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DSLImportMode, DSLImportStatus } from '@/models/app'
import { useUpdateDSLModal } from './use-update-dsl-modal'
// --- FileReader stub ---
class MockFileReader {
onload: ((this: FileReader, event: ProgressEvent<FileReader>) => void) | null = null
readAsText(_file: Blob) {
const event = { target: { result: 'test content' } } as unknown as ProgressEvent<FileReader>
this.onload?.call(this as unknown as FileReader, event)
}
}
vi.stubGlobal('FileReader', MockFileReader as unknown as typeof FileReader)
// --- Module-level mock functions ---
const mockNotify = vi.fn()
const mockEmit = vi.fn()
const mockImportDSL = vi.fn()
const mockImportDSLConfirm = vi.fn()
const mockHandleCheckPluginDependencies = vi.fn()
// --- Mocks ---
vi.mock('react-i18next', () => ({
useTranslation: () => ({ t: (key: string) => key }),
}))
vi.mock('use-context-selector', () => ({
useContext: () => ({ notify: mockNotify }),
}))
vi.mock('@/app/components/base/toast', () => ({
ToastContext: {},
}))
vi.mock('@/context/event-emitter', () => ({
useEventEmitterContextContext: () => ({
eventEmitter: { emit: mockEmit },
}),
}))
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: () => ({ pipelineId: 'test-pipeline-id' }),
}),
}))
vi.mock('@/app/components/workflow/utils', () => ({
initialNodes: (nodes: unknown[]) => nodes,
initialEdges: (edges: unknown[]) => edges,
}))
vi.mock('@/app/components/workflow/constants', () => ({
WORKFLOW_DATA_UPDATE: 'WORKFLOW_DATA_UPDATE',
}))
vi.mock('@/app/components/workflow/plugin-dependency/hooks', () => ({
usePluginDependencies: () => ({
handleCheckPluginDependencies: mockHandleCheckPluginDependencies,
}),
}))
vi.mock('@/service/use-pipeline', () => ({
useImportPipelineDSL: () => ({ mutateAsync: mockImportDSL }),
useImportPipelineDSLConfirm: () => ({ mutateAsync: mockImportDSLConfirm }),
}))
vi.mock('@/service/workflow', () => ({
fetchWorkflowDraft: vi.fn().mockResolvedValue({
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
hash: 'test-hash',
rag_pipeline_variables: [],
}),
}))
// --- Helpers ---
const createFile = () => new File(['test content'], 'test.pipeline', { type: 'text/yaml' })
// Cast MouseEventHandler to a plain callable for tests (event param is unused)
type AsyncFn = () => Promise<void>
describe('useUpdateDSLModal', () => {
const mockOnCancel = vi.fn()
const mockOnImport = vi.fn()
const renderUpdateDSLModal = (overrides?: { onImport?: () => void }) =>
renderHook(() =>
useUpdateDSLModal({
onCancel: mockOnCancel,
onImport: overrides?.onImport ?? mockOnImport,
}),
)
beforeEach(() => {
vi.clearAllMocks()
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
})
// Initial state values
describe('initial state', () => {
it('should return correct defaults', () => {
const { result } = renderUpdateDSLModal()
expect(result.current.currentFile).toBeUndefined()
expect(result.current.show).toBe(true)
expect(result.current.showErrorModal).toBe(false)
expect(result.current.loading).toBe(false)
expect(result.current.versions).toBeUndefined()
})
})
// File handling
describe('handleFile', () => {
it('should set currentFile when file is provided', () => {
const { result } = renderUpdateDSLModal()
const file = createFile()
act(() => {
result.current.handleFile(file)
})
expect(result.current.currentFile).toBe(file)
})
it('should clear currentFile when called with undefined', () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
act(() => {
result.current.handleFile(undefined)
})
expect(result.current.currentFile).toBeUndefined()
})
})
// Modal state management
describe('modal state', () => {
it('should allow toggling showErrorModal', () => {
const { result } = renderUpdateDSLModal()
expect(result.current.showErrorModal).toBe(false)
act(() => {
result.current.setShowErrorModal(true)
})
expect(result.current.showErrorModal).toBe(true)
act(() => {
result.current.setShowErrorModal(false)
})
expect(result.current.showErrorModal).toBe(false)
})
})
// Import flow
describe('handleImport', () => {
it('should call importDSL with correct parameters', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockImportDSL).toHaveBeenCalledWith({
mode: DSLImportMode.YAML_CONTENT,
yaml_content: 'test content',
pipeline_id: 'test-pipeline-id',
})
})
it('should not call importDSL when no file is selected', async () => {
const { result } = renderUpdateDSLModal()
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockImportDSL).not.toHaveBeenCalled()
})
// COMPLETED status
it('should notify success on COMPLETED status', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'success' }))
})
it('should call onImport on successful import', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockOnImport).toHaveBeenCalled()
})
it('should call onCancel on successful import', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockOnCancel).toHaveBeenCalled()
})
it('should emit workflow update event on success', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockEmit).toHaveBeenCalled()
})
it('should call handleCheckPluginDependencies on success', async () => {
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockHandleCheckPluginDependencies).toHaveBeenCalledWith('test-pipeline-id', true)
})
// COMPLETED_WITH_WARNINGS status
it('should notify warning on COMPLETED_WITH_WARNINGS status', async () => {
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.COMPLETED_WITH_WARNINGS,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'warning' }))
})
// PENDING status (version mismatch)
it('should switch to version mismatch modal on PENDING status', async () => {
vi.useFakeTimers({ shouldAdvanceTime: true })
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.PENDING,
pipeline_id: 'test-pipeline-id',
imported_dsl_version: '0.8.0',
current_dsl_version: '1.0.0',
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
await vi.advanceTimersByTimeAsync(350)
})
expect(result.current.show).toBe(false)
expect(result.current.showErrorModal).toBe(true)
expect(result.current.versions).toEqual({
importedVersion: '0.8.0',
systemVersion: '1.0.0',
})
vi.useRealTimers()
})
it('should default version strings to empty when undefined', async () => {
vi.useFakeTimers({ shouldAdvanceTime: true })
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.PENDING,
pipeline_id: 'test-pipeline-id',
imported_dsl_version: undefined,
current_dsl_version: undefined,
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
await vi.advanceTimersByTimeAsync(350)
})
expect(result.current.versions).toEqual({
importedVersion: '',
systemVersion: '',
})
vi.useRealTimers()
})
// FAILED / unknown status
it('should notify error on FAILED status', async () => {
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.FAILED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
// Exception
it('should notify error when importDSL throws', async () => {
mockImportDSL.mockRejectedValue(new Error('Network error'))
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
// Missing pipeline_id
it('should notify error when pipeline_id is missing on success', async () => {
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.COMPLETED,
pipeline_id: undefined,
})
const { result } = renderUpdateDSLModal()
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
})
// Confirm flow (after PENDING → version mismatch)
describe('onUpdateDSLConfirm', () => {
// Helper: drive the hook into PENDING state so importId is set
const setupPendingState = async (result: { current: ReturnType<typeof useUpdateDSLModal> }) => {
vi.useFakeTimers({ shouldAdvanceTime: true })
mockImportDSL.mockResolvedValue({
id: 'import-id',
status: DSLImportStatus.PENDING,
pipeline_id: 'test-pipeline-id',
imported_dsl_version: '0.8.0',
current_dsl_version: '1.0.0',
})
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
await vi.advanceTimersByTimeAsync(350)
})
vi.useRealTimers()
vi.clearAllMocks()
mockHandleCheckPluginDependencies.mockResolvedValue(undefined)
}
it('should call importDSLConfirm with the stored importId', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockImportDSLConfirm).toHaveBeenCalledWith('import-id')
})
it('should notify success and call onCancel after successful confirm', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'success' }))
expect(mockOnCancel).toHaveBeenCalled()
})
it('should call onImport after successful confirm', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockOnImport).toHaveBeenCalled()
})
it('should notify error on FAILED confirm status', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.FAILED,
pipeline_id: 'test-pipeline-id',
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
it('should notify error when confirm throws exception', async () => {
mockImportDSLConfirm.mockRejectedValue(new Error('Confirm failed'))
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
it('should notify error when confirm succeeds but pipeline_id is missing', async () => {
mockImportDSLConfirm.mockResolvedValue({
status: DSLImportStatus.COMPLETED,
pipeline_id: undefined,
})
const { result } = renderUpdateDSLModal()
await setupPendingState(result)
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockNotify).toHaveBeenCalledWith(expect.objectContaining({ type: 'error' }))
})
it('should not call importDSLConfirm when importId is not set', async () => {
const { result } = renderUpdateDSLModal()
// No pending state → importId is undefined
await act(async () => {
await (result.current.onUpdateDSLConfirm as unknown as AsyncFn)()
})
expect(mockImportDSLConfirm).not.toHaveBeenCalled()
})
})
// Optional onImport callback
describe('optional onImport', () => {
it('should work without onImport callback', async () => {
const { result } = renderHook(() =>
useUpdateDSLModal({ onCancel: mockOnCancel }),
)
act(() => {
result.current.handleFile(createFile())
})
await act(async () => {
await (result.current.handleImport as unknown as AsyncFn)()
})
// Should succeed without throwing
expect(mockOnCancel).toHaveBeenCalled()
})
})
})

View File

@ -1,205 +0,0 @@
import type { MouseEventHandler } from 'react'
import {
useCallback,
useRef,
useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { WORKFLOW_DATA_UPDATE } from '@/app/components/workflow/constants'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import { useWorkflowStore } from '@/app/components/workflow/store'
import {
initialEdges,
initialNodes,
} from '@/app/components/workflow/utils'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import {
useImportPipelineDSL,
useImportPipelineDSLConfirm,
} from '@/service/use-pipeline'
import { fetchWorkflowDraft } from '@/service/workflow'
type VersionInfo = {
importedVersion: string
systemVersion: string
}
type UseUpdateDSLModalParams = {
onCancel: () => void
onImport?: () => void
}
const isCompletedStatus = (status: DSLImportStatus): boolean =>
status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS
export const useUpdateDSLModal = ({ onCancel, onImport }: UseUpdateDSLModalParams) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const { eventEmitter } = useEventEmitterContextContext()
const workflowStore = useWorkflowStore()
const { handleCheckPluginDependencies } = usePluginDependencies()
const { mutateAsync: importDSL } = useImportPipelineDSL()
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
// File state
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
// Modal state
const [show, setShow] = useState(true)
const [showErrorModal, setShowErrorModal] = useState(false)
// Import state
const [loading, setLoading] = useState(false)
const [versions, setVersions] = useState<VersionInfo>()
const [importId, setImportId] = useState<string>()
const isCreatingRef = useRef(false)
const readFile = (file: File) => {
const reader = new FileReader()
reader.onload = (event) => {
setFileContent(event.target?.result as string)
}
reader.readAsText(file)
}
const handleFile = (file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
if (!file)
setFileContent('')
}
const notifyError = useCallback(() => {
setLoading(false)
notify({ type: 'error', message: t('common.importFailure', { ns: 'workflow' }) })
}, [notify, t])
const updateWorkflow = useCallback(async (pipelineId: string) => {
const { graph, hash, rag_pipeline_variables } = await fetchWorkflowDraft(
`/rag/pipelines/${pipelineId}/workflows/draft`,
)
const { nodes, edges, viewport } = graph
eventEmitter?.emit({
type: WORKFLOW_DATA_UPDATE,
payload: {
nodes: initialNodes(nodes, edges),
edges: initialEdges(edges, nodes),
viewport,
hash,
rag_pipeline_variables: rag_pipeline_variables || [],
},
})
}, [eventEmitter])
const completeImport = useCallback(async (
pipelineId: string | undefined,
status: DSLImportStatus = DSLImportStatus.COMPLETED,
) => {
if (!pipelineId) {
notifyError()
return
}
updateWorkflow(pipelineId)
onImport?.()
const isWarning = status === DSLImportStatus.COMPLETED_WITH_WARNINGS
notify({
type: isWarning ? 'warning' : 'success',
message: t(isWarning ? 'common.importWarning' : 'common.importSuccess', { ns: 'workflow' }),
children: isWarning && t('common.importWarningDetails', { ns: 'workflow' }),
})
await handleCheckPluginDependencies(pipelineId, true)
setLoading(false)
onCancel()
}, [updateWorkflow, onImport, notify, t, handleCheckPluginDependencies, onCancel, notifyError])
const showVersionMismatch = useCallback((
id: string,
importedVersion?: string,
systemVersion?: string,
) => {
setShow(false)
setTimeout(() => setShowErrorModal(true), 300)
setVersions({
importedVersion: importedVersion ?? '',
systemVersion: systemVersion ?? '',
})
setImportId(id)
}, [])
const handleImport: MouseEventHandler = useCallback(async () => {
const { pipelineId } = workflowStore.getState()
if (isCreatingRef.current)
return
isCreatingRef.current = true
if (!currentFile)
return
try {
if (!pipelineId || !fileContent)
return
setLoading(true)
const response = await importDSL({
mode: DSLImportMode.YAML_CONTENT,
yaml_content: fileContent,
pipeline_id: pipelineId,
})
const { id, status, pipeline_id, imported_dsl_version, current_dsl_version } = response
if (isCompletedStatus(status))
await completeImport(pipeline_id, status)
else if (status === DSLImportStatus.PENDING)
showVersionMismatch(id, imported_dsl_version, current_dsl_version)
else
notifyError()
}
catch {
notifyError()
}
isCreatingRef.current = false
}, [currentFile, fileContent, workflowStore, importDSL, completeImport, showVersionMismatch, notifyError])
const onUpdateDSLConfirm: MouseEventHandler = useCallback(async () => {
if (!importId)
return
try {
const { status, pipeline_id } = await importDSLConfirm(importId)
if (status === DSLImportStatus.COMPLETED) {
await completeImport(pipeline_id)
return
}
if (status === DSLImportStatus.FAILED)
notifyError()
}
catch {
notifyError()
}
}, [importId, importDSLConfirm, completeImport, notifyError])
return {
currentFile,
handleFile,
show,
showErrorModal,
setShowErrorModal,
loading,
versions,
handleImport,
onUpdateDSLConfirm,
}
}

View File

@ -1,11 +1,14 @@
import { spawnSync } from 'node:child_process'
import { randomUUID } from 'node:crypto'
import { createSerwistRoute } from '@serwist/turbopack'
const basePath = process.env.NEXT_PUBLIC_BASE_PATH || ''
const revision = spawnSync('git', ['rev-parse', 'HEAD'], { encoding: 'utf-8' }).stdout?.trim() || randomUUID()
export const { dynamic, dynamicParams, revalidate, generateStaticParams, GET } = createSerwistRoute({
additionalPrecacheEntries: [{ url: `${basePath}/_offline.html`, revision }],
swSrc: 'app/sw.ts',
nextConfig: {
basePath,
},
useNativeEsbuild: true,
})
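A note on the revision computation above: if git rev-parse HEAD fails (for example, no .git directory in the build context), the revision falls back to a fresh UUID, which invalidates the precached offline page on every build. A hypothetical resolution, assuming NEXT_PUBLIC_BASE_PATH='/console' and HEAD at abc1234:

// Illustrative values only:
// additionalPrecacheEntries === [{ url: '/console/_offline.html', revision: 'abc1234…' }]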

View File

@ -3,9 +3,7 @@
/// <reference lib="webworker" />
import type { PrecacheEntry, SerwistGlobalConfig } from 'serwist'
import { defaultCache } from '@serwist/turbopack/worker'
import { Serwist } from 'serwist'
import { withLeadingSlash } from 'ufo'
import { CacheableResponsePlugin, CacheFirst, ExpirationPlugin, NetworkFirst, Serwist, StaleWhileRevalidate } from 'serwist'
declare global {
// eslint-disable-next-line ts/consistent-type-definitions
@ -20,30 +18,78 @@ const scopePathname = new URL(self.registration.scope).pathname
const basePath = scopePathname.replace(/\/serwist\/$/, '').replace(/\/$/, '')
const offlineUrl = `${basePath}/_offline.html`
const normalizeManifestUrl = (url: string): string => {
if (url.startsWith('/serwist/'))
return url.replace(/^\/serwist\//, '/')
return withLeadingSlash(url)
}
const manifest = self.__SW_MANIFEST?.map((entry) => {
if (typeof entry === 'string')
return normalizeManifestUrl(entry)
return {
...entry,
url: normalizeManifestUrl(entry.url),
}
})
const serwist = new Serwist({
precacheEntries: manifest,
precacheEntries: self.__SW_MANIFEST,
skipWaiting: true,
disableDevLogs: true,
clientsClaim: true,
navigationPreload: true,
runtimeCaching: defaultCache,
runtimeCaching: [
{
matcher: ({ url }) => url.origin === 'https://fonts.googleapis.com',
handler: new CacheFirst({
cacheName: 'google-fonts',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 4,
maxAgeSeconds: 365 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ url }) => url.origin === 'https://fonts.gstatic.com',
handler: new CacheFirst({
cacheName: 'google-fonts-webfonts',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 4,
maxAgeSeconds: 365 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ request }) => request.destination === 'image',
handler: new CacheFirst({
cacheName: 'images',
plugins: [
new CacheableResponsePlugin({ statuses: [0, 200] }),
new ExpirationPlugin({
maxEntries: 64,
maxAgeSeconds: 30 * 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ request }) => request.destination === 'script' || request.destination === 'style',
handler: new StaleWhileRevalidate({
cacheName: 'static-resources',
plugins: [
new ExpirationPlugin({
maxEntries: 32,
maxAgeSeconds: 24 * 60 * 60,
}),
],
}),
},
{
matcher: ({ url, sameOrigin }) => sameOrigin && url.pathname.startsWith('/api/'),
handler: new NetworkFirst({
cacheName: 'api-cache',
networkTimeoutSeconds: 10,
plugins: [
new ExpirationPlugin({
maxEntries: 16,
maxAgeSeconds: 60 * 60,
}),
],
}),
},
],
fallbacks: {
entries: [
{
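The hunk is cut off above. Given the offlineUrl computed earlier in this file, the fallbacks block presumably serves the offline page for failed document navigations; a hedged completion following Serwist's documented fallbacks shape, not the verbatim file contents:

fallbacks: {
  entries: [
    {
      url: offlineUrl, // `${basePath}/_offline.html`, computed above
      matcher({ request }) {
        // Only fall back for top-level page navigations
        return request.destination === 'document'
      },
    },
  ],
},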

View File

@ -4,19 +4,7 @@ import type { EventEmitter } from 'ahooks/lib/useEventEmitter'
import { useEventEmitter } from 'ahooks'
import { createContext, useContext } from 'use-context-selector'
/**
* Typed event object emitted via the shared EventEmitter.
* Covers workflow updates, prompt-editor commands, DSL export checks, etc.
*/
export type EventEmitterMessage = {
type: string
payload?: unknown
instanceId?: string
}
export type EventEmitterValue = string | EventEmitterMessage
const EventEmitterContext = createContext<{ eventEmitter: EventEmitter<EventEmitterValue> | null }>({
const EventEmitterContext = createContext<{ eventEmitter: EventEmitter<string> | null }>({
eventEmitter: null,
})
@ -28,7 +16,7 @@ type EventEmitterContextProviderProps = {
export const EventEmitterContextProvider = ({
children,
}: EventEmitterContextProviderProps) => {
const eventEmitter = useEventEmitter<EventEmitterValue>()
const eventEmitter = useEventEmitter<string>()
return (
<EventEmitterContext.Provider value={{ eventEmitter }}>
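With the emitter reverted to EventEmitter<string>, structured messages again need a cast at the call site, which is why the WORKFLOW_DATA_UPDATE emit in update-dsl-modal.tsx above ends in as any. Both shapes, taken from this diff:

// Reverted type: emits are nominally strings, so object payloads need a cast.
eventEmitter?.emit({ type: WORKFLOW_DATA_UPDATE, payload } as any)
// The removed EventEmitterValue union accepted the same call without the cast:
// type EventEmitterValue = string | { type: string, payload?: unknown, instanceId?: string }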

View File

@ -38,11 +38,6 @@ pnpm lint:tss
This command lints the entire project and is intended for final verification before committing or pushing changes.
### Introducing New Plugins or Rules
If a new rule surfaces many errors in existing code, or its automatic fixes would produce an unreviewably large diff, do not use the `--fix` option.
Instead, introduce the rule first, use the `--suppress-all` option to temporarily suppress the existing violations, and fix them gradually in subsequent changes.
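For example, a hypothetical invocation (`--suppress-all` is ESLint's bulk-suppressions flag; by default it records the suppressed violations in eslint-suppressions.json):

npx eslint . --suppress-all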
## Type Check
You should be able to see suggestions from TypeScript in your editor for all open files.

File diff suppressed because it is too large

View File

@ -1,9 +1,9 @@
// @ts-check
import antfu from '@antfu/eslint-config'
import pluginQuery from '@tanstack/eslint-plugin-query'
import tailwindcss from 'eslint-plugin-better-tailwindcss'
import sonar from 'eslint-plugin-sonarjs'
import storybook from 'eslint-plugin-storybook'
import tailwind from 'eslint-plugin-tailwindcss'
import dify from './eslint-rules/index.js'
export default antfu(
@ -23,7 +23,7 @@ export default antfu(
},
},
nextjs: true,
ignores: ['public', 'types/doc-paths.ts', 'eslint-suppressions.json'],
ignores: ['public', 'types/doc-paths.ts'],
typescript: {
overrides: {
'ts/consistent-type-definitions': ['error', 'type'],
@ -66,16 +66,42 @@ export default antfu(
sonarjs: sonar,
},
},
tailwind.configs['flat/recommended'],
{
files: ['**/*.{ts,tsx}'],
plugins: {
tailwindcss,
settings: {
tailwindcss: {
// These are the default values but feel free to customize
callees: ['classnames', 'clsx', 'ctl', 'cn', 'classNames'],
config: 'tailwind.config.js', // returned from `loadConfig()` utility if not provided
cssFiles: [
'**/*.css',
'!**/node_modules',
'!**/.*',
'!**/dist',
'!**/build',
'!**/.storybook',
'!**/.next',
'!**/.public',
],
cssFilesRefreshRate: 5_000,
removeDuplicates: true,
skipClassAttribute: false,
whitelist: [],
tags: [], // can be set to e.g. ['tw'] for use in tw`bg-blue`
classRegex: '^class(Name)?$', // can be modified to support custom attributes. E.g. "^tw$" for `twin.macro`
},
},
rules: {
'tailwindcss/enforce-consistent-class-order': 'error',
'tailwindcss/no-duplicate-classes': 'error',
'tailwindcss/no-unnecessary-whitespace': 'error',
'tailwindcss/no-unknown-classes': 'warn',
// due to the 1k-line tailwind config, these rules have performance issues
'tailwindcss/no-contradicting-classname': 'off',
'tailwindcss/enforces-shorthand': 'off',
'tailwindcss/no-custom-classname': 'off',
'tailwindcss/no-unnecessary-arbitrary-value': 'off',
'tailwindcss/no-arbitrary-value': 'off',
'tailwindcss/classnames-order': 'warn',
'tailwindcss/enforces-negative-arbitrary-values': 'warn',
'tailwindcss/migration-from-tailwind-2': 'warn',
},
},
{

View File

@ -29,7 +29,7 @@ const remoteImageURLs = ([hasSetWebPrefix ? new URL(`${process.env.NEXT_PUBLIC_W
const nextConfig: NextConfig = {
basePath: process.env.NEXT_PUBLIC_BASE_PATH || '',
serverExternalPackages: ['esbuild'],
serverExternalPackages: ['esbuild-wasm'],
transpilePackages: ['echarts', 'zrender'],
turbopack: {
rules: codeInspectorPlugin({

View File

@ -46,8 +46,7 @@
"uglify-embed": "node ./bin/uglify-embed",
"i18n:check": "tsx ./scripts/check-i18n.js",
"test": "vitest run",
"test:coverage": "vitest run --coverage",
"test:ci": "vitest run --coverage --reporter vitest-tiny-reporter --silent=passed-only",
"test:coverage": "vitest run --coverage --reporter=dot --silent=passed-only",
"test:watch": "vitest --watch",
"analyze-component": "node ./scripts/analyze-component.js",
"refactor-component": "node ./scripts/refactor-component.js",
@ -154,9 +153,8 @@
"sharp": "0.33.5",
"sortablejs": "1.15.6",
"string-ts": "2.3.1",
"tailwind-merge": "2.6.1",
"tailwind-merge": "2.6.0",
"tldts": "7.0.17",
"ufo": "1.6.3",
"use-context-selector": "2.0.0",
"uuid": "10.0.0",
"zod": "3.25.76",
@ -166,21 +164,21 @@
"devDependencies": {
"@antfu/eslint-config": "7.2.0",
"@chromatic-com/storybook": "5.0.0",
"@eslint-react/eslint-plugin": "2.9.4",
"@eslint-react/eslint-plugin": "2.8.1",
"@mdx-js/loader": "3.1.1",
"@mdx-js/react": "3.1.1",
"@next/bundle-analyzer": "16.1.5",
"@next/eslint-plugin-next": "16.1.6",
"@next/mdx": "16.1.5",
"@rgrove/parse-xml": "4.2.0",
"@serwist/turbopack": "9.5.4",
"@serwist/turbopack": "9.5.0",
"@storybook/addon-docs": "10.2.0",
"@storybook/addon-links": "10.2.0",
"@storybook/addon-onboarding": "10.2.0",
"@storybook/addon-themes": "10.2.0",
"@storybook/nextjs-vite": "10.2.0",
"@storybook/react": "10.2.0",
"@tanstack/eslint-plugin-query": "5.91.4",
"@tanstack/eslint-plugin-query": "5.91.3",
"@tanstack/react-devtools": "0.9.2",
"@tanstack/react-form-devtools": "0.2.12",
"@tanstack/react-query-devtools": "5.90.2",
@ -211,13 +209,13 @@
"autoprefixer": "10.4.21",
"code-inspector-plugin": "1.3.6",
"cross-env": "10.1.0",
"esbuild": "0.27.2",
"esbuild-wasm": "0.27.2",
"eslint": "9.39.2",
"eslint-plugin-better-tailwindcss": "4.1.1",
"eslint-plugin-react-hooks": "7.0.1",
"eslint-plugin-react-refresh": "0.5.0",
"eslint-plugin-react-refresh": "0.4.26",
"eslint-plugin-sonarjs": "3.0.6",
"eslint-plugin-storybook": "10.2.6",
"eslint-plugin-storybook": "10.2.1",
"eslint-plugin-tailwindcss": "3.18.2",
"husky": "9.1.7",
"jsdom": "27.3.0",
"jsdom-testing-mocks": "1.16.0",
@ -227,17 +225,16 @@
"postcss": "8.5.6",
"react-scan": "0.4.3",
"sass": "1.93.2",
"serwist": "9.5.4",
"serwist": "9.5.0",
"storybook": "10.2.0",
"tailwindcss": "3.4.19",
"tailwindcss": "3.4.18",
"tsx": "4.21.0",
"typescript": "5.9.3",
"uglify-js": "3.19.3",
"vite": "7.3.1",
"vite-tsconfig-paths": "6.0.4",
"vitest": "4.0.17",
"vitest-canvas-mock": "1.1.3",
"vitest-tiny-reporter": "1.3.1"
"vitest-canvas-mock": "1.1.3"
},
"pnpm": {
"overrides": {

web/pnpm-lock.yaml (generated, 551 lines changed)

File diff suppressed because it is too large

View File

@ -0,0 +1,157 @@
/**
* Test suite for the classnames utility function
* This utility combines the classnames library with tailwind-merge
* to handle conditional CSS classes and merge conflicting Tailwind classes
*/
import { cn } from './classnames'
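// For reference, a minimal sketch of what './classnames' likely exports,
// assuming the conventional classnames + tailwind-merge composition
// (hypothetical; the actual implementation is not part of this diff):
//
//   import classNames from 'classnames'
//   import { twMerge } from 'tailwind-merge'
//
//   export const cn = (...args: Parameters<typeof classNames>) =>
//     twMerge(classNames(...args))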
describe('classnames', () => {
/**
* Tests basic classnames library features:
* - String concatenation
* - Array handling
* - Falsy value filtering
* - Object-based conditional classes
*/
it('classnames libs feature', () => {
expect(cn('foo')).toBe('foo')
expect(cn('foo', 'bar')).toBe('foo bar')
expect(cn(['foo', 'bar'])).toBe('foo bar')
expect(cn(undefined)).toBe('')
expect(cn(null)).toBe('')
expect(cn(false)).toBe('')
expect(cn({
foo: true,
bar: false,
baz: true,
})).toBe('foo baz')
})
/**
* Tests tailwind-merge functionality:
* - Conflicting class resolution (last one wins)
* - Modifier handling (hover, focus, etc.)
* - Important prefix (!)
* - Custom color classes
* - Arbitrary values
*/
it('tailwind-merge', () => {
/* eslint-disable tailwindcss/classnames-order */
expect(cn('p-0')).toBe('p-0')
expect(cn('text-right text-center text-left')).toBe('text-left')
expect(cn('pl-4 p-8')).toBe('p-8')
expect(cn('m-[2px] m-[4px]')).toBe('m-[4px]')
expect(cn('m-1 m-[4px]')).toBe('m-[4px]')
expect(cn('overflow-x-auto hover:overflow-x-hidden overflow-x-scroll')).toBe(
'hover:overflow-x-hidden overflow-x-scroll',
)
expect(cn('h-10 h-min')).toBe('h-min')
expect(cn('bg-grey-5 bg-hotpink')).toBe('bg-hotpink')
expect(cn('hover:block hover:inline')).toBe('hover:inline')
expect(cn('font-medium !font-bold')).toBe('font-medium !font-bold')
expect(cn('!font-medium !font-bold')).toBe('!font-bold')
expect(cn('text-gray-100 text-primary-200')).toBe('text-primary-200')
expect(cn('text-some-unknown-color text-components-input-bg-disabled text-primary-200')).toBe('text-primary-200')
expect(cn('bg-some-unknown-color bg-components-input-bg-disabled bg-primary-200')).toBe('bg-primary-200')
expect(cn('border-t border-white/10')).toBe('border-t border-white/10')
expect(cn('border-t border-white')).toBe('border-t border-white')
expect(cn('text-3.5xl text-black')).toBe('text-3.5xl text-black')
})
/**
* Tests the integration of classnames and tailwind-merge:
* - Object-based conditional classes with Tailwind conflict resolution
*/
it('classnames combined with tailwind-merge', () => {
expect(cn('text-right', {
'text-center': true,
})).toBe('text-center')
expect(cn('text-right', {
'text-center': false,
})).toBe('text-right')
})
/**
* Tests handling of multiple mixed argument types:
* - Strings, arrays, and objects in a single call
* - Tailwind merge working across different argument types
*/
it('multiple mixed argument types', () => {
expect(cn('foo', ['bar', 'baz'], { qux: true, quux: false })).toBe('foo bar baz qux')
expect(cn('p-4', ['p-2', 'm-4'], { 'text-left': true, 'text-right': true })).toBe('p-2 m-4 text-right')
})
/**
* Tests nested array handling:
* - Deep array flattening
* - Tailwind merge with nested structures
*/
it('nested arrays', () => {
expect(cn(['foo', ['bar', 'baz']])).toBe('foo bar baz')
expect(cn(['p-4', ['p-2', 'text-center']])).toBe('p-2 text-center')
})
/**
* Tests empty input handling:
* - Empty strings, arrays, and objects
* - Mixed empty and non-empty values
*/
it('empty inputs', () => {
expect(cn('')).toBe('')
expect(cn([])).toBe('')
expect(cn({})).toBe('')
expect(cn('', [], {})).toBe('')
expect(cn('foo', '', 'bar')).toBe('foo bar')
})
/**
* Tests number input handling:
* - Truthy numbers converted to strings
* - Zero treated as falsy
*/
it('numbers as inputs', () => {
expect(cn(1)).toBe('1')
expect(cn(0)).toBe('')
expect(cn('foo', 1, 'bar')).toBe('foo 1 bar')
})
/**
* Tests multiple object arguments:
* - Object merging
* - Tailwind conflict resolution across objects
*/
it('multiple objects', () => {
expect(cn({ foo: true }, { bar: true })).toBe('foo bar')
expect(cn({ foo: true, bar: false }, { bar: true, baz: true })).toBe('foo bar baz')
expect(cn({ 'p-4': true }, { 'p-2': true })).toBe('p-2')
})
/**
* Tests complex edge cases:
* - Mixed falsy values
* - Nested arrays with falsy values
* - Multiple conflicting Tailwind classes
*/
it('complex edge cases', () => {
expect(cn('foo', null, undefined, false, 'bar', 0, 1, '')).toBe('foo bar 1')
expect(cn(['foo', null, ['bar', undefined, 'baz']])).toBe('foo bar baz')
expect(cn('text-sm', { 'text-lg': false, 'text-xl': true }, 'text-2xl')).toBe('text-2xl')
})
/**
* Tests important (!) modifier behavior:
* - Important modifiers in objects
* - Conflict resolution with important prefix
*/
it('important modifier with objects', () => {
expect(cn({ '!font-medium': true }, { '!font-bold': true })).toBe('!font-bold')
expect(cn('font-normal', { '!font-bold': true })).toBe('font-normal !font-bold')
})
})