Merge remote-tracking branch 'origin/main' into feat/support-agent-sandbox
@@ -3,6 +3,7 @@ import datetime
import json
import logging
import secrets
import time
from typing import Any

import click
@@ -46,6 +47,8 @@ from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpi
from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
from services.plugin.plugin_service import PluginService
from services.retention.conversation.messages_clean_policy import create_message_clean_policy
from services.retention.conversation.messages_clean_service import MessagesCleanService
from services.retention.workflow_run.clear_free_plan_expired_workflow_run_logs import WorkflowRunCleanup
from tasks.remove_app_and_related_data_task import delete_draft_variables_batch

@@ -2226,3 +2229,79 @@ def migrate_oss(
    except Exception as e:
        db.session.rollback()
        click.echo(click.style(f"Failed to update DB storage_type: {str(e)}", fg="red"))


@click.command("clean-expired-messages", help="Clean expired messages.")
@click.option(
    "--start-from",
    type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
    required=True,
    help="Lower bound (inclusive) for created_at.",
)
@click.option(
    "--end-before",
    type=click.DateTime(formats=["%Y-%m-%d", "%Y-%m-%dT%H:%M:%S"]),
    required=True,
    help="Upper bound (exclusive) for created_at.",
)
@click.option("--batch-size", default=1000, show_default=True, help="Batch size for selecting messages.")
@click.option(
    "--graceful-period",
    default=21,
    show_default=True,
    help="Graceful period in days after subscription expiration, will be ignored when billing is disabled.",
)
@click.option("--dry-run", is_flag=True, default=False, help="Show messages logs would be cleaned without deleting")
|
||||
def clean_expired_messages(
    batch_size: int,
    graceful_period: int,
    start_from: datetime.datetime,
    end_before: datetime.datetime,
    dry_run: bool,
):
    """
    Clean expired messages and related data for tenants based on clean policy.
    """
    click.echo(click.style("clean_messages: start clean messages.", fg="green"))

    start_at = time.perf_counter()

    try:
        # Create policy based on billing configuration
        # NOTE: graceful_period will be ignored when billing is disabled.
        policy = create_message_clean_policy(graceful_period_days=graceful_period)

        # Create and run the cleanup service
        service = MessagesCleanService.from_time_range(
            policy=policy,
            start_from=start_from,
            end_before=end_before,
            batch_size=batch_size,
            dry_run=dry_run,
        )
        stats = service.run()

        end_at = time.perf_counter()
        click.echo(
            click.style(
                f"clean_messages: completed successfully\n"
                f" - Latency: {end_at - start_at:.2f}s\n"
                f" - Batches processed: {stats['batches']}\n"
                f" - Total messages scanned: {stats['total_messages']}\n"
                f" - Messages filtered: {stats['filtered_messages']}\n"
                f" - Messages deleted: {stats['total_deleted']}",
                fg="green",
            )
        )
    except Exception as e:
        end_at = time.perf_counter()
        logger.exception("clean_messages failed")
        click.echo(
            click.style(
                f"clean_messages: failed after {end_at - start_at:.2f}s - {str(e)}",
                fg="red",
            )
        )
        raise

    click.echo(click.style("messages cleanup completed.", fg="green"))

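For reference, once registered this command would typically be run through the same CLI entry point as the other dify commands, e.g. `flask clean-expired-messages --start-from 2024-01-01 --end-before 2024-06-01 --dry-run`; the exact entry point depends on how the API container is invoked, so treat the invocation above as an assumption rather than a documented interface.
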
@@ -189,8 +189,7 @@ class WorkflowEntry:
            )

            try:
                # run node
                generator = node.run()
                generator = cls._traced_node_run(node)
            except Exception as e:
                logger.exception(
                    "error while running node, workflow_id=%s, node_id=%s, node_type=%s, node_version=%s",
@@ -323,8 +322,7 @@ class WorkflowEntry:
                tenant_id=tenant_id,
            )

            # run node
            generator = node.run()
            generator = cls._traced_node_run(node)

            return node, generator
        except Exception as e:
@@ -430,3 +428,26 @@ class WorkflowEntry:
            input_value = current_variable.value | input_value

        variable_pool.add([variable_node_id] + variable_key_list, input_value)

    @staticmethod
    def _traced_node_run(node: Node) -> Generator[GraphNodeEventBase, None, None]:
        """
        Wraps a node's run method with OpenTelemetry tracing and returns a generator.
        """
        # Wrap node.run() with ObservabilityLayer hooks to produce node-level spans
        layer = ObservabilityLayer()
        layer.on_graph_start()
        node.ensure_execution_id()

        def _gen():
            error: Exception | None = None
            layer.on_node_run_start(node)
            try:
                yield from node.run()
            except Exception as exc:
                error = exc
                raise
            finally:
                layer.on_node_run_end(node, error)

        return _gen()

@@ -6,6 +6,7 @@ from .create_site_record_when_app_created import handle as handle_create_site_re
from .delete_tool_parameters_cache_when_sync_draft_workflow import (
    handle as handle_delete_tool_parameters_cache_when_sync_draft_workflow,
)
from .queue_credential_sync_when_tenant_created import handle as handle_queue_credential_sync_when_tenant_created
from .sync_plugin_trigger_when_app_created import handle as handle_sync_plugin_trigger_when_app_created
from .sync_webhook_when_app_created import handle as handle_sync_webhook_when_app_created
from .sync_workflow_schedule_when_app_published import handle as handle_sync_workflow_schedule_when_app_published
@@ -30,6 +31,7 @@ __all__ = [
    "handle_create_installed_app_when_app_created",
    "handle_create_site_record_when_app_created",
    "handle_delete_tool_parameters_cache_when_sync_draft_workflow",
    "handle_queue_credential_sync_when_tenant_created",
    "handle_sync_plugin_trigger_when_app_created",
    "handle_sync_webhook_when_app_created",
    "handle_sync_workflow_schedule_when_app_published",

@@ -0,0 +1,19 @@
from configs import dify_config
from events.tenant_event import tenant_was_created
from services.enterprise.workspace_sync import WorkspaceSyncService


@tenant_was_created.connect
def handle(sender, **kwargs):
    """Queue credential sync when a tenant/workspace is created."""
    # Only queue sync tasks if plugin manager (enterprise feature) is enabled
    if not dify_config.ENTERPRISE_ENABLED:
        return

    tenant = sender

    # Determine source from kwargs if available, otherwise use generic
    source = kwargs.get("source", "tenant_created")

    # Queue credential sync task to Redis for enterprise backend to process
    WorkspaceSyncService.queue_credential_sync(tenant.id, source=source)
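The handler hooks into the existing `tenant_was_created` signal, so no caller changes are needed. A minimal sketch of how it gets invoked (illustrative only; the `source` keyword is optional and defaults to "tenant_created" in the handler above):

    # Hypothetical trigger site: sending the signal with the new tenant as
    # sender runs every connected handler, including the one above.
    tenant_was_created.send(tenant, source="tenant_created")
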
@@ -4,6 +4,7 @@ from dify_app import DifyApp
def init_app(app: DifyApp):
    from commands import (
        add_qdrant_index,
        clean_expired_messages,
        clean_workflow_runs,
        cleanup_orphaned_draft_variables,
        clear_free_plan_tenant_expired_logs,
@@ -60,6 +61,7 @@ def init_app(app: DifyApp):
        transform_datasource_credentials,
        install_rag_pipeline_plugins,
        clean_workflow_runs,
        clean_expired_messages,
    ]
    for cmd in cmds_to_register:
        app.cli.add_command(cmd)

@@ -0,0 +1,33 @@
"""feat: add created_at id index to messages

Revision ID: 3334862ee907
Revises: 905527cc8fd3
Create Date: 2026-01-12 17:29:44.846544

"""
from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '3334862ee907'
down_revision = '905527cc8fd3'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.create_index('message_created_at_id_idx', ['created_at', 'id'], unique=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('messages', schema=None) as batch_op:
        batch_op.drop_index('message_created_at_id_idx')

    # ### end Alembic commands ###
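On PostgreSQL the upgrade step amounts to roughly `CREATE INDEX message_created_at_id_idx ON messages (created_at, id)`; this composite index is what the cursor-based pagination in MessagesCleanService (ordering by created_at, id) relies on.
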
@@ -970,6 +970,7 @@ class Message(Base):
        Index("message_workflow_run_id_idx", "conversation_id", "workflow_run_id"),
        Index("message_created_at_idx", "created_at"),
        Index("message_app_mode_idx", "app_mode"),
        Index("message_created_at_id_idx", "created_at", "id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()))

@@ -1,6 +1,6 @@
[project]
name = "dify-api"
version = "1.11.3"
version = "1.11.4"
requires-python = ">=3.11,<3.13"

dependencies = [

@@ -1,90 +1,62 @@
|
||||
import datetime
|
||||
import logging
|
||||
import time
|
||||
|
||||
import click
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
import app
|
||||
from configs import dify_config
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from extensions.ext_database import db
|
||||
from extensions.ext_redis import redis_client
|
||||
from models.model import (
|
||||
App,
|
||||
Message,
|
||||
MessageAgentThought,
|
||||
MessageAnnotation,
|
||||
MessageChain,
|
||||
MessageFeedback,
|
||||
MessageFile,
|
||||
)
|
||||
from models.web import SavedMessage
|
||||
from services.feature_service import FeatureService
|
||||
from services.retention.conversation.messages_clean_policy import create_message_clean_policy
|
||||
from services.retention.conversation.messages_clean_service import MessagesCleanService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@app.celery.task(queue="dataset")
|
||||
@app.celery.task(queue="retention")
|
||||
def clean_messages():
|
||||
click.echo(click.style("Start clean messages.", fg="green"))
|
||||
start_at = time.perf_counter()
|
||||
plan_sandbox_clean_message_day = datetime.datetime.now() - datetime.timedelta(
|
||||
days=dify_config.PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING
|
||||
)
|
||||
while True:
|
||||
try:
|
||||
# Main query with join and filter
|
||||
messages = (
|
||||
db.session.query(Message)
|
||||
.where(Message.created_at < plan_sandbox_clean_message_day)
|
||||
.order_by(Message.created_at.desc())
|
||||
.limit(100)
|
||||
.all()
|
||||
)
|
||||
"""
|
||||
Clean expired messages based on clean policy.
|
||||
|
||||
except SQLAlchemyError:
|
||||
raise
|
||||
if not messages:
|
||||
break
|
||||
for message in messages:
|
||||
app = db.session.query(App).filter_by(id=message.app_id).first()
|
||||
if not app:
|
||||
logger.warning(
|
||||
"Expected App record to exist, but none was found, app_id=%s, message_id=%s",
|
||||
message.app_id,
|
||||
message.id,
|
||||
)
|
||||
continue
|
||||
features_cache_key = f"features:{app.tenant_id}"
|
||||
plan_cache = redis_client.get(features_cache_key)
|
||||
if plan_cache is None:
|
||||
features = FeatureService.get_features(app.tenant_id)
|
||||
redis_client.setex(features_cache_key, 600, features.billing.subscription.plan)
|
||||
plan = features.billing.subscription.plan
|
||||
else:
|
||||
plan = plan_cache.decode()
|
||||
if plan == CloudPlan.SANDBOX:
|
||||
# clean related message
|
||||
db.session.query(MessageFeedback).where(MessageFeedback.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(MessageAnnotation).where(MessageAnnotation.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(MessageChain).where(MessageChain.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(MessageAgentThought).where(MessageAgentThought.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(MessageFile).where(MessageFile.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(SavedMessage).where(SavedMessage.message_id == message.id).delete(
|
||||
synchronize_session=False
|
||||
)
|
||||
db.session.query(Message).where(Message.id == message.id).delete()
|
||||
db.session.commit()
|
||||
end_at = time.perf_counter()
|
||||
click.echo(click.style(f"Cleaned messages from db success latency: {end_at - start_at}", fg="green"))
|
||||
This task uses MessagesCleanService to efficiently clean messages in batches.
|
||||
The behavior depends on BILLING_ENABLED configuration:
|
||||
- BILLING_ENABLED=True: only delete messages from sandbox tenants (with whitelist/grace period)
|
||||
- BILLING_ENABLED=False: delete all messages within the time range
|
||||
"""
|
||||
click.echo(click.style("clean_messages: start clean messages.", fg="green"))
|
||||
start_at = time.perf_counter()
|
||||
|
||||
try:
|
||||
# Create policy based on billing configuration
|
||||
policy = create_message_clean_policy(
|
||||
graceful_period_days=dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_GRACEFUL_PERIOD,
|
||||
)
|
||||
|
||||
# Create and run the cleanup service
|
||||
service = MessagesCleanService.from_days(
|
||||
policy=policy,
|
||||
days=dify_config.SANDBOX_EXPIRED_RECORDS_RETENTION_DAYS,
|
||||
batch_size=dify_config.SANDBOX_EXPIRED_RECORDS_CLEAN_BATCH_SIZE,
|
||||
)
|
||||
stats = service.run()
|
||||
|
||||
end_at = time.perf_counter()
|
||||
click.echo(
|
||||
click.style(
|
||||
f"clean_messages: completed successfully\n"
|
||||
f" - Latency: {end_at - start_at:.2f}s\n"
|
||||
f" - Batches processed: {stats['batches']}\n"
|
||||
f" - Total messages scanned: {stats['total_messages']}\n"
|
||||
f" - Messages filtered: {stats['filtered_messages']}\n"
|
||||
f" - Messages deleted: {stats['total_deleted']}",
|
||||
fg="green",
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
end_at = time.perf_counter()
|
||||
logger.exception("clean_messages failed")
|
||||
click.echo(
|
||||
click.style(
|
||||
f"clean_messages: failed after {end_at - start_at:.2f}s - {str(e)}",
|
||||
fg="red",
|
||||
)
|
||||
)
|
||||
raise
|
||||
|
||||
api/services/enterprise/workspace_sync.py (new file, 58 lines)
@@ -0,0 +1,58 @@
import json
import logging
import uuid
from datetime import UTC, datetime

from redis import RedisError

from extensions.ext_redis import redis_client

logger = logging.getLogger(__name__)

WORKSPACE_SYNC_QUEUE = "enterprise:workspace:sync:queue"
WORKSPACE_SYNC_PROCESSING = "enterprise:workspace:sync:processing"


class WorkspaceSyncService:
    """Service to publish workspace sync tasks to Redis queue for enterprise backend consumption"""

    @staticmethod
    def queue_credential_sync(workspace_id: str, *, source: str) -> bool:
        """
        Queue a credential sync task for a newly created workspace.

        This publishes a task to Redis that will be consumed by the enterprise backend
        worker to sync credentials with the plugin-manager.

        Args:
            workspace_id: The workspace/tenant ID to sync credentials for
            source: Source of the sync request (for debugging/tracking)

        Returns:
            bool: True if task was queued successfully, False otherwise
        """
        try:
            task = {
                "task_id": str(uuid.uuid4()),
                "workspace_id": workspace_id,
                "retry_count": 0,
                "created_at": datetime.now(UTC).isoformat(),
                "source": source,
            }

            # Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
            redis_client.lpush(WORKSPACE_SYNC_QUEUE, json.dumps(task))

            logger.info(
                "Queued credential sync task for workspace %s, task_id: %s, source: %s",
                workspace_id,
                task["task_id"],
                source,
            )
            return True

        except (RedisError, TypeError) as e:
            logger.error("Failed to queue credential sync for workspace %s: %s", workspace_id, str(e), exc_info=True)
            # Don't raise - we don't want to fail workspace creation if queueing fails
            # The scheduled task will catch it later
            return False
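The queue contract is intentionally simple: JSON tasks pushed with LPUSH and drained with RPOP by the enterprise backend. A minimal consumer sketch follows; it is hypothetical (the real worker is not part of this repository), and both the bare `redis.Redis()` client and the `sync_workspace_credentials` helper are assumptions made only for illustration:

    import json

    import redis

    client = redis.Redis()  # assumption: points at the same Redis instance the API uses

    def drain_workspace_sync_queue() -> None:
        while True:
            raw = client.rpop("enterprise:workspace:sync:queue")
            if raw is None:
                break  # queue drained
            task = json.loads(raw)
            # Each task carries task_id, workspace_id, retry_count, created_at and source.
            sync_workspace_credentials(task["workspace_id"])  # hypothetical enterprise-side call
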
api/services/retention/conversation/messages_clean_policy.py (new file, 216 lines)
@@ -0,0 +1,216 @@
|
||||
import datetime
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Callable, Sequence
|
||||
from dataclasses import dataclass
|
||||
|
||||
from configs import dify_config
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from services.billing_service import BillingService, SubscriptionPlan
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class SimpleMessage:
|
||||
id: str
|
||||
app_id: str
|
||||
created_at: datetime.datetime
|
||||
|
||||
|
||||
class MessagesCleanPolicy(ABC):
|
||||
"""
|
||||
Abstract base class for message cleanup policies.
|
||||
|
||||
A policy determines which messages from a batch should be deleted.
|
||||
"""
|
||||
|
||||
@abstractmethod
|
||||
def filter_message_ids(
|
||||
self,
|
||||
messages: Sequence[SimpleMessage],
|
||||
app_to_tenant: dict[str, str],
|
||||
) -> Sequence[str]:
|
||||
"""
|
||||
Filter messages and return IDs of messages that should be deleted.
|
||||
|
||||
Args:
|
||||
messages: Batch of messages to evaluate
|
||||
app_to_tenant: Mapping from app_id to tenant_id
|
||||
|
||||
Returns:
|
||||
List of message IDs that should be deleted
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
class BillingDisabledPolicy(MessagesCleanPolicy):
|
||||
"""
|
||||
Policy for community or enterprise edition (billing disabled).
|
||||
|
||||
No special filter logic, just return all message ids.
|
||||
"""
|
||||
|
||||
def filter_message_ids(
|
||||
self,
|
||||
messages: Sequence[SimpleMessage],
|
||||
app_to_tenant: dict[str, str],
|
||||
) -> Sequence[str]:
|
||||
return [msg.id for msg in messages]
|
||||
|
||||
|
||||
class BillingSandboxPolicy(MessagesCleanPolicy):
|
||||
"""
|
||||
Policy for sandbox plan tenants in cloud edition (billing enabled).
|
||||
|
||||
Filters messages based on sandbox plan expiration rules:
|
||||
- Skip tenants in the whitelist
|
||||
- Only delete messages from sandbox plan tenants
|
||||
- Respect grace period after subscription expiration
|
||||
- Safe default: if tenant mapping or plan is missing, do NOT delete
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
plan_provider: Callable[[Sequence[str]], dict[str, SubscriptionPlan]],
|
||||
graceful_period_days: int = 21,
|
||||
tenant_whitelist: Sequence[str] | None = None,
|
||||
current_timestamp: int | None = None,
|
||||
) -> None:
|
||||
self._graceful_period_days = graceful_period_days
|
||||
self._tenant_whitelist: Sequence[str] = tenant_whitelist or []
|
||||
self._plan_provider = plan_provider
|
||||
self._current_timestamp = current_timestamp
|
||||
|
||||
def filter_message_ids(
|
||||
self,
|
||||
messages: Sequence[SimpleMessage],
|
||||
app_to_tenant: dict[str, str],
|
||||
) -> Sequence[str]:
|
||||
"""
|
||||
Filter messages based on sandbox plan expiration rules.
|
||||
|
||||
Args:
|
||||
messages: Batch of messages to evaluate
|
||||
app_to_tenant: Mapping from app_id to tenant_id
|
||||
|
||||
Returns:
|
||||
List of message IDs that should be deleted
|
||||
"""
|
||||
if not messages or not app_to_tenant:
|
||||
return []
|
||||
|
||||
# Get unique tenant_ids and fetch subscription plans
|
||||
tenant_ids = list(set(app_to_tenant.values()))
|
||||
tenant_plans = self._plan_provider(tenant_ids)
|
||||
|
||||
if not tenant_plans:
|
||||
return []
|
||||
|
||||
# Apply sandbox deletion rules
|
||||
return self._filter_expired_sandbox_messages(
|
||||
messages=messages,
|
||||
app_to_tenant=app_to_tenant,
|
||||
tenant_plans=tenant_plans,
|
||||
)
|
||||
|
||||
def _filter_expired_sandbox_messages(
|
||||
self,
|
||||
messages: Sequence[SimpleMessage],
|
||||
app_to_tenant: dict[str, str],
|
||||
tenant_plans: dict[str, SubscriptionPlan],
|
||||
) -> list[str]:
|
||||
"""
|
||||
Filter messages that should be deleted based on sandbox plan expiration.
|
||||
|
||||
A message should be deleted if:
|
||||
1. It belongs to a sandbox tenant AND
|
||||
2. Either:
|
||||
a) The tenant has no previous subscription (expiration_date == -1), OR
|
||||
b) The subscription expired more than graceful_period_days ago
|
||||
|
||||
Args:
|
||||
messages: List of message objects with id and app_id attributes
|
||||
app_to_tenant: Mapping from app_id to tenant_id
|
||||
tenant_plans: Mapping from tenant_id to subscription plan info
|
||||
|
||||
Returns:
|
||||
List of message IDs that should be deleted
|
||||
"""
|
||||
current_timestamp = self._current_timestamp
|
||||
if current_timestamp is None:
|
||||
current_timestamp = int(datetime.datetime.now(datetime.UTC).timestamp())
|
||||
|
||||
sandbox_message_ids: list[str] = []
|
||||
graceful_period_seconds = self._graceful_period_days * 24 * 60 * 60
|
||||
|
||||
for msg in messages:
|
||||
# Get tenant_id for this message's app
|
||||
tenant_id = app_to_tenant.get(msg.app_id)
|
||||
if not tenant_id:
|
||||
continue
|
||||
|
||||
# Skip tenant messages in whitelist
|
||||
if tenant_id in self._tenant_whitelist:
|
||||
continue
|
||||
|
||||
# Get subscription plan for this tenant
|
||||
tenant_plan = tenant_plans.get(tenant_id)
|
||||
if not tenant_plan:
|
||||
continue
|
||||
|
||||
plan = str(tenant_plan["plan"])
|
||||
expiration_date = int(tenant_plan["expiration_date"])
|
||||
|
||||
# Only process sandbox plans
|
||||
if plan != CloudPlan.SANDBOX:
|
||||
continue
|
||||
|
||||
# Case 1: No previous subscription (-1 means never had a paid subscription)
|
||||
if expiration_date == -1:
|
||||
sandbox_message_ids.append(msg.id)
|
||||
continue
|
||||
|
||||
# Case 2: Subscription expired beyond grace period
|
||||
if current_timestamp - expiration_date > graceful_period_seconds:
|
||||
sandbox_message_ids.append(msg.id)
|
||||
|
||||
return sandbox_message_ids
|
||||
|
||||
|
||||
def create_message_clean_policy(
|
||||
graceful_period_days: int = 21,
|
||||
current_timestamp: int | None = None,
|
||||
) -> MessagesCleanPolicy:
|
||||
"""
|
||||
Factory function to create the appropriate message clean policy.
|
||||
|
||||
Determines which policy to use based on BILLING_ENABLED configuration:
|
||||
- If BILLING_ENABLED is True: returns BillingSandboxPolicy
|
||||
- If BILLING_ENABLED is False: returns BillingDisabledPolicy
|
||||
|
||||
Args:
|
||||
graceful_period_days: Grace period in days after subscription expiration (default: 21)
|
||||
current_timestamp: Current Unix timestamp for testing (default: None, uses current time)
|
||||
"""
|
||||
if not dify_config.BILLING_ENABLED:
|
||||
logger.info("create_message_clean_policy: billing disabled, using BillingDisabledPolicy")
|
||||
return BillingDisabledPolicy()
|
||||
|
||||
# Billing enabled - fetch whitelist from BillingService
|
||||
tenant_whitelist = BillingService.get_expired_subscription_cleanup_whitelist()
|
||||
plan_provider = BillingService.get_plan_bulk_with_cache
|
||||
|
||||
logger.info(
|
||||
"create_message_clean_policy: billing enabled, using BillingSandboxPolicy "
|
||||
"(graceful_period_days=%s, whitelist=%s)",
|
||||
graceful_period_days,
|
||||
tenant_whitelist,
|
||||
)
|
||||
|
||||
return BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=graceful_period_days,
|
||||
tenant_whitelist=tenant_whitelist,
|
||||
current_timestamp=current_timestamp,
|
||||
)
|
||||
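To illustrate the filter_message_ids contract defined above, here is a minimal sketch of a hypothetical custom policy (not part of this change) that only targets messages belonging to one explicitly named tenant; it assumes MessagesCleanPolicy and SimpleMessage from this module:

    from collections.abc import Sequence

    class SingleTenantPolicy(MessagesCleanPolicy):
        def __init__(self, target_tenant_id: str) -> None:
            self._target_tenant_id = target_tenant_id

        def filter_message_ids(
            self,
            messages: Sequence[SimpleMessage],
            app_to_tenant: dict[str, str],
        ) -> Sequence[str]:
            # Delete only messages whose app maps to the target tenant.
            return [m.id for m in messages if app_to_tenant.get(m.app_id) == self._target_tenant_id]
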
api/services/retention/conversation/messages_clean_service.py (new file, 334 lines)
@@ -0,0 +1,334 @@
|
||||
import datetime
|
||||
import logging
|
||||
import random
|
||||
from collections.abc import Sequence
|
||||
from typing import cast
|
||||
|
||||
from sqlalchemy import delete, select
|
||||
from sqlalchemy.engine import CursorResult
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from extensions.ext_database import db
|
||||
from models.model import (
|
||||
App,
|
||||
AppAnnotationHitHistory,
|
||||
DatasetRetrieverResource,
|
||||
Message,
|
||||
MessageAgentThought,
|
||||
MessageAnnotation,
|
||||
MessageChain,
|
||||
MessageFeedback,
|
||||
MessageFile,
|
||||
)
|
||||
from models.web import SavedMessage
|
||||
from services.retention.conversation.messages_clean_policy import (
|
||||
MessagesCleanPolicy,
|
||||
SimpleMessage,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MessagesCleanService:
|
||||
"""
|
||||
Service for cleaning expired messages based on retention policies.
|
||||
|
||||
Compatible with non-cloud editions (billing disabled): all messages in the time range will be deleted.
|
||||
If billing is enabled: only sandbox plan tenant messages are deleted (with whitelist and grace period support).
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
policy: MessagesCleanPolicy,
|
||||
end_before: datetime.datetime,
|
||||
start_from: datetime.datetime | None = None,
|
||||
batch_size: int = 1000,
|
||||
dry_run: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Initialize the service with cleanup parameters.
|
||||
|
||||
Args:
|
||||
policy: The policy that determines which messages to delete
|
||||
end_before: End time (exclusive) of the range
|
||||
start_from: Optional start time (inclusive) of the range
|
||||
batch_size: Number of messages to process per batch
|
||||
dry_run: Whether to perform a dry run (no actual deletion)
|
||||
"""
|
||||
self._policy = policy
|
||||
self._end_before = end_before
|
||||
self._start_from = start_from
|
||||
self._batch_size = batch_size
|
||||
self._dry_run = dry_run
|
||||
|
||||
@classmethod
|
||||
def from_time_range(
|
||||
cls,
|
||||
policy: MessagesCleanPolicy,
|
||||
start_from: datetime.datetime,
|
||||
end_before: datetime.datetime,
|
||||
batch_size: int = 1000,
|
||||
dry_run: bool = False,
|
||||
) -> "MessagesCleanService":
|
||||
"""
|
||||
Create a service instance for cleaning messages within a specific time range.
|
||||
|
||||
Time range is [start_from, end_before).
|
||||
|
||||
Args:
|
||||
policy: The policy that determines which messages to delete
|
||||
start_from: Start time (inclusive) of the range
|
||||
end_before: End time (exclusive) of the range
|
||||
batch_size: Number of messages to process per batch
|
||||
dry_run: Whether to perform a dry run (no actual deletion)
|
||||
|
||||
Returns:
|
||||
MessagesCleanService instance
|
||||
|
||||
Raises:
|
||||
ValueError: If start_from >= end_before or invalid parameters
|
||||
"""
|
||||
if start_from >= end_before:
|
||||
raise ValueError(f"start_from ({start_from}) must be less than end_before ({end_before})")
|
||||
|
||||
if batch_size <= 0:
|
||||
raise ValueError(f"batch_size ({batch_size}) must be greater than 0")
|
||||
|
||||
logger.info(
|
||||
"clean_messages: start_from=%s, end_before=%s, batch_size=%s, policy=%s",
|
||||
start_from,
|
||||
end_before,
|
||||
batch_size,
|
||||
policy.__class__.__name__,
|
||||
)
|
||||
|
||||
return cls(
|
||||
policy=policy,
|
||||
end_before=end_before,
|
||||
start_from=start_from,
|
||||
batch_size=batch_size,
|
||||
dry_run=dry_run,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_days(
|
||||
cls,
|
||||
policy: MessagesCleanPolicy,
|
||||
days: int = 30,
|
||||
batch_size: int = 1000,
|
||||
dry_run: bool = False,
|
||||
) -> "MessagesCleanService":
|
||||
"""
|
||||
Create a service instance for cleaning messages older than specified days.
|
||||
|
||||
Args:
|
||||
policy: The policy that determines which messages to delete
|
||||
days: Number of days to look back from now
|
||||
batch_size: Number of messages to process per batch
|
||||
dry_run: Whether to perform a dry run (no actual deletion)
|
||||
|
||||
Returns:
|
||||
MessagesCleanService instance
|
||||
|
||||
Raises:
|
||||
ValueError: If invalid parameters
|
||||
"""
|
||||
if days < 0:
|
||||
raise ValueError(f"days ({days}) must be greater than or equal to 0")
|
||||
|
||||
if batch_size <= 0:
|
||||
raise ValueError(f"batch_size ({batch_size}) must be greater than 0")
|
||||
|
||||
end_before = datetime.datetime.now() - datetime.timedelta(days=days)
|
||||
|
||||
logger.info(
|
||||
"clean_messages: days=%s, end_before=%s, batch_size=%s, policy=%s",
|
||||
days,
|
||||
end_before,
|
||||
batch_size,
|
||||
policy.__class__.__name__,
|
||||
)
|
||||
|
||||
return cls(policy=policy, end_before=end_before, start_from=None, batch_size=batch_size, dry_run=dry_run)
|
||||
|
||||
def run(self) -> dict[str, int]:
|
||||
"""
|
||||
Execute the message cleanup operation.
|
||||
|
||||
Returns:
|
||||
Dict with statistics: batches, total_messages, filtered_messages, total_deleted
|
||||
"""
|
||||
return self._clean_messages_by_time_range()
|
||||
|
||||
def _clean_messages_by_time_range(self) -> dict[str, int]:
|
||||
"""
|
||||
Clean messages within a time range using cursor-based pagination.
|
||||
|
||||
Time range is [start_from, end_before)
|
||||
|
||||
Steps:
|
||||
1. Iterate messages using cursor pagination (by created_at, id)
|
||||
2. Query app_id -> tenant_id mapping
|
||||
3. Delegate to policy to determine which messages to delete
|
||||
4. Batch delete messages and their relations
|
||||
|
||||
Returns:
|
||||
Dict with statistics: batches, total_messages, filtered_messages, total_deleted
|
||||
"""
|
||||
stats = {
|
||||
"batches": 0,
|
||||
"total_messages": 0,
|
||||
"filtered_messages": 0,
|
||||
"total_deleted": 0,
|
||||
}
|
||||
|
||||
# Cursor-based pagination using (created_at, id) to avoid infinite loops
|
||||
# and ensure proper ordering with time-based filtering
|
||||
_cursor: tuple[datetime.datetime, str] | None = None
|
||||
|
||||
logger.info(
|
||||
"clean_messages: start cleaning messages (dry_run=%s), start_from=%s, end_before=%s",
|
||||
self._dry_run,
|
||||
self._start_from,
|
||||
self._end_before,
|
||||
)
|
||||
|
||||
while True:
|
||||
stats["batches"] += 1
|
||||
|
||||
# Step 1: Fetch a batch of messages using cursor
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
msg_stmt = (
|
||||
select(Message.id, Message.app_id, Message.created_at)
|
||||
.where(Message.created_at < self._end_before)
|
||||
.order_by(Message.created_at, Message.id)
|
||||
.limit(self._batch_size)
|
||||
)
|
||||
|
||||
if self._start_from:
|
||||
msg_stmt = msg_stmt.where(Message.created_at >= self._start_from)
|
||||
|
||||
# Apply cursor condition: (created_at, id) > (last_created_at, last_message_id)
|
||||
# This translates to:
|
||||
# created_at > last_created_at OR (created_at = last_created_at AND id > last_message_id)
|
||||
if _cursor:
|
||||
# Continuing from previous batch
|
||||
msg_stmt = msg_stmt.where(
|
||||
(Message.created_at > _cursor[0])
|
||||
| ((Message.created_at == _cursor[0]) & (Message.id > _cursor[1]))
|
||||
)
|
||||
|
||||
raw_messages = list(session.execute(msg_stmt).all())
|
||||
messages = [
|
||||
SimpleMessage(id=msg_id, app_id=app_id, created_at=msg_created_at)
|
||||
for msg_id, app_id, msg_created_at in raw_messages
|
||||
]
|
||||
|
||||
# Track total messages fetched across all batches
|
||||
stats["total_messages"] += len(messages)
|
||||
|
||||
if not messages:
|
||||
logger.info("clean_messages (batch %s): no more messages to process", stats["batches"])
|
||||
break
|
||||
|
||||
# Update cursor to the last message's (created_at, id)
|
||||
_cursor = (messages[-1].created_at, messages[-1].id)
|
||||
|
||||
# Step 2: Extract app_ids and query tenant_ids
|
||||
app_ids = list({msg.app_id for msg in messages})
|
||||
|
||||
if not app_ids:
|
||||
logger.info("clean_messages (batch %s): no app_ids found, skip", stats["batches"])
|
||||
continue
|
||||
|
||||
app_stmt = select(App.id, App.tenant_id).where(App.id.in_(app_ids))
|
||||
apps = list(session.execute(app_stmt).all())
|
||||
|
||||
if not apps:
|
||||
logger.info("clean_messages (batch %s): no apps found, skip", stats["batches"])
|
||||
continue
|
||||
|
||||
# Build app_id -> tenant_id mapping
|
||||
app_to_tenant: dict[str, str] = {app.id: app.tenant_id for app in apps}
|
||||
|
||||
# Step 3: Delegate to policy to determine which messages to delete
|
||||
message_ids_to_delete = self._policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
if not message_ids_to_delete:
|
||||
logger.info("clean_messages (batch %s): no messages to delete, skip", stats["batches"])
|
||||
continue
|
||||
|
||||
stats["filtered_messages"] += len(message_ids_to_delete)
|
||||
|
||||
# Step 4: Batch delete messages and their relations
|
||||
if not self._dry_run:
|
||||
with Session(db.engine, expire_on_commit=False) as session:
|
||||
# Delete related records first
|
||||
self._batch_delete_message_relations(session, message_ids_to_delete)
|
||||
|
||||
# Delete messages
|
||||
delete_stmt = delete(Message).where(Message.id.in_(message_ids_to_delete))
|
||||
delete_result = cast(CursorResult, session.execute(delete_stmt))
|
||||
messages_deleted = delete_result.rowcount
|
||||
session.commit()
|
||||
|
||||
stats["total_deleted"] += messages_deleted
|
||||
|
||||
logger.info(
|
||||
"clean_messages (batch %s): processed %s messages, deleted %s messages",
|
||||
stats["batches"],
|
||||
len(messages),
|
||||
messages_deleted,
|
||||
)
|
||||
else:
|
||||
# Log random sample of message IDs that would be deleted (up to 10)
|
||||
sample_size = min(10, len(message_ids_to_delete))
|
||||
sampled_ids = random.sample(list(message_ids_to_delete), sample_size)
|
||||
|
||||
logger.info(
|
||||
"clean_messages (batch %s, dry_run): would delete %s messages, sampling %s ids:",
|
||||
stats["batches"],
|
||||
len(message_ids_to_delete),
|
||||
sample_size,
|
||||
)
|
||||
for msg_id in sampled_ids:
|
||||
logger.info("clean_messages (batch %s, dry_run) sample: message_id=%s", stats["batches"], msg_id)
|
||||
|
||||
logger.info(
|
||||
"clean_messages completed: total batches: %s, total messages: %s, filtered messages: %s, total deleted: %s",
|
||||
stats["batches"],
|
||||
stats["total_messages"],
|
||||
stats["filtered_messages"],
|
||||
stats["total_deleted"],
|
||||
)
|
||||
|
||||
return stats
|
||||
|
||||
@staticmethod
|
||||
def _batch_delete_message_relations(session: Session, message_ids: Sequence[str]) -> None:
|
||||
"""
|
||||
Batch delete all related records for given message IDs.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
message_ids: List of message IDs to delete relations for
|
||||
"""
|
||||
if not message_ids:
|
||||
return
|
||||
|
||||
# Delete all related records in batch
|
||||
session.execute(delete(MessageFeedback).where(MessageFeedback.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(MessageAnnotation).where(MessageAnnotation.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(MessageChain).where(MessageChain.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(MessageAgentThought).where(MessageAgentThought.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(MessageFile).where(MessageFile.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(SavedMessage).where(SavedMessage.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(AppAnnotationHitHistory).where(AppAnnotationHitHistory.message_id.in_(message_ids)))
|
||||
|
||||
session.execute(delete(DatasetRetrieverResource).where(DatasetRetrieverResource.message_id.in_(message_ids)))
|
||||
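Putting the pieces together, a small sketch of how the policy and the service are wired, mirroring what the new CLI command and Celery task do (assumption: this runs inside the Flask app context so db.engine is configured):

    import datetime

    from services.retention.conversation.messages_clean_policy import create_message_clean_policy
    from services.retention.conversation.messages_clean_service import MessagesCleanService

    policy = create_message_clean_policy(graceful_period_days=21)
    service = MessagesCleanService.from_time_range(
        policy=policy,
        start_from=datetime.datetime(2024, 1, 1),
        end_before=datetime.datetime(2024, 6, 1),
        batch_size=1000,
        dry_run=True,  # inspect what would be deleted before deleting anything
    )
    stats = service.run()
    print(stats["batches"], stats["total_messages"], stats["filtered_messages"], stats["total_deleted"])
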
(File diff suppressed because it is too large.)

api/tests/unit_tests/services/test_messages_clean_service.py (new file, 627 lines)
@@ -0,0 +1,627 @@
|
||||
import datetime
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from services.retention.conversation.messages_clean_policy import (
|
||||
BillingDisabledPolicy,
|
||||
BillingSandboxPolicy,
|
||||
SimpleMessage,
|
||||
create_message_clean_policy,
|
||||
)
|
||||
from services.retention.conversation.messages_clean_service import MessagesCleanService
|
||||
|
||||
|
||||
def make_simple_message(msg_id: str, app_id: str) -> SimpleMessage:
|
||||
"""Helper to create a SimpleMessage with a fixed created_at timestamp."""
|
||||
return SimpleMessage(id=msg_id, app_id=app_id, created_at=datetime.datetime(2024, 1, 1))
|
||||
|
||||
|
||||
def make_plan_provider(tenant_plans: dict) -> MagicMock:
|
||||
"""Helper to create a mock plan_provider that returns the given tenant_plans."""
|
||||
provider = MagicMock()
|
||||
provider.return_value = tenant_plans
|
||||
return provider
|
||||
|
||||
|
||||
class TestBillingSandboxPolicyFilterMessageIds:
|
||||
"""Unit tests for BillingSandboxPolicy.filter_message_ids method."""
|
||||
|
||||
# Fixed timestamp for deterministic tests
|
||||
CURRENT_TIMESTAMP = 1000000
|
||||
GRACEFUL_PERIOD_DAYS = 8
|
||||
GRACEFUL_PERIOD_SECONDS = GRACEFUL_PERIOD_DAYS * 24 * 60 * 60
|
||||
|
||||
def test_missing_tenant_mapping_excluded(self):
|
||||
"""Test that messages with missing app-to-tenant mapping are excluded."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant = {} # No mapping
|
||||
tenant_plans = {"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert
|
||||
assert list(result) == []
|
||||
|
||||
def test_missing_tenant_plan_excluded(self):
|
||||
"""Test that messages with missing tenant plan are excluded (safe default)."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"}
|
||||
tenant_plans = {} # No plans
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert
|
||||
assert list(result) == []
|
||||
|
||||
def test_non_sandbox_plan_excluded(self):
|
||||
"""Test that messages from non-sandbox plans (PROFESSIONAL/TEAM) are excluded."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
make_simple_message("msg3", "app3"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.PROFESSIONAL, "expiration_date": -1},
|
||||
"tenant2": {"plan": CloudPlan.TEAM, "expiration_date": -1},
|
||||
"tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}, # Only this one
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - only msg3 (sandbox tenant) should be included
|
||||
assert set(result) == {"msg3"}
|
||||
|
||||
def test_whitelist_skip(self):
|
||||
"""Test that whitelisted tenants are excluded even if sandbox + expired."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"), # Whitelisted - excluded
|
||||
make_simple_message("msg2", "app2"), # Not whitelisted - included
|
||||
make_simple_message("msg3", "app3"), # Whitelisted - excluded
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
"tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
tenant_whitelist = ["tenant1", "tenant3"]
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
tenant_whitelist=tenant_whitelist,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - only msg2 should be included
|
||||
assert set(result) == {"msg2"}
|
||||
|
||||
def test_no_previous_subscription_included(self):
|
||||
"""Test that messages with expiration_date=-1 (no previous subscription) are included."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - all messages should be included
|
||||
assert set(result) == {"msg1", "msg2"}
|
||||
|
||||
def test_within_grace_period_excluded(self):
|
||||
"""Test that messages within grace period are excluded."""
|
||||
# Arrange
|
||||
now = self.CURRENT_TIMESTAMP
|
||||
expired_1_day_ago = now - (1 * 24 * 60 * 60)
|
||||
expired_5_days_ago = now - (5 * 24 * 60 * 60)
|
||||
expired_7_days_ago = now - (7 * 24 * 60 * 60)
|
||||
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
make_simple_message("msg3", "app3"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant3"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_1_day_ago},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_5_days_ago},
|
||||
"tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_7_days_ago},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS, # 8 days
|
||||
current_timestamp=now,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - all within 8-day grace period, none should be included
|
||||
assert list(result) == []
|
||||
|
||||
def test_exactly_at_boundary_excluded(self):
|
||||
"""Test that messages exactly at grace period boundary are excluded (code uses >)."""
|
||||
# Arrange
|
||||
now = self.CURRENT_TIMESTAMP
|
||||
expired_exactly_8_days_ago = now - self.GRACEFUL_PERIOD_SECONDS # Exactly at boundary
|
||||
|
||||
messages = [make_simple_message("msg1", "app1")]
|
||||
app_to_tenant = {"app1": "tenant1"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_exactly_8_days_ago},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=now,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - exactly at boundary (==) should be excluded (code uses >)
|
||||
assert list(result) == []
|
||||
|
||||
def test_beyond_grace_period_included(self):
|
||||
"""Test that messages beyond grace period are included."""
|
||||
# Arrange
|
||||
now = self.CURRENT_TIMESTAMP
|
||||
expired_9_days_ago = now - (9 * 24 * 60 * 60) # Just beyond 8-day grace
|
||||
expired_30_days_ago = now - (30 * 24 * 60 * 60) # Well beyond
|
||||
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_9_days_ago},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": expired_30_days_ago},
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=now,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - both beyond grace period, should be included
|
||||
assert set(result) == {"msg1", "msg2"}
|
||||
|
||||
def test_empty_messages_returns_empty(self):
|
||||
"""Test that empty messages returns empty list."""
|
||||
# Arrange
|
||||
messages: list[SimpleMessage] = []
|
||||
app_to_tenant = {"app1": "tenant1"}
|
||||
plan_provider = make_plan_provider({"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1}})
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert
|
||||
assert list(result) == []
|
||||
|
||||
def test_plan_provider_called_with_correct_tenant_ids(self):
|
||||
"""Test that plan_provider is called with correct tenant_ids."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
make_simple_message("msg3", "app3"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2", "app3": "tenant1"} # tenant1 appears twice
|
||||
plan_provider = make_plan_provider({})
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
current_timestamp=self.CURRENT_TIMESTAMP,
|
||||
)
|
||||
|
||||
# Act
|
||||
policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - plan_provider should be called once with unique tenant_ids
|
||||
plan_provider.assert_called_once()
|
||||
called_tenant_ids = set(plan_provider.call_args[0][0])
|
||||
assert called_tenant_ids == {"tenant1", "tenant2"}
|
||||
|
||||
def test_complex_mixed_scenario(self):
|
||||
"""Test complex scenario with mixed plans, expirations, whitelist, and missing mappings."""
|
||||
# Arrange
|
||||
now = self.CURRENT_TIMESTAMP
|
||||
sandbox_expired_old = now - (15 * 24 * 60 * 60) # Beyond grace
|
||||
sandbox_expired_recent = now - (3 * 24 * 60 * 60) # Within grace
|
||||
future_expiration = now + (30 * 24 * 60 * 60)
|
||||
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"), # Sandbox, no subscription - included
|
||||
make_simple_message("msg2", "app2"), # Sandbox, expired old - included
|
||||
make_simple_message("msg3", "app3"), # Sandbox, within grace - excluded
|
||||
make_simple_message("msg4", "app4"), # Team plan, active - excluded
|
||||
make_simple_message("msg5", "app5"), # No tenant mapping - excluded
|
||||
make_simple_message("msg6", "app6"), # No plan info - excluded
|
||||
make_simple_message("msg7", "app7"), # Sandbox, expired old, whitelisted - excluded
|
||||
]
|
||||
app_to_tenant = {
|
||||
"app1": "tenant1",
|
||||
"app2": "tenant2",
|
||||
"app3": "tenant3",
|
||||
"app4": "tenant4",
|
||||
"app6": "tenant6", # Has mapping but no plan
|
||||
"app7": "tenant7",
|
||||
# app5 has no mapping
|
||||
}
|
||||
tenant_plans = {
|
||||
"tenant1": {"plan": CloudPlan.SANDBOX, "expiration_date": -1},
|
||||
"tenant2": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_old},
|
||||
"tenant3": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_recent},
|
||||
"tenant4": {"plan": CloudPlan.TEAM, "expiration_date": future_expiration},
|
||||
"tenant7": {"plan": CloudPlan.SANDBOX, "expiration_date": sandbox_expired_old},
|
||||
# tenant6 has no plan
|
||||
}
|
||||
plan_provider = make_plan_provider(tenant_plans)
|
||||
tenant_whitelist = ["tenant7"]
|
||||
|
||||
policy = BillingSandboxPolicy(
|
||||
plan_provider=plan_provider,
|
||||
graceful_period_days=self.GRACEFUL_PERIOD_DAYS,
|
||||
tenant_whitelist=tenant_whitelist,
|
||||
current_timestamp=now,
|
||||
)
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - only msg1 and msg2 should be included
|
||||
assert set(result) == {"msg1", "msg2"}
|
||||
|
||||
|
||||
class TestBillingDisabledPolicyFilterMessageIds:
|
||||
"""Unit tests for BillingDisabledPolicy.filter_message_ids method."""
|
||||
|
||||
def test_returns_all_message_ids(self):
|
||||
"""Test that all message IDs are returned (order-preserving)."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
make_simple_message("msg3", "app3"),
|
||||
]
|
||||
app_to_tenant = {"app1": "tenant1", "app2": "tenant2"}
|
||||
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - all message IDs returned in order
|
||||
assert list(result) == ["msg1", "msg2", "msg3"]
|
||||
|
||||
def test_ignores_app_to_tenant(self):
|
||||
"""Test that app_to_tenant mapping is ignored."""
|
||||
# Arrange
|
||||
messages = [
|
||||
make_simple_message("msg1", "app1"),
|
||||
make_simple_message("msg2", "app2"),
|
||||
]
|
||||
app_to_tenant: dict[str, str] = {} # Empty - should be ignored
|
||||
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert - all message IDs still returned
|
||||
assert list(result) == ["msg1", "msg2"]
|
||||
|
||||
def test_empty_messages_returns_empty(self):
|
||||
"""Test that empty messages returns empty list."""
|
||||
# Arrange
|
||||
messages: list[SimpleMessage] = []
|
||||
app_to_tenant = {"app1": "tenant1"}
|
||||
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
result = policy.filter_message_ids(messages, app_to_tenant)
|
||||
|
||||
# Assert
|
||||
assert list(result) == []
|
||||
|
||||
|
||||
class TestCreateMessageCleanPolicy:
|
||||
"""Unit tests for create_message_clean_policy factory function."""
|
||||
|
||||
@patch("services.retention.conversation.messages_clean_policy.dify_config")
|
||||
def test_billing_disabled_returns_billing_disabled_policy(self, mock_config):
|
||||
"""Test that BILLING_ENABLED=False returns BillingDisabledPolicy."""
|
||||
# Arrange
|
||||
mock_config.BILLING_ENABLED = False
|
||||
|
||||
# Act
|
||||
policy = create_message_clean_policy(graceful_period_days=21)
|
||||
|
||||
# Assert
|
||||
assert isinstance(policy, BillingDisabledPolicy)
|
||||
|
||||
@patch("services.retention.conversation.messages_clean_policy.BillingService")
|
||||
@patch("services.retention.conversation.messages_clean_policy.dify_config")
|
||||
def test_billing_enabled_policy_has_correct_internals(self, mock_config, mock_billing_service):
|
||||
"""Test that BillingSandboxPolicy is created with correct internal values."""
|
||||
# Arrange
|
||||
mock_config.BILLING_ENABLED = True
|
||||
whitelist = ["tenant1", "tenant2"]
|
||||
mock_billing_service.get_expired_subscription_cleanup_whitelist.return_value = whitelist
|
||||
mock_plan_provider = MagicMock()
|
||||
mock_billing_service.get_plan_bulk_with_cache = mock_plan_provider
|
||||
|
||||
# Act
|
||||
policy = create_message_clean_policy(graceful_period_days=14, current_timestamp=1234567)
|
||||
|
||||
# Assert
|
||||
mock_billing_service.get_expired_subscription_cleanup_whitelist.assert_called_once()
|
||||
assert isinstance(policy, BillingSandboxPolicy)
|
||||
assert policy._graceful_period_days == 14
|
||||
assert list(policy._tenant_whitelist) == whitelist
|
||||
assert policy._plan_provider == mock_plan_provider
|
||||
assert policy._current_timestamp == 1234567
|
||||
|
||||
|
||||
class TestMessagesCleanServiceFromTimeRange:
|
||||
"""Unit tests for MessagesCleanService.from_time_range factory method."""
|
||||
|
||||
def test_start_from_end_before_raises_value_error(self):
|
||||
"""Test that start_from == end_before raises ValueError."""
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Arrange
|
||||
same_time = datetime.datetime(2024, 1, 1, 12, 0, 0)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="start_from .* must be less than end_before"):
|
||||
MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=same_time,
|
||||
end_before=same_time,
|
||||
)
|
||||
|
||||
# Arrange
|
||||
start_from = datetime.datetime(2024, 12, 31)
|
||||
end_before = datetime.datetime(2024, 1, 1)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="start_from .* must be less than end_before"):
|
||||
MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
)
|
||||
|
||||
def test_batch_size_raises_value_error(self):
|
||||
"""Test that batch_size=0 raises ValueError."""
|
||||
# Arrange
|
||||
start_from = datetime.datetime(2024, 1, 1)
|
||||
end_before = datetime.datetime(2024, 2, 1)
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="batch_size .* must be greater than 0"):
|
||||
MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
batch_size=0,
|
||||
)
|
||||
|
||||
start_from = datetime.datetime(2024, 1, 1)
|
||||
end_before = datetime.datetime(2024, 2, 1)
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="batch_size .* must be greater than 0"):
|
||||
MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
batch_size=-100,
|
||||
)
|
||||
|
||||
def test_valid_params_creates_instance(self):
|
||||
"""Test that valid parameters create a correctly configured instance."""
|
||||
# Arrange
|
||||
start_from = datetime.datetime(2024, 1, 1, 0, 0, 0)
|
||||
end_before = datetime.datetime(2024, 12, 31, 23, 59, 59)
|
||||
policy = BillingDisabledPolicy()
|
||||
batch_size = 500
|
||||
dry_run = True
|
||||
|
||||
# Act
|
||||
service = MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
batch_size=batch_size,
|
||||
dry_run=dry_run,
|
||||
)
|
||||
|
||||
# Assert
|
||||
assert isinstance(service, MessagesCleanService)
|
||||
assert service._policy is policy
|
||||
assert service._start_from == start_from
|
||||
assert service._end_before == end_before
|
||||
assert service._batch_size == batch_size
|
||||
assert service._dry_run == dry_run
|
||||
|
||||
def test_default_params(self):
|
||||
"""Test that default parameters are applied correctly."""
|
||||
# Arrange
|
||||
start_from = datetime.datetime(2024, 1, 1)
|
||||
end_before = datetime.datetime(2024, 2, 1)
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
service = MessagesCleanService.from_time_range(
|
||||
policy=policy,
|
||||
start_from=start_from,
|
||||
end_before=end_before,
|
||||
)
|
||||
|
||||
# Assert
|
||||
assert service._batch_size == 1000 # default
|
||||
assert service._dry_run is False # default
|
||||
|
||||
|
||||
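Taken together, the two ValueError cases above and the attribute checks that follow describe the full `from_time_range` contract. The class below is a stand-in sketch of that contract only, assuming the private attribute names used by the assertions; the real service also carries the policy-driven scan-and-delete logic, which is omitted here.

```python
import datetime
from typing import Any

class MessagesCleanService:
    """Stand-in sketch of the factory contract exercised by the tests above."""

    def __init__(self, policy: Any, start_from, end_before, batch_size: int, dry_run: bool):
        self._policy = policy
        self._start_from = start_from
        self._end_before = end_before
        self._batch_size = batch_size
        self._dry_run = dry_run

    @classmethod
    def from_time_range(cls, *, policy: Any,
                        start_from: datetime.datetime,
                        end_before: datetime.datetime,
                        batch_size: int = 1000,
                        dry_run: bool = False) -> "MessagesCleanService":
        # Validation implied by the tests: a non-empty window and a positive batch size.
        if start_from >= end_before:
            raise ValueError(f"start_from ({start_from}) must be less than end_before ({end_before})")
        if batch_size <= 0:
            raise ValueError(f"batch_size ({batch_size}) must be greater than 0")
        return cls(policy, start_from, end_before, batch_size, dry_run)
```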
class TestMessagesCleanServiceFromDays:
|
||||
"""Unit tests for MessagesCleanService.from_days factory method."""
|
||||
|
||||
def test_days_raises_value_error(self):
|
||||
"""Test that days < 0 raises ValueError."""
|
||||
# Arrange
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="days .* must be greater than or equal to 0"):
|
||||
MessagesCleanService.from_days(policy=policy, days=-1)
|
||||
|
||||
# Act
|
||||
with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime:
|
||||
fixed_now = datetime.datetime(2024, 6, 15, 14, 0, 0)
|
||||
mock_datetime.datetime.now.return_value = fixed_now
|
||||
mock_datetime.timedelta = datetime.timedelta
|
||||
|
||||
service = MessagesCleanService.from_days(policy=policy, days=0)
|
||||
|
||||
# Assert
|
||||
assert service._end_before == fixed_now
|
||||
|
||||
def test_batch_size_raises_value_error(self):
|
||||
"""Test that batch_size=0 raises ValueError."""
|
||||
# Arrange
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="batch_size .* must be greater than 0"):
|
||||
MessagesCleanService.from_days(policy=policy, days=30, batch_size=0)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(ValueError, match="batch_size .* must be greater than 0"):
|
||||
MessagesCleanService.from_days(policy=policy, days=30, batch_size=-500)
|
||||
|
||||
def test_valid_params_creates_instance(self):
|
||||
"""Test that valid parameters create a correctly configured instance."""
|
||||
# Arrange
|
||||
policy = BillingDisabledPolicy()
|
||||
days = 90
|
||||
batch_size = 500
|
||||
dry_run = True
|
||||
|
||||
# Act
|
||||
with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime:
|
||||
fixed_now = datetime.datetime(2024, 6, 15, 10, 30, 0)
|
||||
mock_datetime.datetime.now.return_value = fixed_now
|
||||
mock_datetime.timedelta = datetime.timedelta
|
||||
|
||||
service = MessagesCleanService.from_days(
|
||||
policy=policy,
|
||||
days=days,
|
||||
batch_size=batch_size,
|
||||
dry_run=dry_run,
|
||||
)
|
||||
|
||||
# Assert
|
||||
expected_end_before = fixed_now - datetime.timedelta(days=days)
|
||||
assert isinstance(service, MessagesCleanService)
|
||||
assert service._policy is policy
|
||||
assert service._start_from is None
|
||||
assert service._end_before == expected_end_before
|
||||
assert service._batch_size == batch_size
|
||||
assert service._dry_run == dry_run
|
||||
|
||||
def test_default_params(self):
|
||||
"""Test that default parameters are applied correctly."""
|
||||
# Arrange
|
||||
policy = BillingDisabledPolicy()
|
||||
|
||||
# Act
|
||||
with patch("services.retention.conversation.messages_clean_service.datetime") as mock_datetime:
|
||||
fixed_now = datetime.datetime(2024, 6, 15, 10, 30, 0)
|
||||
mock_datetime.datetime.now.return_value = fixed_now
|
||||
mock_datetime.timedelta = datetime.timedelta
|
||||
|
||||
service = MessagesCleanService.from_days(policy=policy)
|
||||
|
||||
# Assert
|
||||
expected_end_before = fixed_now - datetime.timedelta(days=30) # default days=30
|
||||
assert service._end_before == expected_end_before
|
||||
assert service._batch_size == 1000 # default
|
||||
assert service._dry_run is False # default
|
||||
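The `from_days` tests imply a simple window calculation: the day count runs back from the current time, there is no lower bound, and the same batch-size guard applies. The helper below is a stand-in sketch of that arithmetic under those assumptions, not the service's actual method.

```python
import datetime

def resolve_from_days(days: int = 30, batch_size: int = 1000):
    """Stand-in for the window arithmetic the from_days tests pin down."""
    if days < 0:
        raise ValueError(f"days ({days}) must be greater than or equal to 0")
    if batch_size <= 0:
        raise ValueError(f"batch_size ({batch_size}) must be greater than 0")
    # days counts back from "now"; there is no lower bound, so start_from stays None.
    end_before = datetime.datetime.now() - datetime.timedelta(days=days)
    return None, end_before

# Example: with the defaults, everything older than 30 days is in scope.
start_from, end_before = resolve_from_days()
```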
2
api/uv.lock
generated
@ -1484,7 +1484,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "dify-api"
|
||||
version = "1.11.3"
|
||||
version = "1.11.4"
|
||||
source = { virtual = "." }
|
||||
dependencies = [
|
||||
{ name = "aliyun-log-python-sdk" },
|
||||
|
||||
@ -21,7 +21,7 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -63,7 +63,7 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -102,7 +102,7 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -132,7 +132,7 @@ services:
|
||||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.11.3
|
||||
image: langgenius/dify-web:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
||||
@ -705,7 +705,7 @@ services:
|
||||
|
||||
# API service
|
||||
api:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -747,7 +747,7 @@ services:
|
||||
# worker service
|
||||
# The Celery worker for processing all queues (dataset, workflow, mail, etc.)
|
||||
worker:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -786,7 +786,7 @@ services:
|
||||
# worker_beat service
|
||||
# Celery beat for scheduling periodic tasks.
|
||||
worker_beat:
|
||||
image: langgenius/dify-api:1.11.3
|
||||
image: langgenius/dify-api:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
# Use the shared environment variables.
|
||||
@ -816,7 +816,7 @@ services:
|
||||
|
||||
# Frontend web application.
|
||||
web:
|
||||
image: langgenius/dify-web:1.11.3
|
||||
image: langgenius/dify-web:1.11.4
|
||||
restart: always
|
||||
environment:
|
||||
CONSOLE_API_URL: ${CONSOLE_API_URL:-}
|
||||
|
||||
@ -12,7 +12,6 @@ import { useDebounceFn } from 'ahooks'
|
||||
import dynamic from 'next/dynamic'
|
||||
import {
|
||||
useRouter,
|
||||
useSearchParams,
|
||||
} from 'next/navigation'
|
||||
import { parseAsString, useQueryState } from 'nuqs'
|
||||
import { useCallback, useEffect, useRef, useState } from 'react'
|
||||
@ -29,7 +28,6 @@ import { CheckModal } from '@/hooks/use-pay'
|
||||
import { useInfiniteAppList } from '@/service/use-apps'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { isServer } from '@/utils/client'
|
||||
import AppCard from './app-card'
|
||||
import { AppCardSkeleton } from './app-card-skeleton'
|
||||
import Empty from './empty'
|
||||
@ -59,7 +57,6 @@ const List = () => {
|
||||
const { t } = useTranslation()
|
||||
const { systemFeatures } = useGlobalPublicStore()
|
||||
const router = useRouter()
|
||||
const searchParams = useSearchParams()
|
||||
const { isCurrentWorkspaceEditor, isCurrentWorkspaceDatasetOperator, isLoadingCurrentWorkspace } = useAppContext()
|
||||
const showTagManagementModal = useTagStore(s => s.showTagManagementModal)
|
||||
const [activeTab, setActiveTab] = useQueryState(
|
||||
@ -67,33 +64,6 @@ const List = () => {
|
||||
parseAsString.withDefault('all').withOptions({ history: 'push' }),
|
||||
)
|
||||
|
||||
// valid tabs for apps list; anything else should fall back to 'all'
|
||||
|
||||
// 1) Normalize legacy/incorrect query params like ?mode=discover -> ?category=all
|
||||
useEffect(() => {
|
||||
// avoid running on server
|
||||
if (isServer)
|
||||
return
|
||||
const mode = searchParams.get('mode')
|
||||
if (!mode)
|
||||
return
|
||||
const url = new URL(window.location.href)
|
||||
url.searchParams.delete('mode')
|
||||
if (validTabs.has(mode)) {
|
||||
// migrate to category key
|
||||
url.searchParams.set('category', mode)
|
||||
}
|
||||
else {
|
||||
url.searchParams.set('category', 'all')
|
||||
}
|
||||
router.replace(url.pathname + url.search)
|
||||
}, [router, searchParams])
|
||||
|
||||
// 2) If category has an invalid value (e.g., 'discover'), reset to 'all'
|
||||
useEffect(() => {
|
||||
if (!validTabs.has(activeTab))
|
||||
setActiveTab('all')
|
||||
}, [activeTab, setActiveTab])
|
||||
const { query: { tagIDs = [], keywords = '', isCreatedByMe: queryIsCreatedByMe = false }, setQuery } = useAppsQueryState()
|
||||
const [isCreatedByMe, setIsCreatedByMe] = useState(queryIsCreatedByMe)
|
||||
const [tagFilterValue, setTagFilterValue] = useState<string[]>(tagIDs)
|
||||
|
||||
@ -2,9 +2,9 @@
|
||||
|
||||
import type { INavSelectorProps } from './nav-selector'
|
||||
import Link from 'next/link'
|
||||
import { usePathname, useSearchParams, useSelectedLayoutSegment } from 'next/navigation'
|
||||
import { useSelectedLayoutSegment } from 'next/navigation'
|
||||
import * as React from 'react'
|
||||
import { useEffect, useState } from 'react'
|
||||
import { useState } from 'react'
|
||||
import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import { ArrowNarrowLeft } from '@/app/components/base/icons/src/vender/line/arrows'
|
||||
import { cn } from '@/utils/classnames'
|
||||
@ -36,14 +36,6 @@ const Nav = ({
|
||||
const [hovered, setHovered] = useState(false)
|
||||
const segment = useSelectedLayoutSegment()
|
||||
const isActivated = Array.isArray(activeSegment) ? activeSegment.includes(segment!) : segment === activeSegment
|
||||
const pathname = usePathname()
|
||||
const searchParams = useSearchParams()
|
||||
const [linkLastSearchParams, setLinkLastSearchParams] = useState('')
|
||||
|
||||
useEffect(() => {
|
||||
if (pathname === link)
|
||||
setLinkLastSearchParams(searchParams.toString())
|
||||
}, [pathname, searchParams])
|
||||
|
||||
return (
|
||||
<div className={`
|
||||
@ -52,7 +44,7 @@ const Nav = ({
|
||||
${!curNav && !isActivated && 'hover:bg-components-main-nav-nav-button-bg-hover'}
|
||||
`}
|
||||
>
|
||||
<Link href={link + (linkLastSearchParams && `?${linkLastSearchParams}`)}>
|
||||
<Link href={link}>
|
||||
<div
|
||||
onClick={(e) => {
|
||||
// Don't clear state if opening in new tab/window
|
||||
|
||||
@ -897,6 +897,58 @@ describe('Icon', () => {
|
||||
const iconDiv = container.firstChild as HTMLElement
|
||||
expect(iconDiv).toHaveStyle({ backgroundImage: 'url(/icon?name=test&size=large)' })
|
||||
})
|
||||
|
||||
it('should not render status indicators when src is object with installed=true', () => {
|
||||
render(<Icon src={{ content: '🎉', background: '#fff' }} installed={true} />)
|
||||
|
||||
// Status indicators should not render for object src
|
||||
expect(screen.queryByTestId('ri-check-line')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render status indicators when src is object with installFailed=true', () => {
|
||||
render(<Icon src={{ content: '🎉', background: '#fff' }} installFailed={true} />)
|
||||
|
||||
// Status indicators should not render for object src
|
||||
expect(screen.queryByTestId('ri-close-line')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render object src with all size variants', () => {
|
||||
const sizes: Array<'xs' | 'tiny' | 'small' | 'medium' | 'large'> = ['xs', 'tiny', 'small', 'medium', 'large']
|
||||
|
||||
sizes.forEach((size) => {
|
||||
const { unmount } = render(<Icon src={{ content: '🔗', background: '#fff' }} size={size} />)
|
||||
expect(screen.getByTestId('app-icon')).toHaveAttribute('data-size', size)
|
||||
unmount()
|
||||
})
|
||||
})
|
||||
|
||||
it('should render object src with custom className', () => {
|
||||
const { container } = render(
|
||||
<Icon src={{ content: '🎉', background: '#fff' }} className="custom-object-icon" />,
|
||||
)
|
||||
|
||||
expect(container.querySelector('.custom-object-icon')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should pass correct props to AppIcon for object src', () => {
|
||||
render(<Icon src={{ content: '😀', background: '#123456' }} />)
|
||||
|
||||
const appIcon = screen.getByTestId('app-icon')
|
||||
expect(appIcon).toHaveAttribute('data-icon', '😀')
|
||||
expect(appIcon).toHaveAttribute('data-background', '#123456')
|
||||
expect(appIcon).toHaveAttribute('data-icon-type', 'emoji')
|
||||
})
|
||||
|
||||
it('should render inner icon only when shouldUseMcpIcon returns true', () => {
|
||||
// Test with MCP icon content
|
||||
const { unmount } = render(<Icon src={{ content: '🔗', background: '#fff' }} />)
|
||||
expect(screen.getByTestId('inner-icon')).toBeInTheDocument()
|
||||
unmount()
|
||||
|
||||
// Test without MCP icon content
|
||||
render(<Icon src={{ content: '🎉', background: '#fff' }} />)
|
||||
expect(screen.queryByTestId('inner-icon')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
123
web/app/components/plugins/plugin-page/context.spec.tsx
Normal file
@ -0,0 +1,123 @@
|
||||
import { render, screen } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
// Import mocks
|
||||
import { useGlobalPublicStore } from '@/context/global-public-context'
|
||||
|
||||
import { PluginPageContext, PluginPageContextProvider, usePluginPageContext } from './context'
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('nuqs', () => ({
|
||||
useQueryState: vi.fn(() => ['plugins', vi.fn()]),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/global-public-context', () => ({
|
||||
useGlobalPublicStore: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('../hooks', () => ({
|
||||
PLUGIN_PAGE_TABS_MAP: {
|
||||
plugins: 'plugins',
|
||||
marketplace: 'discover',
|
||||
},
|
||||
usePluginPageTabs: () => [
|
||||
{ value: 'plugins', text: 'Plugins' },
|
||||
{ value: 'discover', text: 'Explore Marketplace' },
|
||||
],
|
||||
}))
|
||||
|
||||
// Helper function to mock useGlobalPublicStore with marketplace setting
|
||||
const mockGlobalPublicStore = (enableMarketplace: boolean) => {
|
||||
vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
|
||||
const state = { systemFeatures: { enable_marketplace: enableMarketplace } }
|
||||
return selector(state as Parameters<typeof selector>[0])
|
||||
})
|
||||
}
|
||||
|
||||
// Test component that uses the context
|
||||
const TestConsumer = () => {
|
||||
const containerRef = usePluginPageContext(v => v.containerRef)
|
||||
const options = usePluginPageContext(v => v.options)
|
||||
const activeTab = usePluginPageContext(v => v.activeTab)
|
||||
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="has-container-ref">{containerRef ? 'true' : 'false'}</span>
|
||||
<span data-testid="options-count">{options.length}</span>
|
||||
<span data-testid="active-tab">{activeTab}</span>
|
||||
{options.map((opt: { value: string, text: string }) => (
|
||||
<span key={opt.value} data-testid={`option-${opt.value}`}>{opt.text}</span>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
describe('PluginPageContext', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('PluginPageContextProvider', () => {
|
||||
it('should provide context values to children', () => {
|
||||
mockGlobalPublicStore(true)
|
||||
|
||||
render(
|
||||
<PluginPageContextProvider>
|
||||
<TestConsumer />
|
||||
</PluginPageContextProvider>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('has-container-ref')).toHaveTextContent('true')
|
||||
expect(screen.getByTestId('options-count')).toHaveTextContent('2')
|
||||
})
|
||||
|
||||
it('should include marketplace tab when enable_marketplace is true', () => {
|
||||
mockGlobalPublicStore(true)
|
||||
|
||||
render(
|
||||
<PluginPageContextProvider>
|
||||
<TestConsumer />
|
||||
</PluginPageContextProvider>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('option-plugins')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('option-discover')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should filter out marketplace tab when enable_marketplace is false', () => {
|
||||
mockGlobalPublicStore(false)
|
||||
|
||||
render(
|
||||
<PluginPageContextProvider>
|
||||
<TestConsumer />
|
||||
</PluginPageContextProvider>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('option-plugins')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('option-discover')).not.toBeInTheDocument()
|
||||
expect(screen.getByTestId('options-count')).toHaveTextContent('1')
|
||||
})
|
||||
})
|
||||
|
||||
describe('usePluginPageContext', () => {
|
||||
it('should select specific context values', () => {
|
||||
mockGlobalPublicStore(true)
|
||||
|
||||
render(
|
||||
<PluginPageContextProvider>
|
||||
<TestConsumer />
|
||||
</PluginPageContextProvider>,
|
||||
)
|
||||
|
||||
// activeTab should be 'plugins' from the mock
|
||||
expect(screen.getByTestId('active-tab')).toHaveTextContent('plugins')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Default Context Values', () => {
|
||||
it('should have empty options by default from context', () => {
|
||||
// Test that the context has proper default values by checking the exported constant
|
||||
// The PluginPageContext is created with default values including empty options array
|
||||
expect(PluginPageContext).toBeDefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
1041
web/app/components/plugins/plugin-page/index.spec.tsx
Normal file
File diff suppressed because it is too large
@ -207,6 +207,7 @@ const PluginPage = ({
|
||||
popupContent={t('privilege.title', { ns: 'plugin' })}
|
||||
>
|
||||
<Button
|
||||
data-testid="plugin-settings-button"
|
||||
className="group h-full w-full p-2 text-components-button-secondary-text"
|
||||
onClick={setShowPluginSettingModal}
|
||||
>
|
||||
|
||||
@ -0,0 +1,219 @@
|
||||
import type { FC, ReactNode } from 'react'
|
||||
import type { PluginStatus } from '@/app/components/plugins/types'
|
||||
import type { Locale } from '@/i18n-config'
|
||||
import {
|
||||
RiCheckboxCircleFill,
|
||||
RiErrorWarningFill,
|
||||
RiLoaderLine,
|
||||
} from '@remixicon/react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Button from '@/app/components/base/button'
|
||||
import CardIcon from '@/app/components/plugins/card/base/card-icon'
|
||||
import { useGetLanguage } from '@/context/i18n'
|
||||
|
||||
// Types
|
||||
type PluginItemProps = {
|
||||
plugin: PluginStatus
|
||||
getIconUrl: (icon: string) => string
|
||||
language: Locale
|
||||
statusIcon: ReactNode
|
||||
statusText: string
|
||||
statusClassName?: string
|
||||
action?: ReactNode
|
||||
}
|
||||
|
||||
type PluginSectionProps = {
|
||||
title: string
|
||||
count: number
|
||||
plugins: PluginStatus[]
|
||||
getIconUrl: (icon: string) => string
|
||||
language: Locale
|
||||
statusIcon: ReactNode
|
||||
defaultStatusText: string
|
||||
statusClassName?: string
|
||||
headerAction?: ReactNode
|
||||
renderItemAction?: (plugin: PluginStatus) => ReactNode
|
||||
}
|
||||
|
||||
type PluginTaskListProps = {
|
||||
runningPlugins: PluginStatus[]
|
||||
successPlugins: PluginStatus[]
|
||||
errorPlugins: PluginStatus[]
|
||||
getIconUrl: (icon: string) => string
|
||||
onClearAll: () => void
|
||||
onClearErrors: () => void
|
||||
onClearSingle: (taskId: string, pluginId: string) => void
|
||||
}
|
||||
|
||||
// Plugin Item Component
|
||||
const PluginItem: FC<PluginItemProps> = ({
|
||||
plugin,
|
||||
getIconUrl,
|
||||
language,
|
||||
statusIcon,
|
||||
statusText,
|
||||
statusClassName,
|
||||
action,
|
||||
}) => {
|
||||
return (
|
||||
<div className="flex items-center rounded-lg p-2 hover:bg-state-base-hover">
|
||||
<div className="relative mr-2 flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge">
|
||||
{statusIcon}
|
||||
<CardIcon
|
||||
size="tiny"
|
||||
src={getIconUrl(plugin.icon)}
|
||||
/>
|
||||
</div>
|
||||
<div className="grow">
|
||||
<div className="system-md-regular truncate text-text-secondary">
|
||||
{plugin.labels[language]}
|
||||
</div>
|
||||
<div className={`system-xs-regular ${statusClassName || 'text-text-tertiary'}`}>
|
||||
{statusText}
|
||||
</div>
|
||||
</div>
|
||||
{action}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
// Plugin Section Component
|
||||
const PluginSection: FC<PluginSectionProps> = ({
|
||||
title,
|
||||
count,
|
||||
plugins,
|
||||
getIconUrl,
|
||||
language,
|
||||
statusIcon,
|
||||
defaultStatusText,
|
||||
statusClassName,
|
||||
headerAction,
|
||||
renderItemAction,
|
||||
}) => {
|
||||
if (plugins.length === 0)
|
||||
return null
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="system-sm-semibold-uppercase sticky top-0 flex h-7 items-center justify-between px-2 pt-1 text-text-secondary">
|
||||
{title}
|
||||
{' '}
|
||||
(
|
||||
{count}
|
||||
)
|
||||
{headerAction}
|
||||
</div>
|
||||
<div className="max-h-[200px] overflow-y-auto">
|
||||
{plugins.map(plugin => (
|
||||
<PluginItem
|
||||
key={plugin.plugin_unique_identifier}
|
||||
plugin={plugin}
|
||||
getIconUrl={getIconUrl}
|
||||
language={language}
|
||||
statusIcon={statusIcon}
|
||||
statusText={plugin.message || defaultStatusText}
|
||||
statusClassName={statusClassName}
|
||||
action={renderItemAction?.(plugin)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
// Main Plugin Task List Component
|
||||
const PluginTaskList: FC<PluginTaskListProps> = ({
|
||||
runningPlugins,
|
||||
successPlugins,
|
||||
errorPlugins,
|
||||
getIconUrl,
|
||||
onClearAll,
|
||||
onClearErrors,
|
||||
onClearSingle,
|
||||
}) => {
|
||||
const { t } = useTranslation()
|
||||
const language = useGetLanguage()
|
||||
|
||||
return (
|
||||
<div className="w-[360px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg">
|
||||
{/* Running Plugins Section */}
|
||||
{runningPlugins.length > 0 && (
|
||||
<PluginSection
|
||||
title={t('task.installing', { ns: 'plugin' })}
|
||||
count={runningPlugins.length}
|
||||
plugins={runningPlugins}
|
||||
getIconUrl={getIconUrl}
|
||||
language={language}
|
||||
statusIcon={
|
||||
<RiLoaderLine className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 animate-spin text-text-accent" />
|
||||
}
|
||||
defaultStatusText={t('task.installing', { ns: 'plugin' })}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Success Plugins Section */}
|
||||
{successPlugins.length > 0 && (
|
||||
<PluginSection
|
||||
title={t('task.installed', { ns: 'plugin' })}
|
||||
count={successPlugins.length}
|
||||
plugins={successPlugins}
|
||||
getIconUrl={getIconUrl}
|
||||
language={language}
|
||||
statusIcon={
|
||||
<RiCheckboxCircleFill className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 text-text-success" />
|
||||
}
|
||||
defaultStatusText={t('task.installed', { ns: 'plugin' })}
|
||||
statusClassName="text-text-success"
|
||||
headerAction={(
|
||||
<Button
|
||||
className="shrink-0"
|
||||
size="small"
|
||||
variant="ghost"
|
||||
onClick={onClearAll}
|
||||
>
|
||||
{t('task.clearAll', { ns: 'plugin' })}
|
||||
</Button>
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Error Plugins Section */}
|
||||
{errorPlugins.length > 0 && (
|
||||
<PluginSection
|
||||
title={t('task.installError', { ns: 'plugin', errorLength: errorPlugins.length })}
|
||||
count={errorPlugins.length}
|
||||
plugins={errorPlugins}
|
||||
getIconUrl={getIconUrl}
|
||||
language={language}
|
||||
statusIcon={
|
||||
<RiErrorWarningFill className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 text-text-destructive" />
|
||||
}
|
||||
defaultStatusText={t('task.installError', { ns: 'plugin', errorLength: errorPlugins.length })}
|
||||
statusClassName="text-text-destructive break-all"
|
||||
headerAction={(
|
||||
<Button
|
||||
className="shrink-0"
|
||||
size="small"
|
||||
variant="ghost"
|
||||
onClick={onClearErrors}
|
||||
>
|
||||
{t('task.clearAll', { ns: 'plugin' })}
|
||||
</Button>
|
||||
)}
|
||||
renderItemAction={plugin => (
|
||||
<Button
|
||||
className="shrink-0"
|
||||
size="small"
|
||||
variant="ghost"
|
||||
onClick={() => onClearSingle(plugin.taskId, plugin.plugin_unique_identifier)}
|
||||
>
|
||||
{t('operation.clear', { ns: 'common' })}
|
||||
</Button>
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default PluginTaskList
|
||||
@ -0,0 +1,96 @@
|
||||
import type { FC } from 'react'
|
||||
import {
|
||||
RiCheckboxCircleFill,
|
||||
RiErrorWarningFill,
|
||||
RiInstallLine,
|
||||
} from '@remixicon/react'
|
||||
import ProgressCircle from '@/app/components/base/progress-bar/progress-circle'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import DownloadingIcon from '@/app/components/header/plugins-nav/downloading-icon'
|
||||
import { cn } from '@/utils/classnames'
|
||||
|
||||
export type TaskStatusIndicatorProps = {
|
||||
tip: string
|
||||
isInstalling: boolean
|
||||
isInstallingWithSuccess: boolean
|
||||
isInstallingWithError: boolean
|
||||
isSuccess: boolean
|
||||
isFailed: boolean
|
||||
successPluginsLength: number
|
||||
runningPluginsLength: number
|
||||
totalPluginsLength: number
|
||||
onClick: () => void
|
||||
}
|
||||
|
||||
const TaskStatusIndicator: FC<TaskStatusIndicatorProps> = ({
|
||||
tip,
|
||||
isInstalling,
|
||||
isInstallingWithSuccess,
|
||||
isInstallingWithError,
|
||||
isSuccess,
|
||||
isFailed,
|
||||
successPluginsLength,
|
||||
runningPluginsLength,
|
||||
totalPluginsLength,
|
||||
onClick,
|
||||
}) => {
|
||||
const showDownloadingIcon = isInstalling || isInstallingWithError
|
||||
const showErrorStyle = isInstallingWithError || isFailed
|
||||
const showSuccessIcon = isSuccess || (successPluginsLength > 0 && runningPluginsLength === 0)
|
||||
|
||||
return (
|
||||
<Tooltip
|
||||
popupContent={tip}
|
||||
asChild
|
||||
offset={8}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
'relative flex h-8 w-8 items-center justify-center rounded-lg border-[0.5px] border-components-button-secondary-border bg-components-button-secondary-bg shadow-xs hover:bg-components-button-secondary-bg-hover',
|
||||
showErrorStyle && 'cursor-pointer border-components-button-destructive-secondary-border-hover bg-state-destructive-hover hover:bg-state-destructive-hover-alt',
|
||||
(isInstalling || isInstallingWithSuccess || isSuccess) && 'cursor-pointer hover:bg-components-button-secondary-bg-hover',
|
||||
)}
|
||||
id="plugin-task-trigger"
|
||||
onClick={onClick}
|
||||
>
|
||||
{/* Main Icon */}
|
||||
{showDownloadingIcon
|
||||
? <DownloadingIcon />
|
||||
: (
|
||||
<RiInstallLine
|
||||
className={cn(
|
||||
'h-4 w-4 text-components-button-secondary-text',
|
||||
showErrorStyle && 'text-components-button-destructive-secondary-text',
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Status Indicator Badge */}
|
||||
<div className="absolute -right-1 -top-1">
|
||||
{(isInstalling || isInstallingWithSuccess) && (
|
||||
<ProgressCircle
|
||||
percentage={(totalPluginsLength > 0 ? successPluginsLength / totalPluginsLength : 0) * 100}
|
||||
circleFillColor="fill-components-progress-brand-bg"
|
||||
/>
|
||||
)}
|
||||
{isInstallingWithError && (
|
||||
<ProgressCircle
|
||||
percentage={(totalPluginsLength > 0 ? runningPluginsLength / totalPluginsLength : 0) * 100}
|
||||
circleFillColor="fill-components-progress-brand-bg"
|
||||
sectorFillColor="fill-components-progress-error-border"
|
||||
circleStrokeColor="stroke-components-progress-error-border"
|
||||
/>
|
||||
)}
|
||||
{showSuccessIcon && !isInstalling && !isInstallingWithSuccess && !isInstallingWithError && (
|
||||
<RiCheckboxCircleFill className="h-3.5 w-3.5 text-text-success" />
|
||||
)}
|
||||
{isFailed && (
|
||||
<RiErrorWarningFill className="h-3.5 w-3.5 text-text-destructive" />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</Tooltip>
|
||||
)
|
||||
}
|
||||
|
||||
export default TaskStatusIndicator
|
||||
@ -0,0 +1,856 @@
|
||||
import type { PluginStatus } from '@/app/components/plugins/types'
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { TaskStatus } from '@/app/components/plugins/types'
|
||||
// Import mocked modules
|
||||
import { useMutationClearTaskPlugin, usePluginTaskList } from '@/service/use-plugins'
|
||||
import PluginTaskList from './components/plugin-task-list'
|
||||
import TaskStatusIndicator from './components/task-status-indicator'
|
||||
import { usePluginTaskStatus } from './hooks'
|
||||
|
||||
import PluginTasks from './index'
|
||||
|
||||
// Mock external dependencies
|
||||
vi.mock('@/service/use-plugins', () => ({
|
||||
usePluginTaskList: vi.fn(),
|
||||
useMutationClearTaskPlugin: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/plugins/install-plugin/base/use-get-icon', () => ({
|
||||
default: () => ({
|
||||
getIconUrl: (icon: string) => `https://example.com/${icon}`,
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/i18n', () => ({
|
||||
useGetLanguage: () => 'en_US',
|
||||
}))
|
||||
|
||||
// Helper to create mock plugin
|
||||
const createMockPlugin = (overrides: Partial<PluginStatus> = {}): PluginStatus => ({
|
||||
plugin_unique_identifier: `plugin-${Math.random().toString(36).substr(2, 9)}`,
|
||||
plugin_id: 'test-plugin',
|
||||
status: TaskStatus.running,
|
||||
message: '',
|
||||
icon: 'test-icon.png',
|
||||
labels: {
|
||||
en_US: 'Test Plugin',
|
||||
zh_Hans: '测试插件',
|
||||
} as Record<string, string>,
|
||||
taskId: 'task-1',
|
||||
...overrides,
|
||||
})
|
||||
|
||||
// Helper to setup mock hook returns
|
||||
const setupMocks = (plugins: PluginStatus[] = []) => {
|
||||
const mockMutateAsync = vi.fn().mockResolvedValue({})
|
||||
const mockHandleRefetch = vi.fn()
|
||||
|
||||
vi.mocked(usePluginTaskList).mockReturnValue({
|
||||
pluginTasks: plugins.length > 0
|
||||
? [{ id: 'task-1', plugins, created_at: '', updated_at: '', status: 'running', total_plugins: plugins.length, completed_plugins: 0 }]
|
||||
: [],
|
||||
handleRefetch: mockHandleRefetch,
|
||||
} as any)
|
||||
|
||||
vi.mocked(useMutationClearTaskPlugin).mockReturnValue({
|
||||
mutateAsync: mockMutateAsync,
|
||||
} as any)
|
||||
|
||||
return { mockMutateAsync, mockHandleRefetch }
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// usePluginTaskStatus Hook Tests
|
||||
// ============================================================================
|
||||
describe('usePluginTaskStatus Hook', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Plugin categorization', () => {
|
||||
it('should categorize running plugins correctly', () => {
|
||||
const runningPlugin = createMockPlugin({ status: TaskStatus.running })
|
||||
setupMocks([runningPlugin])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { runningPlugins, runningPluginsLength } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="running-count">{runningPluginsLength}</span>
|
||||
<span data-testid="running-id">{runningPlugins[0]?.plugin_unique_identifier}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('running-count')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('running-id')).toHaveTextContent(runningPlugin.plugin_unique_identifier)
|
||||
})
|
||||
|
||||
it('should categorize success plugins correctly', () => {
|
||||
const successPlugin = createMockPlugin({ status: TaskStatus.success })
|
||||
setupMocks([successPlugin])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { successPlugins, successPluginsLength } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="success-count">{successPluginsLength}</span>
|
||||
<span data-testid="success-id">{successPlugins[0]?.plugin_unique_identifier}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('success-count')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('success-id')).toHaveTextContent(successPlugin.plugin_unique_identifier)
|
||||
})
|
||||
|
||||
it('should categorize error plugins correctly', () => {
|
||||
const errorPlugin = createMockPlugin({ status: TaskStatus.failed, message: 'Install failed' })
|
||||
setupMocks([errorPlugin])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { errorPlugins, errorPluginsLength } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="error-count">{errorPluginsLength}</span>
|
||||
<span data-testid="error-id">{errorPlugins[0]?.plugin_unique_identifier}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('error-count')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('error-id')).toHaveTextContent(errorPlugin.plugin_unique_identifier)
|
||||
})
|
||||
|
||||
it('should categorize mixed plugins correctly', () => {
|
||||
const plugins = [
|
||||
createMockPlugin({ status: TaskStatus.running, plugin_unique_identifier: 'running-1' }),
|
||||
createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'success-1' }),
|
||||
createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }),
|
||||
]
|
||||
setupMocks(plugins)
|
||||
|
||||
const TestComponent = () => {
|
||||
const { runningPluginsLength, successPluginsLength, errorPluginsLength, totalPluginsLength } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="running">{runningPluginsLength}</span>
|
||||
<span data-testid="success">{successPluginsLength}</span>
|
||||
<span data-testid="error">{errorPluginsLength}</span>
|
||||
<span data-testid="total">{totalPluginsLength}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('running')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('success')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('error')).toHaveTextContent('1')
|
||||
expect(screen.getByTestId('total')).toHaveTextContent('3')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Status flags', () => {
|
||||
it('should set isInstalling when only running plugins exist', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isInstalling, isInstallingWithSuccess, isInstallingWithError, isSuccess, isFailed } = usePluginTaskStatus()
|
||||
return (
|
||||
<div>
|
||||
<span data-testid="isInstalling">{String(isInstalling)}</span>
|
||||
<span data-testid="isInstallingWithSuccess">{String(isInstallingWithSuccess)}</span>
|
||||
<span data-testid="isInstallingWithError">{String(isInstallingWithError)}</span>
|
||||
<span data-testid="isSuccess">{String(isSuccess)}</span>
|
||||
<span data-testid="isFailed">{String(isFailed)}</span>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
|
||||
expect(screen.getByTestId('isInstalling')).toHaveTextContent('true')
|
||||
expect(screen.getByTestId('isInstallingWithSuccess')).toHaveTextContent('false')
|
||||
expect(screen.getByTestId('isInstallingWithError')).toHaveTextContent('false')
|
||||
expect(screen.getByTestId('isSuccess')).toHaveTextContent('false')
|
||||
expect(screen.getByTestId('isFailed')).toHaveTextContent('false')
|
||||
})
|
||||
|
||||
it('should set isInstallingWithSuccess when running and success plugins exist', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.running }),
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isInstallingWithSuccess } = usePluginTaskStatus()
|
||||
return <span data-testid="flag">{String(isInstallingWithSuccess)}</span>
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
expect(screen.getByTestId('flag')).toHaveTextContent('true')
|
||||
})
|
||||
|
||||
it('should set isInstallingWithError when running and error plugins exist', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.running }),
|
||||
createMockPlugin({ status: TaskStatus.failed }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isInstallingWithError } = usePluginTaskStatus()
|
||||
return <span data-testid="flag">{String(isInstallingWithError)}</span>
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
expect(screen.getByTestId('flag')).toHaveTextContent('true')
|
||||
})
|
||||
|
||||
it('should set isSuccess when all plugins succeeded', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isSuccess } = usePluginTaskStatus()
|
||||
return <span data-testid="flag">{String(isSuccess)}</span>
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
expect(screen.getByTestId('flag')).toHaveTextContent('true')
|
||||
})
|
||||
|
||||
it('should set isFailed when no running plugins and some failed', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
createMockPlugin({ status: TaskStatus.failed }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { isFailed } = usePluginTaskStatus()
|
||||
return <span data-testid="flag">{String(isFailed)}</span>
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
expect(screen.getByTestId('flag')).toHaveTextContent('true')
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleClearErrorPlugin', () => {
|
||||
it('should call mutateAsync and handleRefetch', async () => {
|
||||
const { mockMutateAsync, mockHandleRefetch } = setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.failed }),
|
||||
])
|
||||
|
||||
const TestComponent = () => {
|
||||
const { handleClearErrorPlugin } = usePluginTaskStatus()
|
||||
return (
|
||||
<button onClick={() => handleClearErrorPlugin('task-1', 'plugin-1')}>
|
||||
Clear
|
||||
</button>
|
||||
)
|
||||
}
|
||||
|
||||
render(<TestComponent />)
|
||||
fireEvent.click(screen.getByRole('button'))
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockMutateAsync).toHaveBeenCalledWith({
|
||||
taskId: 'task-1',
|
||||
pluginId: 'plugin-1',
|
||||
})
|
||||
expect(mockHandleRefetch).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ============================================================================
|
||||
// TaskStatusIndicator Component Tests
|
||||
// ============================================================================
|
||||
describe('TaskStatusIndicator Component', () => {
|
||||
const defaultProps = {
|
||||
tip: 'Test tooltip',
|
||||
isInstalling: false,
|
||||
isInstallingWithSuccess: false,
|
||||
isInstallingWithError: false,
|
||||
isSuccess: false,
|
||||
isFailed: false,
|
||||
successPluginsLength: 0,
|
||||
runningPluginsLength: 0,
|
||||
totalPluginsLength: 1,
|
||||
onClick: vi.fn(),
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render with correct id', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Icon display', () => {
|
||||
it('should show downloading icon when installing', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isInstalling />)
|
||||
// DownloadingIcon is rendered when isInstalling is true
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show downloading icon when installing with error', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isInstallingWithError />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show install icon when not installing', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isSuccess />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Status badge', () => {
|
||||
it('should show progress circle when installing', () => {
|
||||
render(
|
||||
<TaskStatusIndicator
|
||||
{...defaultProps}
|
||||
isInstalling
|
||||
successPluginsLength={1}
|
||||
totalPluginsLength={3}
|
||||
/>,
|
||||
)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show progress circle when installing with success', () => {
|
||||
render(
|
||||
<TaskStatusIndicator
|
||||
{...defaultProps}
|
||||
isInstallingWithSuccess
|
||||
successPluginsLength={2}
|
||||
totalPluginsLength={3}
|
||||
/>,
|
||||
)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show error progress circle when installing with error', () => {
|
||||
render(
|
||||
<TaskStatusIndicator
|
||||
{...defaultProps}
|
||||
isInstallingWithError
|
||||
runningPluginsLength={1}
|
||||
totalPluginsLength={3}
|
||||
/>,
|
||||
)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show success icon when all completed successfully', () => {
|
||||
render(
|
||||
<TaskStatusIndicator
|
||||
{...defaultProps}
|
||||
isSuccess
|
||||
successPluginsLength={3}
|
||||
runningPluginsLength={0}
|
||||
totalPluginsLength={3}
|
||||
/>,
|
||||
)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show error icon when failed', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isFailed />)
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Styling', () => {
|
||||
it('should apply error styles when installing with error', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isInstallingWithError />)
|
||||
const trigger = document.getElementById('plugin-task-trigger')
|
||||
expect(trigger).toHaveClass('bg-state-destructive-hover')
|
||||
})
|
||||
|
||||
it('should apply error styles when failed', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isFailed />)
|
||||
const trigger = document.getElementById('plugin-task-trigger')
|
||||
expect(trigger).toHaveClass('bg-state-destructive-hover')
|
||||
})
|
||||
|
||||
it('should apply cursor-pointer when clickable', () => {
|
||||
render(<TaskStatusIndicator {...defaultProps} isInstalling />)
|
||||
const trigger = document.getElementById('plugin-task-trigger')
|
||||
expect(trigger).toHaveClass('cursor-pointer')
|
||||
})
|
||||
})
|
||||
|
||||
describe('User interactions', () => {
|
||||
it('should call onClick when clicked', () => {
|
||||
const handleClick = vi.fn()
|
||||
render(<TaskStatusIndicator {...defaultProps} onClick={handleClick} />)
|
||||
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
expect(handleClick).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ============================================================================
|
||||
// PluginTaskList Component Tests
|
||||
// ============================================================================
|
||||
describe('PluginTaskList Component', () => {
|
||||
const defaultProps = {
|
||||
runningPlugins: [] as PluginStatus[],
|
||||
successPlugins: [] as PluginStatus[],
|
||||
errorPlugins: [] as PluginStatus[],
|
||||
getIconUrl: (icon: string) => `https://example.com/${icon}`,
|
||||
onClearAll: vi.fn(),
|
||||
onClearErrors: vi.fn(),
|
||||
onClearSingle: vi.fn(),
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing with empty lists', () => {
|
||||
render(<PluginTaskList {...defaultProps} />)
|
||||
expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render running plugins section when plugins exist', () => {
|
||||
const runningPlugins = [createMockPlugin({ status: TaskStatus.running })]
|
||||
render(<PluginTaskList {...defaultProps} runningPlugins={runningPlugins} />)
|
||||
|
||||
// Translation key is returned as text in tests, so multiple matches are expected (title + status)
|
||||
expect(screen.getAllByText(/task\.installing/i).length).toBeGreaterThan(0)
|
||||
// Verify section container is rendered
|
||||
expect(document.querySelector('.max-h-\\[200px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render success plugins section when plugins exist', () => {
|
||||
const successPlugins = [createMockPlugin({ status: TaskStatus.success })]
|
||||
render(<PluginTaskList {...defaultProps} successPlugins={successPlugins} />)
|
||||
|
||||
// Translation key is returned as text in tests, so multiple matches are expected
|
||||
expect(screen.getAllByText(/task\.installed/i).length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it('should render error plugins section when plugins exist', () => {
|
||||
const errorPlugins = [createMockPlugin({ status: TaskStatus.failed, message: 'Error occurred' })]
|
||||
render(<PluginTaskList {...defaultProps} errorPlugins={errorPlugins} />)
|
||||
|
||||
expect(screen.getByText('Error occurred')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render all sections when all types exist', () => {
|
||||
render(
|
||||
<PluginTaskList
|
||||
{...defaultProps}
|
||||
runningPlugins={[createMockPlugin({ status: TaskStatus.running })]}
|
||||
successPlugins={[createMockPlugin({ status: TaskStatus.success })]}
|
||||
errorPlugins={[createMockPlugin({ status: TaskStatus.failed })]}
|
||||
/>,
|
||||
)
|
||||
|
||||
// All sections should be present
|
||||
expect(document.querySelectorAll('.max-h-\\[200px\\]').length).toBe(3)
|
||||
})
|
||||
})
|
||||
|
||||
describe('User interactions', () => {
|
||||
it('should call onClearAll when clear all button is clicked in success section', () => {
|
||||
const handleClearAll = vi.fn()
|
||||
const successPlugins = [createMockPlugin({ status: TaskStatus.success })]
|
||||
|
||||
render(
|
||||
<PluginTaskList
|
||||
{...defaultProps}
|
||||
successPlugins={successPlugins}
|
||||
onClearAll={handleClearAll}
|
||||
/>,
|
||||
)
|
||||
|
||||
fireEvent.click(screen.getByRole('button', { name: /task\.clearAll/i }))
|
||||
|
||||
expect(handleClearAll).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should call onClearErrors when clear all button is clicked in error section', () => {
|
||||
const handleClearErrors = vi.fn()
|
||||
const errorPlugins = [createMockPlugin({ status: TaskStatus.failed })]
|
||||
|
||||
render(
|
||||
<PluginTaskList
|
||||
{...defaultProps}
|
||||
errorPlugins={errorPlugins}
|
||||
onClearErrors={handleClearErrors}
|
||||
/>,
|
||||
)
|
||||
|
||||
const clearButtons = screen.getAllByRole('button')
|
||||
fireEvent.click(clearButtons.find(btn => btn.textContent?.includes('task.clearAll'))!)
|
||||
|
||||
expect(handleClearErrors).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should call onClearSingle with correct args when individual clear is clicked', () => {
|
||||
const handleClearSingle = vi.fn()
|
||||
const errorPlugin = createMockPlugin({
|
||||
status: TaskStatus.failed,
|
||||
plugin_unique_identifier: 'error-plugin-1',
|
||||
taskId: 'task-123',
|
||||
})
|
||||
|
||||
render(
|
||||
<PluginTaskList
|
||||
{...defaultProps}
|
||||
errorPlugins={[errorPlugin]}
|
||||
onClearSingle={handleClearSingle}
|
||||
/>,
|
||||
)
|
||||
|
||||
// The individual clear button has the text 'operation.clear'
|
||||
fireEvent.click(screen.getByRole('button', { name: /operation\.clear/i }))
|
||||
|
||||
expect(handleClearSingle).toHaveBeenCalledWith('task-123', 'error-plugin-1')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Plugin display', () => {
|
||||
it('should display plugin name from labels', () => {
|
||||
const plugin = createMockPlugin({
|
||||
status: TaskStatus.running,
|
||||
labels: { en_US: 'My Test Plugin' } as Record<string, string>,
|
||||
})
|
||||
|
||||
render(<PluginTaskList {...defaultProps} runningPlugins={[plugin]} />)
|
||||
|
||||
expect(screen.getByText('My Test Plugin')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display plugin message when available', () => {
|
||||
const plugin = createMockPlugin({
|
||||
status: TaskStatus.success,
|
||||
message: 'Successfully installed!',
|
||||
})
|
||||
|
||||
render(<PluginTaskList {...defaultProps} successPlugins={[plugin]} />)
|
||||
|
||||
expect(screen.getByText('Successfully installed!')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display multiple plugins in each section', () => {
|
||||
const runningPlugins = [
|
||||
createMockPlugin({ status: TaskStatus.running, labels: { en_US: 'Plugin A' } as Record<string, string> }),
|
||||
createMockPlugin({ status: TaskStatus.running, labels: { en_US: 'Plugin B' } as Record<string, string> }),
|
||||
]
|
||||
|
||||
render(<PluginTaskList {...defaultProps} runningPlugins={runningPlugins} />)
|
||||
|
||||
expect(screen.getByText('Plugin A')).toBeInTheDocument()
|
||||
expect(screen.getByText('Plugin B')).toBeInTheDocument()
|
||||
// Count is rendered, verify multiple items are in list
|
||||
expect(document.querySelectorAll('.hover\\:bg-state-base-hover').length).toBe(2)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// ============================================================================
|
||||
// PluginTasks Main Component Tests
|
||||
// ============================================================================
|
||||
describe('PluginTasks Component', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should return null when no plugins exist', () => {
|
||||
setupMocks([])
|
||||
|
||||
const { container } = render(<PluginTasks />)
|
||||
|
||||
expect(container.firstChild).toBeNull()
|
||||
})
|
||||
|
||||
it('should render when plugins exist', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Tooltip text (tip memoization)', () => {
|
||||
it('should show installing tip when isInstalling', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// The component renders with a tooltip, we verify it exists
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show success tip when all succeeded', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.success })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show error tip when some failed', () => {
|
||||
setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.success }),
|
||||
createMockPlugin({ status: TaskStatus.failed }),
|
||||
])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Popover interaction', () => {
|
||||
it('should toggle popover when trigger is clicked and status allows', () => {
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Click to open
|
||||
fireEvent.click(document.getElementById('plugin-task-trigger')!)
|
||||
|
||||
// The popover content should be visible (PluginTaskList)
|
||||
expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not toggle when status does not allow', () => {
|
||||
// Setup with no actionable status (edge case - should not happen in practice)
|
||||
setupMocks([createMockPlugin({ status: TaskStatus.running })])
|
||||
|
||||
render(<PluginTasks />)
|
||||
|
||||
// Component should still render
|
||||
expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Clear handlers', () => {
|
||||
it('should clear all completed plugins when onClearAll is called', async () => {
|
||||
const { mockMutateAsync } = setupMocks([
|
||||
createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'success-1' }),
|
||||
createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }),
|
||||
      ])

      render(<PluginTasks />)

      // Open popover
      fireEvent.click(document.getElementById('plugin-task-trigger')!)

      // Wait for popover content to render
      await waitFor(() => {
        expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
      })

      // Find and click clear all button
      const clearButtons = screen.getAllByRole('button')
      const clearAllButton = clearButtons.find(btn => btn.textContent?.includes('clearAll'))
      if (clearAllButton)
        fireEvent.click(clearAllButton)

      // Verify mutateAsync was called for each completed plugin
      await waitFor(() => {
        expect(mockMutateAsync).toHaveBeenCalled()
      })
    })

    it('should clear only error plugins when onClearErrors is called', async () => {
      const { mockMutateAsync } = setupMocks([
        createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'error-1' }),
      ])

      render(<PluginTasks />)

      // Open popover
      fireEvent.click(document.getElementById('plugin-task-trigger')!)

      await waitFor(() => {
        expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
      })

      // Find and click the clear all button in error section
      const clearButtons = screen.getAllByRole('button')
      if (clearButtons.length > 0)
        fireEvent.click(clearButtons[0])

      await waitFor(() => {
        expect(mockMutateAsync).toHaveBeenCalled()
      })
    })

    it('should clear single plugin when onClearSingle is called', async () => {
      const { mockMutateAsync } = setupMocks([
        createMockPlugin({
          status: TaskStatus.failed,
          plugin_unique_identifier: 'error-plugin',
          taskId: 'task-1',
        }),
      ])

      render(<PluginTasks />)

      // Open popover
      fireEvent.click(document.getElementById('plugin-task-trigger')!)

      await waitFor(() => {
        expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
      })

      // Find and click individual clear button (usually the last one)
      const clearButtons = screen.getAllByRole('button')
      const individualClearButton = clearButtons[clearButtons.length - 1]
      fireEvent.click(individualClearButton)

      await waitFor(() => {
        expect(mockMutateAsync).toHaveBeenCalledWith({
          taskId: 'task-1',
          pluginId: 'error-plugin',
        })
      })
    })
  })

  describe('Edge cases', () => {
    it('should handle empty plugin tasks array', () => {
      setupMocks([])

      const { container } = render(<PluginTasks />)

      expect(container.firstChild).toBeNull()
    })

    it('should handle single running plugin', () => {
      setupMocks([createMockPlugin({ status: TaskStatus.running })])

      render(<PluginTasks />)

      expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
    })

    it('should handle many plugins', () => {
      const manyPlugins = Array.from({ length: 10 }, (_, i) =>
        createMockPlugin({
          status: i % 3 === 0 ? TaskStatus.running : i % 3 === 1 ? TaskStatus.success : TaskStatus.failed,
          plugin_unique_identifier: `plugin-${i}`,
        }))
      setupMocks(manyPlugins)

      render(<PluginTasks />)

      expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
    })

    it('should handle plugins with empty labels', () => {
      const plugin = createMockPlugin({
        status: TaskStatus.running,
        labels: {} as Record<string, string>,
      })
      setupMocks([plugin])

      render(<PluginTasks />)

      expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
    })

    it('should handle plugins with long messages', () => {
      const plugin = createMockPlugin({
        status: TaskStatus.failed,
        message: 'A'.repeat(500),
      })
      setupMocks([plugin])

      render(<PluginTasks />)

      // Open popover
      fireEvent.click(document.getElementById('plugin-task-trigger')!)

      expect(document.querySelector('.w-\\[360px\\]')).toBeInTheDocument()
    })
  })
})

// ============================================================================
// Integration Tests
// ============================================================================
describe('PluginTasks Integration', () => {
  beforeEach(() => {
    vi.clearAllMocks()
  })

  it('should show correct UI flow from installing to success', async () => {
    // Start with installing state
    setupMocks([createMockPlugin({ status: TaskStatus.running })])

    const { rerender } = render(<PluginTasks />)

    expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()

    // Simulate completion by re-rendering with success
    setupMocks([createMockPlugin({ status: TaskStatus.success })])
    rerender(<PluginTasks />)

    expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
  })

  it('should show correct UI flow from installing to failure', async () => {
    // Start with installing state
    setupMocks([createMockPlugin({ status: TaskStatus.running })])

    const { rerender } = render(<PluginTasks />)

    expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()

    // Simulate failure by re-rendering with failed
    setupMocks([createMockPlugin({ status: TaskStatus.failed, message: 'Network error' })])
    rerender(<PluginTasks />)

    expect(document.getElementById('plugin-task-trigger')).toBeInTheDocument()
  })

  it('should handle mixed status during installation', () => {
    setupMocks([
      createMockPlugin({ status: TaskStatus.running, plugin_unique_identifier: 'p1' }),
      createMockPlugin({ status: TaskStatus.success, plugin_unique_identifier: 'p2' }),
      createMockPlugin({ status: TaskStatus.failed, plugin_unique_identifier: 'p3' }),
    ])

    render(<PluginTasks />)

    // Open popover
    fireEvent.click(document.getElementById('plugin-task-trigger')!)

    // All sections should be visible
    const sections = document.querySelectorAll('.max-h-\\[200px\\]')
    expect(sections.length).toBe(3)
  })
})
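// A minimal sketch of the task-item shape the tests above assume. `createMockPlugin` and
// `setupMocks` are defined earlier in this spec file; the field names below are inferred only
// from the assertions above and are illustrative, not confirmed by the source:
//
// type MockPluginTaskItem = {
//   status: TaskStatus                 // running | success | failed
//   plugin_unique_identifier: string   // e.g. 'error-plugin', 'plugin-0'
//   taskId: string                     // paired with pluginId when clearing a single task
//   labels: Record<string, string>     // per-language display names; may be empty
//   message?: string                   // success/error detail shown in the popover
//   icon?: string                      // resolved through getIconUrl in the component
// }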
@ -1,33 +1,21 @@
import {
  RiCheckboxCircleFill,
  RiErrorWarningFill,
  RiInstallLine,
  RiLoaderLine,
} from '@remixicon/react'
import {
  useCallback,
  useMemo,
  useState,
} from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import {
  PortalToFollowElem,
  PortalToFollowElemContent,
  PortalToFollowElemTrigger,
} from '@/app/components/base/portal-to-follow-elem'
import ProgressCircle from '@/app/components/base/progress-bar/progress-circle'
import Tooltip from '@/app/components/base/tooltip'
import DownloadingIcon from '@/app/components/header/plugins-nav/downloading-icon'
import CardIcon from '@/app/components/plugins/card/base/card-icon'
import useGetIcon from '@/app/components/plugins/install-plugin/base/use-get-icon'
import { useGetLanguage } from '@/context/i18n'
import { cn } from '@/utils/classnames'
import PluginTaskList from './components/plugin-task-list'
import TaskStatusIndicator from './components/task-status-indicator'
import { usePluginTaskStatus } from './hooks'

const PluginTasks = () => {
  const { t } = useTranslation()
  const language = useGetLanguage()
  const [open, setOpen] = useState(false)
  const {
    errorPlugins,
@ -46,35 +34,7 @@ const PluginTasks = () => {
  } = usePluginTaskStatus()
  const { getIconUrl } = useGetIcon()

  const handleClearAllWithModal = useCallback(async () => {
    // Clear all completed plugins (success and error) but keep running ones
    const completedPlugins = [...successPlugins, ...errorPlugins]

    // Clear all completed plugins individually
    for (const plugin of completedPlugins)
      await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier)

    // Only close modal if no plugins are still installing
    if (runningPluginsLength === 0)
      setOpen(false)
  }, [successPlugins, errorPlugins, handleClearErrorPlugin, runningPluginsLength])

  const handleClearErrorsWithModal = useCallback(async () => {
    // Clear only error plugins, not all plugins
    for (const plugin of errorPlugins)
      await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier)
    // Only close modal if no plugins are still installing
    if (runningPluginsLength === 0)
      setOpen(false)
  }, [errorPlugins, handleClearErrorPlugin, runningPluginsLength])

  const handleClearSingleWithModal = useCallback(async (taskId: string, pluginId: string) => {
    await handleClearErrorPlugin(taskId, pluginId)
    // Only close modal if no plugins are still installing
    if (runningPluginsLength === 0)
      setOpen(false)
  }, [handleClearErrorPlugin, runningPluginsLength])

  // Generate tooltip text based on status
  const tip = useMemo(() => {
    if (isInstallingWithError)
      return t('task.installingWithError', { ns: 'plugin', installingLength: runningPluginsLength, successLength: successPluginsLength, errorLength: errorPluginsLength })
@ -99,8 +59,38 @@ const PluginTasks = () => {
    t,
  ])

  // Show icon if there are any plugin tasks (completed, running, or failed)
  // Only hide when there are absolutely no plugin tasks
  // Generic clear function that handles clearing and modal closing
  const clearPluginsAndClose = useCallback(async (
    plugins: Array<{ taskId: string, plugin_unique_identifier: string }>,
  ) => {
    for (const plugin of plugins)
      await handleClearErrorPlugin(plugin.taskId, plugin.plugin_unique_identifier)
    if (runningPluginsLength === 0)
      setOpen(false)
  }, [handleClearErrorPlugin, runningPluginsLength])

  // Clear handlers using the generic function
  const handleClearAll = useCallback(
    () => clearPluginsAndClose([...successPlugins, ...errorPlugins]),
    [clearPluginsAndClose, successPlugins, errorPlugins],
  )

  const handleClearErrors = useCallback(
    () => clearPluginsAndClose(errorPlugins),
    [clearPluginsAndClose, errorPlugins],
  )

  const handleClearSingle = useCallback(
    (taskId: string, pluginId: string) => clearPluginsAndClose([{ taskId, plugin_unique_identifier: pluginId }]),
    [clearPluginsAndClose],
  )

  const handleTriggerClick = useCallback(() => {
    if (isFailed || isInstalling || isInstallingWithSuccess || isInstallingWithError || isSuccess)
      setOpen(v => !v)
  }, [isFailed, isInstalling, isInstallingWithSuccess, isInstallingWithError, isSuccess])

  // Hide when no plugin tasks
  if (totalPluginsLength === 0)
    return null

@ -115,206 +105,30 @@ const PluginTasks = () => {
        crossAxis: 79,
      }}
    >
      <PortalToFollowElemTrigger
        onClick={() => {
          if (isFailed || isInstalling || isInstallingWithSuccess || isInstallingWithError || isSuccess)
            setOpen(v => !v)
        }}
      >
        <Tooltip
          popupContent={tip}
          asChild
          offset={8}
        >
          <div
            className={cn(
              'relative flex h-8 w-8 items-center justify-center rounded-lg border-[0.5px] border-components-button-secondary-border bg-components-button-secondary-bg shadow-xs hover:bg-components-button-secondary-bg-hover',
              (isInstallingWithError || isFailed) && 'cursor-pointer border-components-button-destructive-secondary-border-hover bg-state-destructive-hover hover:bg-state-destructive-hover-alt',
              (isInstalling || isInstallingWithSuccess || isSuccess) && 'cursor-pointer hover:bg-components-button-secondary-bg-hover',
            )}
            id="plugin-task-trigger"
          >
            {
              (isInstalling || isInstallingWithError) && (
                <DownloadingIcon />
              )
            }
            {
              !(isInstalling || isInstallingWithError) && (
                <RiInstallLine
                  className={cn(
                    'h-4 w-4 text-components-button-secondary-text',
                    (isInstallingWithError || isFailed) && 'text-components-button-destructive-secondary-text',
                  )}
                />
              )
            }
            <div className="absolute -right-1 -top-1">
              {
                (isInstalling || isInstallingWithSuccess) && (
                  <ProgressCircle
                    percentage={successPluginsLength / totalPluginsLength * 100}
                    circleFillColor="fill-components-progress-brand-bg"
                  />
                )
              }
              {
                isInstallingWithError && (
                  <ProgressCircle
                    percentage={runningPluginsLength / totalPluginsLength * 100}
                    circleFillColor="fill-components-progress-brand-bg"
                    sectorFillColor="fill-components-progress-error-border"
                    circleStrokeColor="stroke-components-progress-error-border"
                  />
                )
              }
              {
                (isSuccess || (successPluginsLength > 0 && runningPluginsLength === 0 && errorPluginsLength === 0)) && (
                  <RiCheckboxCircleFill className="h-3.5 w-3.5 text-text-success" />
                )
              }
              {
                isFailed && (
                  <RiErrorWarningFill className="h-3.5 w-3.5 text-text-destructive" />
                )
              }
            </div>
          </div>
        </Tooltip>
      <PortalToFollowElemTrigger onClick={handleTriggerClick}>
        <TaskStatusIndicator
          tip={tip}
          isInstalling={isInstalling}
          isInstallingWithSuccess={isInstallingWithSuccess}
          isInstallingWithError={isInstallingWithError}
          isSuccess={isSuccess}
          isFailed={isFailed}
          successPluginsLength={successPluginsLength}
          runningPluginsLength={runningPluginsLength}
          totalPluginsLength={totalPluginsLength}
          onClick={() => {}}
        />
      </PortalToFollowElemTrigger>
      <PortalToFollowElemContent className="z-[11]">
        <div className="w-[360px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-1 shadow-lg">
          {/* Running Plugins */}
          {runningPlugins.length > 0 && (
            <>
              <div className="system-sm-semibold-uppercase sticky top-0 flex h-7 items-center justify-between px-2 pt-1 text-text-secondary">
                {t('task.installing', { ns: 'plugin' })}
                {' '}
                (
                {runningPlugins.length}
                )
              </div>
              <div className="max-h-[200px] overflow-y-auto">
                {runningPlugins.map(runningPlugin => (
                  <div
                    key={runningPlugin.plugin_unique_identifier}
                    className="flex items-center rounded-lg p-2 hover:bg-state-base-hover"
                  >
                    <div className="relative mr-2 flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge">
                      <RiLoaderLine className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 animate-spin text-text-accent" />
                      <CardIcon
                        size="tiny"
                        src={getIconUrl(runningPlugin.icon)}
                      />
                    </div>
                    <div className="grow">
                      <div className="system-md-regular truncate text-text-secondary">
                        {runningPlugin.labels[language]}
                      </div>
                      <div className="system-xs-regular text-text-tertiary">
                        {t('task.installing', { ns: 'plugin' })}
                      </div>
                    </div>
                  </div>
                ))}
              </div>
            </>
          )}

          {/* Success Plugins */}
          {successPlugins.length > 0 && (
            <>
              <div className="system-sm-semibold-uppercase sticky top-0 flex h-7 items-center justify-between px-2 pt-1 text-text-secondary">
                {t('task.installed', { ns: 'plugin' })}
                {' '}
                (
                {successPlugins.length}
                )
                <Button
                  className="shrink-0"
                  size="small"
                  variant="ghost"
                  onClick={() => handleClearAllWithModal()}
                >
                  {t('task.clearAll', { ns: 'plugin' })}
                </Button>
              </div>
              <div className="max-h-[200px] overflow-y-auto">
                {successPlugins.map(successPlugin => (
                  <div
                    key={successPlugin.plugin_unique_identifier}
                    className="flex items-center rounded-lg p-2 hover:bg-state-base-hover"
                  >
                    <div className="relative mr-2 flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge">
                      <RiCheckboxCircleFill className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 text-text-success" />
                      <CardIcon
                        size="tiny"
                        src={getIconUrl(successPlugin.icon)}
                      />
                    </div>
                    <div className="grow">
                      <div className="system-md-regular truncate text-text-secondary">
                        {successPlugin.labels[language]}
                      </div>
                      <div className="system-xs-regular text-text-success">
                        {successPlugin.message || t('task.installed', { ns: 'plugin' })}
                      </div>
                    </div>
                  </div>
                ))}
              </div>
            </>
          )}

          {/* Error Plugins */}
          {errorPlugins.length > 0 && (
            <>
              <div className="system-sm-semibold-uppercase sticky top-0 flex h-7 items-center justify-between px-2 pt-1 text-text-secondary">
                {t('task.installError', { ns: 'plugin', errorLength: errorPlugins.length })}
                <Button
                  className="shrink-0"
                  size="small"
                  variant="ghost"
                  onClick={() => handleClearErrorsWithModal()}
                >
                  {t('task.clearAll', { ns: 'plugin' })}
                </Button>
              </div>
              <div className="max-h-[200px] overflow-y-auto">
                {errorPlugins.map(errorPlugin => (
                  <div
                    key={errorPlugin.plugin_unique_identifier}
                    className="flex items-center rounded-lg p-2 hover:bg-state-base-hover"
                  >
                    <div className="relative mr-2 flex h-6 w-6 items-center justify-center rounded-md border-[0.5px] border-components-panel-border-subtle bg-background-default-dodge">
                      <RiErrorWarningFill className="absolute -bottom-0.5 -right-0.5 z-10 h-3 w-3 text-text-destructive" />
                      <CardIcon
                        size="tiny"
                        src={getIconUrl(errorPlugin.icon)}
                      />
                    </div>
                    <div className="grow">
                      <div className="system-md-regular truncate text-text-secondary">
                        {errorPlugin.labels[language]}
                      </div>
                      <div className="system-xs-regular break-all text-text-destructive">
                        {errorPlugin.message}
                      </div>
                    </div>
                    <Button
                      className="shrink-0"
                      size="small"
                      variant="ghost"
                      onClick={() => handleClearSingleWithModal(errorPlugin.taskId, errorPlugin.plugin_unique_identifier)}
                    >
                      {t('operation.clear', { ns: 'common' })}
                    </Button>
                  </div>
                ))}
              </div>
            </>
          )}
        </div>
        <PluginTaskList
          runningPlugins={runningPlugins}
          successPlugins={successPlugins}
          errorPlugins={errorPlugins}
          getIconUrl={getIconUrl}
          onClearAll={handleClearAll}
          onClearErrors={handleClearErrors}
          onClearSingle={handleClearSingle}
        />
      </PortalToFollowElemContent>
    </PortalToFollowElem>
  </div>
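// A minimal sketch of the props contract the refactor above assumes for the extracted
// TaskStatusIndicator (inferred from the JSX usage; the actual type lives in
// ./components/task-status-indicator and may differ):
//
// type TaskStatusIndicatorProps = {
//   tip: string
//   isInstalling: boolean
//   isInstallingWithSuccess: boolean
//   isInstallingWithError: boolean
//   isSuccess: boolean
//   isFailed: boolean
//   successPluginsLength: number
//   runningPluginsLength: number
//   totalPluginsLength: number
//   onClick: () => void
// }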
@ -0,0 +1,388 @@
import { renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Import mocks for assertions
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'

import { useInvalidateReferenceSettings, useMutationReferenceSettings, useReferenceSettings } from '@/service/use-plugins'
import Toast from '../../base/toast'
import { PermissionType } from '../types'
import useReferenceSetting, { useCanInstallPluginFromMarketplace } from './use-reference-setting'

// Mock dependencies
vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}))

vi.mock('@/context/app-context', () => ({
  useAppContext: vi.fn(),
}))

vi.mock('@/context/global-public-context', () => ({
  useGlobalPublicStore: vi.fn(),
}))

vi.mock('@/service/use-plugins', () => ({
  useReferenceSettings: vi.fn(),
  useMutationReferenceSettings: vi.fn(),
  useInvalidateReferenceSettings: vi.fn(),
}))

vi.mock('../../base/toast', () => ({
  default: {
    notify: vi.fn(),
  },
}))

describe('useReferenceSetting Hook', () => {
  beforeEach(() => {
    vi.clearAllMocks()

    // Default mocks
    vi.mocked(useAppContext).mockReturnValue({
      isCurrentWorkspaceManager: false,
      isCurrentWorkspaceOwner: false,
    } as ReturnType<typeof useAppContext>)

    vi.mocked(useReferenceSettings).mockReturnValue({
      data: {
        permission: {
          install_permission: PermissionType.everyone,
          debug_permission: PermissionType.everyone,
        },
      },
    } as ReturnType<typeof useReferenceSettings>)

    vi.mocked(useMutationReferenceSettings).mockReturnValue({
      mutate: vi.fn(),
      isPending: false,
    } as unknown as ReturnType<typeof useMutationReferenceSettings>)

    vi.mocked(useInvalidateReferenceSettings).mockReturnValue(vi.fn())
  })

  describe('hasPermission logic', () => {
    it('should return false when permission is undefined', () => {
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: undefined,
            debug_permission: undefined,
          },
        },
      } as unknown as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(false)
      expect(result.current.canDebugger).toBe(false)
    })

    it('should return false when permission is noOne', () => {
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.noOne,
            debug_permission: PermissionType.noOne,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(false)
      expect(result.current.canDebugger).toBe(false)
    })

    it('should return true when permission is everyone', () => {
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.everyone,
            debug_permission: PermissionType.everyone,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(true)
      expect(result.current.canDebugger).toBe(true)
    })

    it('should return isAdmin when permission is admin and user is manager', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: true,
        isCurrentWorkspaceOwner: false,
      } as ReturnType<typeof useAppContext>)

      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.admin,
            debug_permission: PermissionType.admin,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(true)
      expect(result.current.canDebugger).toBe(true)
    })

    it('should return isAdmin when permission is admin and user is owner', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: false,
        isCurrentWorkspaceOwner: true,
      } as ReturnType<typeof useAppContext>)

      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.admin,
            debug_permission: PermissionType.admin,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(true)
      expect(result.current.canDebugger).toBe(true)
    })

    it('should return false when permission is admin and user is not admin', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: false,
        isCurrentWorkspaceOwner: false,
      } as ReturnType<typeof useAppContext>)

      vi.mocked(useReferenceSettings).mockReturnValue({
        data: {
          permission: {
            install_permission: PermissionType.admin,
            debug_permission: PermissionType.admin,
          },
        },
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(false)
      expect(result.current.canDebugger).toBe(false)
    })
  })

  describe('canSetPermissions', () => {
    it('should be true when user is workspace manager', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: true,
        isCurrentWorkspaceOwner: false,
      } as ReturnType<typeof useAppContext>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canSetPermissions).toBe(true)
    })

    it('should be true when user is workspace owner', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: false,
        isCurrentWorkspaceOwner: true,
      } as ReturnType<typeof useAppContext>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canSetPermissions).toBe(true)
    })

    it('should be false when user is neither manager nor owner', () => {
      vi.mocked(useAppContext).mockReturnValue({
        isCurrentWorkspaceManager: false,
        isCurrentWorkspaceOwner: false,
      } as ReturnType<typeof useAppContext>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canSetPermissions).toBe(false)
    })
  })

  describe('setReferenceSettings callback', () => {
    it('should call invalidateReferenceSettings and show toast on success', async () => {
      const mockInvalidate = vi.fn()
      vi.mocked(useInvalidateReferenceSettings).mockReturnValue(mockInvalidate)

      let onSuccessCallback: (() => void) | undefined
      vi.mocked(useMutationReferenceSettings).mockImplementation((options) => {
        onSuccessCallback = options?.onSuccess as () => void
        return {
          mutate: vi.fn(),
          isPending: false,
        } as unknown as ReturnType<typeof useMutationReferenceSettings>
      })

      renderHook(() => useReferenceSetting())

      // Trigger the onSuccess callback
      if (onSuccessCallback)
        onSuccessCallback()

      await waitFor(() => {
        expect(mockInvalidate).toHaveBeenCalled()
        expect(Toast.notify).toHaveBeenCalledWith({
          type: 'success',
          message: 'api.actionSuccess',
        })
      })
    })
  })

  describe('returned values', () => {
    it('should return referenceSetting data', () => {
      const mockData = {
        permission: {
          install_permission: PermissionType.everyone,
          debug_permission: PermissionType.everyone,
        },
      }
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: mockData,
      } as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.referenceSetting).toEqual(mockData)
    })

    it('should return isUpdatePending from mutation', () => {
      vi.mocked(useMutationReferenceSettings).mockReturnValue({
        mutate: vi.fn(),
        isPending: true,
      } as unknown as ReturnType<typeof useMutationReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.isUpdatePending).toBe(true)
    })

    it('should handle null data', () => {
      vi.mocked(useReferenceSettings).mockReturnValue({
        data: null,
      } as unknown as ReturnType<typeof useReferenceSettings>)

      const { result } = renderHook(() => useReferenceSetting())

      expect(result.current.canManagement).toBe(false)
      expect(result.current.canDebugger).toBe(false)
    })
  })
})

describe('useCanInstallPluginFromMarketplace Hook', () => {
  beforeEach(() => {
    vi.clearAllMocks()

    vi.mocked(useAppContext).mockReturnValue({
      isCurrentWorkspaceManager: true,
      isCurrentWorkspaceOwner: false,
    } as ReturnType<typeof useAppContext>)

    vi.mocked(useReferenceSettings).mockReturnValue({
      data: {
        permission: {
          install_permission: PermissionType.everyone,
          debug_permission: PermissionType.everyone,
        },
      },
    } as ReturnType<typeof useReferenceSettings>)

    vi.mocked(useMutationReferenceSettings).mockReturnValue({
      mutate: vi.fn(),
      isPending: false,
    } as unknown as ReturnType<typeof useMutationReferenceSettings>)

    vi.mocked(useInvalidateReferenceSettings).mockReturnValue(vi.fn())
  })

  it('should return true when marketplace is enabled and canManagement is true', () => {
    vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
      const state = {
        systemFeatures: {
          enable_marketplace: true,
        },
      }
      return selector(state as Parameters<typeof selector>[0])
    })

    const { result } = renderHook(() => useCanInstallPluginFromMarketplace())

    expect(result.current.canInstallPluginFromMarketplace).toBe(true)
  })

  it('should return false when marketplace is disabled', () => {
    vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
      const state = {
        systemFeatures: {
          enable_marketplace: false,
        },
      }
      return selector(state as Parameters<typeof selector>[0])
    })

    const { result } = renderHook(() => useCanInstallPluginFromMarketplace())

    expect(result.current.canInstallPluginFromMarketplace).toBe(false)
  })

  it('should return false when canManagement is false', () => {
    vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
      const state = {
        systemFeatures: {
          enable_marketplace: true,
        },
      }
      return selector(state as Parameters<typeof selector>[0])
    })

    vi.mocked(useReferenceSettings).mockReturnValue({
      data: {
        permission: {
          install_permission: PermissionType.noOne,
          debug_permission: PermissionType.noOne,
        },
      },
    } as ReturnType<typeof useReferenceSettings>)

    const { result } = renderHook(() => useCanInstallPluginFromMarketplace())

    expect(result.current.canInstallPluginFromMarketplace).toBe(false)
  })

  it('should return false when both marketplace is disabled and canManagement is false', () => {
    vi.mocked(useGlobalPublicStore).mockImplementation((selector) => {
      const state = {
        systemFeatures: {
          enable_marketplace: false,
        },
      }
      return selector(state as Parameters<typeof selector>[0])
    })

    vi.mocked(useReferenceSettings).mockReturnValue({
      data: {
        permission: {
          install_permission: PermissionType.noOne,
          debug_permission: PermissionType.noOne,
        },
      },
    } as ReturnType<typeof useReferenceSettings>)

    const { result } = renderHook(() => useCanInstallPluginFromMarketplace())

    expect(result.current.canInstallPluginFromMarketplace).toBe(false)
  })
})
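// A minimal sketch of the permission resolution these tests exercise, assuming the hook maps
// PermissionType to booleans roughly as below (illustrative only; the real logic lives in
// use-reference-setting.ts):
//
// const hasPermission = (permission: PermissionType | undefined, isAdmin: boolean) => {
//   if (permission === PermissionType.everyone)
//     return true
//   if (permission === PermissionType.admin)
//     return isAdmin // workspace manager or owner
//   return false // noOne or undefined
// }
// // and, per the last four cases above:
// // canInstallPluginFromMarketplace ~= systemFeatures.enable_marketplace && canManagement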
487	web/app/components/plugins/plugin-page/use-uploader.spec.ts	Normal file
@ -0,0 +1,487 @@
import type { RefObject } from 'react'
import { act, renderHook } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { useUploader } from './use-uploader'

describe('useUploader Hook', () => {
  let mockContainerRef: RefObject<HTMLDivElement | null>
  let mockOnFileChange: (file: File | null) => void
  let mockContainer: HTMLDivElement

  beforeEach(() => {
    vi.clearAllMocks()

    mockContainer = document.createElement('div')
    document.body.appendChild(mockContainer)

    mockContainerRef = { current: mockContainer }
    mockOnFileChange = vi.fn()
  })

  afterEach(() => {
    if (mockContainer.parentNode)
      document.body.removeChild(mockContainer)
  })

  describe('Initial State', () => {
    it('should return initial state with dragging false', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      expect(result.current.dragging).toBe(false)
      expect(result.current.fileUploader.current).toBeNull()
      expect(result.current.fileChangeHandle).not.toBeNull()
      expect(result.current.removeFile).not.toBeNull()
    })

    it('should return null handlers when disabled', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: false,
        }),
      )

      expect(result.current.dragging).toBe(false)
      expect(result.current.fileChangeHandle).toBeNull()
      expect(result.current.removeFile).toBeNull()
    })
  })

  describe('Drag Events', () => {
    it('should handle dragenter and set dragging to true', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })

      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })

      expect(result.current.dragging).toBe(true)
    })

    it('should not set dragging when dragenter without Files type', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['text/plain'] },
      })

      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })

      expect(result.current.dragging).toBe(false)
    })

    it('should handle dragover event', () => {
      renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const dragOverEvent = new Event('dragover', { bubbles: true, cancelable: true })

      act(() => {
        mockContainer.dispatchEvent(dragOverEvent)
      })

      // dragover should prevent default and stop propagation
      expect(mockContainer).toBeInTheDocument()
    })

    it('should handle dragleave when relatedTarget is null', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // First set dragging to true
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })
      expect(result.current.dragging).toBe(true)

      // Then trigger dragleave with null relatedTarget
      const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
      Object.defineProperty(dragLeaveEvent, 'relatedTarget', {
        value: null,
      })

      act(() => {
        mockContainer.dispatchEvent(dragLeaveEvent)
      })

      expect(result.current.dragging).toBe(false)
    })

    it('should handle dragleave when relatedTarget is outside container', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // First set dragging to true
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })
      expect(result.current.dragging).toBe(true)

      // Create element outside container
      const outsideElement = document.createElement('div')
      document.body.appendChild(outsideElement)

      // Trigger dragleave with relatedTarget outside container
      const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
      Object.defineProperty(dragLeaveEvent, 'relatedTarget', {
        value: outsideElement,
      })

      act(() => {
        mockContainer.dispatchEvent(dragLeaveEvent)
      })

      expect(result.current.dragging).toBe(false)
      document.body.removeChild(outsideElement)
    })

    it('should not set dragging to false when relatedTarget is inside container', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // First set dragging to true
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })
      expect(result.current.dragging).toBe(true)

      // Create element inside container
      const insideElement = document.createElement('div')
      mockContainer.appendChild(insideElement)

      // Trigger dragleave with relatedTarget inside container
      const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
      Object.defineProperty(dragLeaveEvent, 'relatedTarget', {
        value: insideElement,
      })

      act(() => {
        mockContainer.dispatchEvent(dragLeaveEvent)
      })

      // Should still be dragging since relatedTarget is inside container
      expect(result.current.dragging).toBe(true)
    })
  })

  describe('Drop Events', () => {
    it('should handle drop event with files', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // First set dragging to true
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })

      // Create mock file
      const file = new File(['content'], 'test.difypkg', { type: 'application/octet-stream' })

      // Trigger drop event
      const dropEvent = new Event('drop', { bubbles: true, cancelable: true })
      Object.defineProperty(dropEvent, 'dataTransfer', {
        value: { files: [file] },
      })

      act(() => {
        mockContainer.dispatchEvent(dropEvent)
      })

      expect(result.current.dragging).toBe(false)
      expect(mockOnFileChange).toHaveBeenCalledWith(file)
    })

    it('should not call onFileChange when drop has no dataTransfer', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // Set dragging first
      const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
      Object.defineProperty(dragEnterEvent, 'dataTransfer', {
        value: { types: ['Files'] },
      })
      act(() => {
        mockContainer.dispatchEvent(dragEnterEvent)
      })

      // Drop without dataTransfer
      const dropEvent = new Event('drop', { bubbles: true, cancelable: true })
      // No dataTransfer property

      act(() => {
        mockContainer.dispatchEvent(dropEvent)
      })

      expect(result.current.dragging).toBe(false)
      expect(mockOnFileChange).not.toHaveBeenCalled()
    })

    it('should not call onFileChange when drop has empty files array', () => {
      renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const dropEvent = new Event('drop', { bubbles: true, cancelable: true })
      Object.defineProperty(dropEvent, 'dataTransfer', {
        value: { files: [] },
      })

      act(() => {
        mockContainer.dispatchEvent(dropEvent)
      })

      expect(mockOnFileChange).not.toHaveBeenCalled()
    })
  })

  describe('File Change Handler', () => {
    it('should call onFileChange with file from input', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const file = new File(['content'], 'test.difypkg', { type: 'application/octet-stream' })
      const mockEvent = {
        target: {
          files: [file],
        },
      } as unknown as React.ChangeEvent<HTMLInputElement>

      act(() => {
        result.current.fileChangeHandle?.(mockEvent)
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(file)
    })

    it('should call onFileChange with null when no files', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      const mockEvent = {
        target: {
          files: null,
        },
      } as unknown as React.ChangeEvent<HTMLInputElement>

      act(() => {
        result.current.fileChangeHandle?.(mockEvent)
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(null)
    })
  })

  describe('Remove File', () => {
    it('should call onFileChange with null', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      act(() => {
        result.current.removeFile?.()
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(null)
    })

    it('should handle removeFile when fileUploader has a value', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // Create a mock input element with value property
      const mockInput = {
        value: 'test.difypkg',
      }

      // Override the fileUploader ref
      Object.defineProperty(result.current.fileUploader, 'current', {
        value: mockInput,
        writable: true,
      })

      act(() => {
        result.current.removeFile?.()
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(null)
      expect(mockInput.value).toBe('')
    })

    it('should handle removeFile when fileUploader is null', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
        }),
      )

      // fileUploader.current is null by default
      act(() => {
        result.current.removeFile?.()
      })

      expect(mockOnFileChange).toHaveBeenCalledWith(null)
    })
  })

  describe('Enabled/Disabled State', () => {
    it('should not add event listeners when disabled', () => {
      const addEventListenerSpy = vi.spyOn(mockContainer, 'addEventListener')

      renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: false,
        }),
      )

      expect(addEventListenerSpy).not.toHaveBeenCalled()
    })

    it('should add event listeners when enabled', () => {
      const addEventListenerSpy = vi.spyOn(mockContainer, 'addEventListener')

      renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: true,
        }),
      )

      expect(addEventListenerSpy).toHaveBeenCalledWith('dragenter', expect.any(Function))
      expect(addEventListenerSpy).toHaveBeenCalledWith('dragover', expect.any(Function))
      expect(addEventListenerSpy).toHaveBeenCalledWith('dragleave', expect.any(Function))
      expect(addEventListenerSpy).toHaveBeenCalledWith('drop', expect.any(Function))
    })

    it('should remove event listeners on cleanup', () => {
      const removeEventListenerSpy = vi.spyOn(mockContainer, 'removeEventListener')

      const { unmount } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: true,
        }),
      )

      unmount()

      expect(removeEventListenerSpy).toHaveBeenCalledWith('dragenter', expect.any(Function))
      expect(removeEventListenerSpy).toHaveBeenCalledWith('dragover', expect.any(Function))
      expect(removeEventListenerSpy).toHaveBeenCalledWith('dragleave', expect.any(Function))
      expect(removeEventListenerSpy).toHaveBeenCalledWith('drop', expect.any(Function))
    })

    it('should return false for dragging when disabled', () => {
      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: mockContainerRef,
          enabled: false,
        }),
      )

      expect(result.current.dragging).toBe(false)
    })
  })

  describe('Container Ref Edge Cases', () => {
    it('should handle null containerRef.current', () => {
      const nullRef: RefObject<HTMLDivElement | null> = { current: null }

      const { result } = renderHook(() =>
        useUploader({
          onFileChange: mockOnFileChange,
          containerRef: nullRef,
        }),
      )

      expect(result.current.dragging).toBe(false)
    })
  })
})
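// A minimal sketch of the useUploader contract these tests assume (inferred from the calls and
// assertions above; the real signature is in ./use-uploader.ts and may differ):
//
// type UseUploaderOptions = {
//   onFileChange: (file: File | null) => void
//   containerRef: RefObject<HTMLDivElement | null>
//   enabled?: boolean // when false, no drag listeners are attached and handlers are null
// }
// type UseUploaderReturn = {
//   dragging: boolean
//   fileUploader: RefObject<HTMLInputElement | null>
//   fileChangeHandle: ((e: React.ChangeEvent<HTMLInputElement>) => void) | null
//   removeFile: (() => void) | null
// }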
|
||||
550
web/app/components/rag-pipeline/index.spec.tsx
Normal file
550
web/app/components/rag-pipeline/index.spec.tsx
Normal file
@ -0,0 +1,550 @@
|
||||
import type { FetchWorkflowDraftResponse } from '@/types/workflow'
|
||||
import { cleanup, render, screen } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
import { BlockEnum } from '@/app/components/workflow/types'
|
||||
|
||||
// Import real utility functions (pure functions, no side effects)
|
||||
|
||||
// Import mocked modules for manipulation
|
||||
import { useDatasetDetailContextWithSelector } from '@/context/dataset-detail'
|
||||
import { usePipelineInit } from './hooks'
|
||||
import RagPipelineWrapper from './index'
|
||||
import { processNodesWithoutDataSource } from './utils'
|
||||
|
||||
// Mock: Context - need to control return values
|
||||
vi.mock('@/context/dataset-detail', () => ({
|
||||
useDatasetDetailContextWithSelector: vi.fn(),
|
||||
}))
|
||||
|
||||
// Mock: Hook with API calls
|
||||
vi.mock('./hooks', () => ({
|
||||
usePipelineInit: vi.fn(),
|
||||
}))
|
||||
|
||||
// Mock: Store creator
|
||||
vi.mock('./store', () => ({
|
||||
createRagPipelineSliceSlice: vi.fn(() => ({})),
|
||||
}))
|
||||
|
||||
// Mock: Utility with complex workflow dependencies (generateNewNode, etc.)
|
||||
vi.mock('./utils', () => ({
|
||||
processNodesWithoutDataSource: vi.fn((nodes, viewport) => ({
|
||||
nodes,
|
||||
viewport,
|
||||
})),
|
||||
}))
|
||||
|
||||
// Mock: Complex component with useParams, Toast, API calls
|
||||
vi.mock('./components/conversion', () => ({
|
||||
default: () => <div data-testid="conversion-component">Conversion Component</div>,
|
||||
}))
|
||||
|
||||
// Mock: Complex component with many hooks and workflow dependencies
|
||||
vi.mock('./components/rag-pipeline-main', () => ({
|
||||
default: ({ nodes, edges, viewport }: any) => (
|
||||
<div data-testid="rag-pipeline-main">
|
||||
<span data-testid="nodes-count">{nodes?.length ?? 0}</span>
|
||||
<span data-testid="edges-count">{edges?.length ?? 0}</span>
|
||||
<span data-testid="viewport-zoom">{viewport?.zoom ?? 'none'}</span>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
// Mock: Complex component with ReactFlow and many providers
|
||||
vi.mock('@/app/components/workflow', () => ({
|
||||
default: ({ children }: { children: React.ReactNode }) => (
|
||||
<div data-testid="workflow-default-context">{children}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
// Mock: Context provider
|
||||
vi.mock('@/app/components/workflow/context', () => ({
|
||||
WorkflowContextProvider: ({ children }: { children: React.ReactNode }) => (
|
||||
<div data-testid="workflow-context-provider">{children}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
// Type assertions for mocked functions
|
||||
const mockUseDatasetDetailContextWithSelector = vi.mocked(useDatasetDetailContextWithSelector)
|
||||
const mockUsePipelineInit = vi.mocked(usePipelineInit)
|
||||
const mockProcessNodesWithoutDataSource = vi.mocked(processNodesWithoutDataSource)
|
||||
|
||||
// Helper to mock selector with actual execution (increases function coverage)
|
||||
// This executes the real selector function: s => s.dataset?.pipeline_id
|
||||
const mockSelectorWithDataset = (pipelineId: string | null | undefined) => {
|
||||
mockUseDatasetDetailContextWithSelector.mockImplementation((selector: (state: any) => any) => {
|
||||
const mockState = { dataset: pipelineId ? { pipeline_id: pipelineId } : null }
|
||||
return selector(mockState)
|
||||
})
|
||||
}
|
||||
|
||||
// Test data factory
|
||||
const createMockWorkflowData = (overrides?: Partial<FetchWorkflowDraftResponse>): FetchWorkflowDraftResponse => ({
|
||||
graph: {
|
||||
nodes: [
|
||||
{ id: 'node-1', type: 'custom', data: { type: BlockEnum.Start, title: 'Start' }, position: { x: 100, y: 100 } },
|
||||
{ id: 'node-2', type: 'custom', data: { type: BlockEnum.End, title: 'End' }, position: { x: 300, y: 100 } },
|
||||
],
|
||||
edges: [
|
||||
{ id: 'edge-1', source: 'node-1', target: 'node-2', type: 'custom' },
|
||||
],
|
||||
viewport: { x: 0, y: 0, zoom: 1 },
|
||||
},
|
||||
hash: 'test-hash-123',
|
||||
updated_at: 1234567890,
|
||||
tool_published: false,
|
||||
environment_variables: [],
|
||||
...overrides,
|
||||
} as FetchWorkflowDraftResponse)
|
||||
|
||||
afterEach(() => {
|
||||
cleanup()
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('RagPipelineWrapper', () => {
|
||||
describe('Rendering', () => {
|
||||
it('should render Conversion component when pipelineId is null', () => {
|
||||
mockSelectorWithDataset(null)
|
||||
mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('conversion-component')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('workflow-context-provider')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render Conversion component when pipelineId is undefined', () => {
|
||||
mockSelectorWithDataset(undefined)
|
||||
mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('conversion-component')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render Conversion component when pipelineId is empty string', () => {
|
||||
mockSelectorWithDataset('')
|
||||
mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('conversion-component')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render WorkflowContextProvider when pipelineId exists', () => {
|
||||
mockSelectorWithDataset('pipeline-123')
|
||||
mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('workflow-context-provider')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('conversion-component')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Props Variations', () => {
|
||||
it('should pass injectWorkflowStoreSliceFn to WorkflowContextProvider', () => {
|
||||
mockSelectorWithDataset('pipeline-456')
|
||||
mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('workflow-context-provider')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('RagPipeline', () => {
|
||||
beforeEach(() => {
|
||||
// Default setup for RagPipeline tests - execute real selector function
|
||||
mockSelectorWithDataset('pipeline-123')
|
||||
})
|
||||
|
||||
describe('Loading State', () => {
|
||||
it('should render Loading component when isLoading is true', () => {
|
||||
mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
// Real Loading component has role="status"
|
||||
expect(screen.getByRole('status')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render Loading component when data is undefined', () => {
|
||||
mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByRole('status')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render Loading component when both data is undefined and isLoading is true', () => {
|
||||
mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByRole('status')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Data Loaded State', () => {
|
||||
it('should render RagPipelineMain when data is loaded', () => {
|
||||
const mockData = createMockWorkflowData()
|
||||
mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('loading-component')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should pass processed nodes to RagPipelineMain', () => {
|
||||
const mockData = createMockWorkflowData()
|
||||
mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('nodes-count').textContent).toBe('2')
|
||||
})
|
||||
|
||||
it('should pass edges to RagPipelineMain', () => {
|
||||
const mockData = createMockWorkflowData()
|
||||
mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('edges-count').textContent).toBe('1')
|
||||
})
|
||||
|
||||
it('should pass viewport to RagPipelineMain', () => {
|
||||
const mockData = createMockWorkflowData({
|
||||
graph: {
|
||||
nodes: [],
|
||||
edges: [],
|
||||
viewport: { x: 100, y: 200, zoom: 1.5 },
|
||||
},
|
||||
})
|
||||
mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })
|
||||
|
||||
render(<RagPipelineWrapper />)
|
||||
|
||||
expect(screen.getByTestId('viewport-zoom').textContent).toBe('1.5')
|
||||
})
|
||||
})
|
||||
|
||||
  describe('Memoization Logic', () => {
    it('should process nodes through initialNodes when data is loaded', () => {
      const mockData = createMockWorkflowData()
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      // initialNodes is a real function - verify nodes are rendered
      // The real initialNodes processes nodes and adds position data
      expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument()
    })

    it('should process edges through initialEdges when data is loaded', () => {
      const mockData = createMockWorkflowData()
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      // initialEdges is a real function - verify component renders with edges
      expect(screen.getByTestId('edges-count').textContent).toBe('1')
    })

    it('should call processNodesWithoutDataSource with nodesData and viewport', () => {
      const mockData = createMockWorkflowData()
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(mockProcessNodesWithoutDataSource).toHaveBeenCalled()
    })

    it('should not process nodes when data is undefined', () => {
      mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false })

      render(<RagPipelineWrapper />)

      // When data is undefined, Loading is shown, processNodesWithoutDataSource is not called
      expect(mockProcessNodesWithoutDataSource).not.toHaveBeenCalled()
    })

    it('should use memoized values when data reference is same', () => {
      const mockData = createMockWorkflowData()
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      const { rerender } = render(<RagPipelineWrapper />)

      // Clear mock call count after initial render
      mockProcessNodesWithoutDataSource.mockClear()

      // Rerender with same data reference (no change to mockUsePipelineInit)
      rerender(<RagPipelineWrapper />)

      // processNodesWithoutDataSource should not be called again due to useMemo
      // Note: React strict mode may cause double render, so we check it's not excessive
      expect(mockProcessNodesWithoutDataSource.mock.calls.length).toBeLessThanOrEqual(1)
    })
  })

  describe('Edge Cases', () => {
    it('should handle empty nodes array', () => {
      const mockData = createMockWorkflowData({
        graph: {
          nodes: [],
          edges: [],
          viewport: { x: 0, y: 0, zoom: 1 },
        },
      })
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(screen.getByTestId('nodes-count').textContent).toBe('0')
    })

    it('should handle empty edges array', () => {
      const mockData = createMockWorkflowData({
        graph: {
          nodes: [{ id: 'node-1', type: 'custom', data: { type: BlockEnum.Start, title: 'Start', desc: '' }, position: { x: 0, y: 0 } }],
          edges: [],
          viewport: { x: 0, y: 0, zoom: 1 },
        },
      })
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(screen.getByTestId('edges-count').textContent).toBe('0')
    })

    it('should handle undefined viewport', () => {
      const mockData = createMockWorkflowData({
        graph: {
          nodes: [],
          edges: [],
          viewport: undefined as any,
        },
      })
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument()
    })

    it('should handle null viewport', () => {
      const mockData = createMockWorkflowData({
        graph: {
          nodes: [],
          edges: [],
          viewport: null as any,
        },
      })
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument()
    })

    it('should handle large number of nodes', () => {
      const largeNodesArray = Array.from({ length: 100 }, (_, i) => ({
        id: `node-${i}`,
        type: 'custom',
        data: { type: BlockEnum.Start, title: `Node ${i}`, desc: '' },
        position: { x: i * 100, y: 0 },
      }))

      const mockData = createMockWorkflowData({
        graph: {
          nodes: largeNodesArray,
          edges: [],
          viewport: { x: 0, y: 0, zoom: 1 },
        },
      })
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(screen.getByTestId('nodes-count').textContent).toBe('100')
    })

    it('should handle viewport with edge case zoom values', () => {
      const mockData = createMockWorkflowData({
        graph: {
          nodes: [],
          edges: [],
          viewport: { x: -1000, y: -1000, zoom: 0.25 },
        },
      })
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(screen.getByTestId('viewport-zoom').textContent).toBe('0.25')
    })

    it('should handle viewport with maximum zoom', () => {
      const mockData = createMockWorkflowData({
        graph: {
          nodes: [],
          edges: [],
          viewport: { x: 0, y: 0, zoom: 4 },
        },
      })
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(screen.getByTestId('viewport-zoom').textContent).toBe('4')
    })
  })

  describe('Component Integration', () => {
    it('should render WorkflowWithDefaultContext as wrapper', () => {
      const mockData = createMockWorkflowData()
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      expect(screen.getByTestId('workflow-default-context')).toBeInTheDocument()
    })

    it('should nest RagPipelineMain inside WorkflowWithDefaultContext', () => {
      const mockData = createMockWorkflowData()
      mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

      render(<RagPipelineWrapper />)

      const workflowContext = screen.getByTestId('workflow-default-context')
      const ragPipelineMain = screen.getByTestId('rag-pipeline-main')

      expect(workflowContext).toContainElement(ragPipelineMain)
    })
  })
})

describe('processNodesWithoutDataSource utility integration', () => {
  beforeEach(() => {
    mockSelectorWithDataset('pipeline-123')
  })

  it('should process nodes through processNodesWithoutDataSource', () => {
    const mockData = createMockWorkflowData()
    mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })
    mockProcessNodesWithoutDataSource.mockReturnValue({
      nodes: [{ id: 'processed-node', type: 'custom', data: { type: BlockEnum.Start, title: 'Processed', desc: '' }, position: { x: 0, y: 0 } }] as any,
      viewport: { x: 0, y: 0, zoom: 2 },
    })

    render(<RagPipelineWrapper />)

    expect(mockProcessNodesWithoutDataSource).toHaveBeenCalled()
    expect(screen.getByTestId('nodes-count').textContent).toBe('1')
    expect(screen.getByTestId('viewport-zoom').textContent).toBe('2')
  })

  it('should handle processNodesWithoutDataSource returning modified viewport', () => {
    const mockData = createMockWorkflowData()
    mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })
    mockProcessNodesWithoutDataSource.mockReturnValue({
      nodes: [],
      viewport: { x: 500, y: 500, zoom: 0.5 },
    })

    render(<RagPipelineWrapper />)

    expect(screen.getByTestId('viewport-zoom').textContent).toBe('0.5')
  })
})

describe('Conditional Rendering Flow', () => {
  it('should transition from loading to loaded state', () => {
    mockSelectorWithDataset('pipeline-123')

    // Start with loading state
    mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true })
    const { rerender } = render(<RagPipelineWrapper />)

    // Real Loading component has role="status"
    expect(screen.getByRole('status')).toBeInTheDocument()

    // Transition to loaded state
    const mockData = createMockWorkflowData()
    mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })
    rerender(<RagPipelineWrapper />)

    expect(screen.getByTestId('rag-pipeline-main')).toBeInTheDocument()
  })

  it('should switch from Conversion to Pipeline when pipelineId becomes available', () => {
    // Start without pipelineId
    mockSelectorWithDataset(null)
    mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: false })

    const { rerender } = render(<RagPipelineWrapper />)

    expect(screen.getByTestId('conversion-component')).toBeInTheDocument()

    // PipelineId becomes available
    mockSelectorWithDataset('new-pipeline-id')
    mockUsePipelineInit.mockReturnValue({ data: undefined, isLoading: true })
    rerender(<RagPipelineWrapper />)

    expect(screen.queryByTestId('conversion-component')).not.toBeInTheDocument()
    // Real Loading component has role="status"
    expect(screen.getByRole('status')).toBeInTheDocument()
  })
})

describe('Error Handling', () => {
  beforeEach(() => {
    mockSelectorWithDataset('pipeline-123')
  })

  it('should throw when graph nodes is null', () => {
    const mockData = {
      graph: {
        nodes: null as any,
        edges: null as any,
        viewport: { x: 0, y: 0, zoom: 1 },
      },
      hash: 'test',
      updated_at: 123,
    } as FetchWorkflowDraftResponse

    mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

    // Suppress console.error for expected error
    const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {})

    // Real initialNodes will throw when nodes is null
    // This documents the component's current behavior - it requires valid nodes array
    expect(() => render(<RagPipelineWrapper />)).toThrow()

    consoleSpy.mockRestore()
  })

  it('should throw when graph property is missing', () => {
    const mockData = {
      hash: 'test',
      updated_at: 123,
    } as unknown as FetchWorkflowDraftResponse

    mockUsePipelineInit.mockReturnValue({ data: mockData, isLoading: false })

    // Suppress console.error for expected error
    const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {})

    // When graph is undefined, component throws because data.graph.nodes is accessed
    // This documents the component's current behavior - it requires graph to be present
    expect(() => render(<RagPipelineWrapper />)).toThrow()

    consoleSpy.mockRestore()
  })
})
@ -1,7 +1,7 @@
{
  "name": "dify-web",
  "type": "module",
-  "version": "1.11.3",
+  "version": "1.11.4",
  "private": true,
  "packageManager": "pnpm@10.27.0+sha512.72d699da16b1179c14ba9e64dc71c9a40988cbdc65c264cb0e489db7de917f20dcf4d64d8723625f2969ba52d4b7e2a1170682d9ac2a5dcaeaab732b7e16f04a",
  "imports": {