Compare commits

..

44 Commits

Author SHA1 Message Date
2d7bffcc11 fix: upgrade OpenTelemetry packages from 0.48b0 to 0.49b0
Fixes "Failed to detach context" error in production by upgrading to OTEL 0.49b0,
which includes None token guards in Celery instrumentor (PR opentelemetry-python-contrib#2927).

Package Updates:
- OTEL instrumentation: 0.48b0 → 0.49b0
- OTEL SDK/API: 1.27.0 → 1.28.0
- protobuf: 4.25.8 → 5.29.6 (required by opentelemetry-proto 1.28.0)
- Google Cloud packages upgraded for protobuf 5.x compatibility:
  - google-api-core: 2.18.0 → 2.19.1+
  - google-auth: 2.29.0 → 2.47.0+
  - google-cloud-aiplatform: 1.49.0 → 1.123.0+
  - googleapis-common-protos: 1.63.0 → 1.65.0+
  - google-cloud-storage: 2.16.0 → 3.0.0+
- httpx: 0.27.0 → 0.28.0 (required by google-genai 1.37+)

Also removed duplicate opentelemetry-instrumentation-httpx entry in pyproject.toml.
2026-03-01 21:47:51 -08:00
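The upstream guard referenced above takes roughly this shape (a minimal sketch of the pattern from opentelemetry-python-contrib#2927, not the verbatim patch):

from opentelemetry import context as context_api

def _safe_detach(token):
    # Sketch of the None-token guard: before 0.49b0 the Celery instrumentor could
    # call detach() with a token that was never attached, producing the
    # "Failed to detach context" error. Guarding on None skips the detach instead.
    if token is not None:
        context_api.detach(token)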
5025e29220 test: remove unrelated enterprise service test
Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-14 16:34:49 +08:00
3cdc9c119e refactor(api): enhance DbMigrationAutoRenewLock acquisition logic
- Added a check to prevent double acquisition of the DB migration lock, raising an error if an attempt is made to acquire it while already held.
- Implemented logic to reuse the lock object if it has already been created, improving efficiency and clarity in lock management.
- Reset the lock object to None upon release to ensure proper state management.

(cherry picked from commit d4b102d3c8a473c4fd6409dba7c198289bb5f921)
2026-02-14 16:28:38 +08:00
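Based on the DbMigrationAutoRenewLock implementation later in this compare, the guarded lifecycle sketched here looks roughly like:

lock = DbMigrationAutoRenewLock(redis_client=redis_client, name="db_upgrade_lock", ttl_seconds=60)
if lock.acquire(blocking=False):
    try:
        lock.acquire(blocking=False)  # double acquire while held now raises RuntimeError
    except RuntimeError:
        pass
    finally:
        lock.release_safely(status="failed")  # resets the lock object to None; safe to acquire again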
18ba367b11 refactor(api): improve DbMigrationAutoRenewLock configuration and logging
- Introduced constants for minimum and maximum join timeout values, enhancing clarity and maintainability.
- Updated the renewal interval calculation to use defined constants for better readability.
- Improved logging messages to include context information, making it easier to trace issues during lock operations.

(cherry picked from commit 1471b77bf5156a95417bde148753702d44221929)
2026-02-14 16:28:38 +08:00
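Worked example of the interval and timeout math this commit introduces (constant values taken from libs/db_migration_lock.py later in this compare):

ttl_seconds = 60.0
renew_interval = max(0.1, ttl_seconds / 3)               # MIN_RENEW_INTERVAL_SECONDS, DEFAULT_RENEW_INTERVAL_DIVISOR -> 20.0s
join_timeout = max(0.5, min(5.0, renew_interval * 2.0))  # MIN/MAX_JOIN_TIMEOUT_SECONDS, JOIN_TIMEOUT_MULTIPLIER -> 5.0s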
d0bd74fccb [autofix.ci] apply automated fixes
(cherry picked from commit 907e63cdc57f8006017837a74c2da2fbe274dcfb)
2026-02-14 16:28:38 +08:00
5ccbc00eb9 refactor(api): replace AutoRenewRedisLock with DbMigrationAutoRenewLock
- Updated the database migration locking mechanism to use DbMigrationAutoRenewLock for improved clarity and functionality.
- Removed the AutoRenewRedisLock implementation and its associated tests.
- Adjusted integration and unit tests to reflect the new locking class and its usage in the upgrade_db command.

(cherry picked from commit c812ad9ff26bed3eb59862bd7a5179b7ee83f11f)
2026-02-14 16:28:38 +08:00
94603b5408 refactor(api): replace heartbeat mechanism with AutoRenewRedisLock for database migration
- Removed the manual heartbeat function for renewing the Redis lock during database migrations.
- Integrated AutoRenewRedisLock to handle lock renewal automatically, simplifying the upgrade_db command.
- Updated unit tests to reflect changes in lock handling and error management during migrations.

(cherry picked from commit 8814256eb5fa20b29e554264f3b659b027bc4c9a)
2026-02-14 16:28:38 +08:00
8d4bd5636b refactor(tests): replace hardcoded wait time with constant for clarity
- Introduced HEARTBEAT_WAIT_TIMEOUT_SECONDS constant to improve readability and maintainability of test code.
- Updated test assertions to use the new constant instead of a hardcoded value.

(cherry picked from commit 0d53743d83b03ae0e68fad143711ffa5f6354093)
2026-02-14 16:28:38 +08:00
ee0c4a8852 [autofix.ci] apply automated fixes
(cherry picked from commit 326cffa553ffac1bcd39a051c899c35b0ebe997d)
2026-02-14 16:28:38 +08:00
6032c598b0 fix(api): improve logging for database migration lock release
- Added a migration_succeeded flag to track the success of database migrations.
- Enhanced logging messages to indicate the status of the migration when releasing the lock, providing clearer context for potential issues.

(cherry picked from commit e74be0392995d16d288eed2175c51148c9e5b9c0)
2026-02-14 16:28:38 +08:00
afdd5b6c86 feat(api): implement heartbeat mechanism for database migration lock
- Added a heartbeat function to renew the Redis lock during database migrations, preventing long blockages from crashed processes.
- Updated the upgrade_db command to utilize the new locking mechanism with a configurable TTL.
- Removed the deprecated MIGRATION_LOCK_TTL from DeploymentConfig and related files.
- Enhanced unit tests to cover the new lock renewal behavior and error handling during migrations.

(cherry picked from commit a3331c622435f9f215b95f6b0261f43ae56a9d9c)
2026-02-14 16:28:38 +08:00
9acdfbde2f feat(api): enhance database migration locking mechanism and configuration
- Introduced a configurable Redis lock TTL for database migrations in DeploymentConfig.
- Updated the upgrade_db command to handle lock release errors gracefully.
- Added documentation for the new MIGRATION_LOCK_TTL environment variable in the .env.example file and docker-compose.yaml.

(cherry picked from commit 4a05fb120622908bc109a3715686706aab3d3b59)
2026-02-14 16:28:38 +08:00
1977e68b2d fix: make flask upgrade-db fail on error (#32024)
(cherry picked from commit d9530f7bb7)
2026-02-14 16:28:38 +08:00
e9a7e8f77f fix: include sso_verified in access_mode validation (#32325) 2026-02-13 23:40:37 -08:00
9e2b28c950 fix(app-copy): inherit web app permission from original app (#32322) 2026-02-13 22:33:51 -08:00
affd07ae94 fix: make e-1.12.1 enterprise migrations database-agnostic for MySQL/TiDB (#32267)
Co-authored-by: Cursor <cursoragent@cursor.com>
2026-02-12 15:45:24 +08:00
111c76b71f Merge remote-tracking branch 'origin/hotfix/1.12.1-fix.6' into release/e-1.12.1 2026-02-12 13:26:12 +08:00
793d22754e fix: fix get_message_event_type returning wrong message type (#32019)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-11 11:00:40 +08:00
b62965034e refactor: document_indexing_sync_task split db session (#32129)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-09 17:16:17 +08:00
016d72a8c6 fix: fix missing trigger output schema (#32116) 2026-02-09 17:16:08 +08:00
08b8eff933 Merge remote-tracking branch 'origin/hotfix/1.12.1-fix.4' into release/e-1.12.1 2026-02-09 15:54:32 +08:00
579cdea820 fix: include app id in automatic generation requests (#32138) 2026-02-09 15:52:22 +08:00
125f7e3ab4 refactor: document_indexing_update_task split database session (#32105)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-09 10:51:45 +08:00
400ed2fd72 refactor: partition Celery task sessions into smaller, discrete execu… (#32085)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-08 21:05:03 +08:00
840a8f3fc2 perf: use batch delete method instead of single delete (#32036)
Co-authored-by: fatelei <fatelei@gmail.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: FFXN <lizy@dify.ai>
2026-02-06 15:13:17 +08:00
b4a5296fd1 fix: fix missing tool type (#32042) 2026-02-06 14:38:54 +08:00
d7c3ae50dc Update api/services/tools/builtin_tools_manage_service.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-06 13:37:37 +08:00
b921711e9e fix: hide invite button if current user is not workspace manager (#31742) 2026-02-06 13:37:37 +08:00
fb38ad84e1 chore: upgrade deps, see pull #30976 2026-02-06 13:37:33 +08:00
91c854b5be chore: sync enterprise release (#31626)
Co-authored-by: zhsama <torvalds@linux.do>
2026-02-06 13:35:28 +08:00
d35b231941 fix: enterprise CVE-2026-23864 (#31599) 2026-02-06 13:35:22 +08:00
849b4b8c40 fix: add TYPE_CHECKING import for Account type annotation 2026-02-06 13:32:20 +08:00
990e8feee8 security: fix IDOR and privilege escalation in set_default_provider
- Add tenant_id verification to prevent IDOR attacks
- Add admin check for enterprise tenant-wide default changes
- Preserve non-enterprise behavior (users can set own defaults)
2026-02-06 13:32:18 +08:00
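A minimal sketch of the tenant-scoping guard, mirroring the set_default_provider change later in this compare:

# Scope the lookup by tenant_id so a caller cannot reference another
# tenant's credential by id (the IDOR fixed above).
target_provider = session.query(BuiltinToolProvider).filter_by(id=id, tenant_id=tenant_id).first()
if target_provider is None:
    raise ValueError("provider not found")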
53641019b1 fix: remove user_id filter when clearing default provider (enterprise only)
When setting a new default credential in enterprise mode, the code was
only clearing is_default for credentials matching the current user_id.
This caused issues when:
1. Enterprise credential A (synced with system user_id) was default
2. User sets local credential B as default
3. A still had is_default=true (different user_id)
4. Both A and B were considered defaults

The fix removes user_id from the filter only for enterprise deployments,
since enterprise credentials may have different user_id than local ones.
Non-enterprise behavior is unchanged to avoid breaking existing setups.

Fixes EE-1511
2026-02-06 13:31:50 +08:00
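Condensed sketch of the filter change (the actual diff keeps two explicit branches rather than this conditional composition):

query = session.query(BuiltinToolProvider).filter_by(tenant_id=tenant_id, provider=provider, is_default=True)
if not dify_config.ENTERPRISE_ENABLED:
    query = query.filter_by(user_id=user_id)  # non-enterprise keeps per-user scoping
query.update({"is_default": False})  # enterprise clears ALL defaults for the provider in the tenant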
d1f10ff301 feat: add redis mq for account deletion cleanup 2026-02-06 13:31:50 +08:00
c8027e168b feat: implement workspace permission checks for member invitations an… (#31202) 2026-02-06 13:31:46 +08:00
aae3f76999 feat: ee workspace permission control (#30841) 2026-02-06 13:31:26 +08:00
2860c72b03 feat: ee workspace permission control (#30841) 2026-02-06 13:13:06 +08:00
fcb53383df fix: fix incorrect agent node tool type (#32008)
Infer the real tool type by querying the relevant database tables.

The root cause of the incorrect `type` field is still unclear.
2026-02-06 11:25:29 +08:00
540e1db83c perf(api): Optimize the response time of AppListApi endpoint (#31999) 2026-02-06 10:46:25 +08:00
2f75e38c08 fix: fix misuse of db.session (#31971) 2026-02-05 15:59:37 +08:00
cd03e0a9ef fix: fix delete_draft_variables_batch cycling forever (#31934)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2026-02-04 19:42:50 +08:00
df2421d187 fix: auto summary env (#31930) 2026-02-04 19:42:26 +08:00
0ba321d840 chore: bump version in docker-compose and package manager to 1.12.1 (#31947) 2026-02-04 19:41:50 +08:00
162 changed files with 7188 additions and 21782 deletions

.github/CODEOWNERS vendored
View File

@@ -24,10 +24,6 @@
/api/services/tools/mcp_tools_manage_service.py @Nov1c444
/api/controllers/mcp/ @Nov1c444
/api/controllers/console/app/mcp_server.py @Nov1c444
# Backend - Tests
/api/tests/ @laipz8200 @QuantumGhost
/api/tests/**/*mcp* @Nov1c444
# Backend - Workflow - Engine (Core graph execution engine)
@@ -238,9 +234,6 @@
# Frontend - Base Components
/web/app/components/base/ @iamjoel @zxhlyh
# Frontend - Base Components Tests
/web/app/components/base/**/*.spec.tsx @hyoban @CodingOnStar
# Frontend - Utils and Hooks
/web/utils/classnames.ts @iamjoel @zxhlyh
/web/utils/time.ts @iamjoel @zxhlyh

View File

@@ -30,6 +30,7 @@ from extensions.ext_redis import redis_client
from extensions.ext_storage import storage
from extensions.storage.opendal_storage import OpenDALStorage
from extensions.storage.storage_type import StorageType
from libs.db_migration_lock import DbMigrationAutoRenewLock
from libs.helper import email as email_validate
from libs.password import hash_password, password_pattern, valid_password
from libs.rsa import generate_key_pair
@@ -54,6 +55,8 @@ from tasks.remove_app_and_related_data_task import delete_draft_variables_batch
logger = logging.getLogger(__name__)
DB_UPGRADE_LOCK_TTL_SECONDS = 60
@click.command("reset-password", help="Reset the account password.")
@click.option("--email", prompt=True, help="Account email to reset password for")
@@ -727,8 +730,15 @@ def create_tenant(email: str, language: str | None = None, name: str | None = No
@click.command("upgrade-db", help="Upgrade the database")
def upgrade_db():
click.echo("Preparing database migration...")
lock = redis_client.lock(name="db_upgrade_lock", timeout=60)
lock = DbMigrationAutoRenewLock(
redis_client=redis_client,
name="db_upgrade_lock",
ttl_seconds=DB_UPGRADE_LOCK_TTL_SECONDS,
logger=logger,
log_context="db_migration",
)
if lock.acquire(blocking=False):
migration_succeeded = False
try:
click.echo(click.style("Starting database migration.", fg="green"))
@@ -737,12 +747,16 @@ def upgrade_db():
flask_migrate.upgrade()
migration_succeeded = True
click.echo(click.style("Database migration successful!", fg="green"))
except Exception:
except Exception as e:
logger.exception("Failed to execute database migration")
click.echo(click.style(f"Database migration failed: {e}", fg="red"))
raise SystemExit(1)
finally:
lock.release()
status = "successful" if migration_succeeded else "failed"
lock.release_safely(status=status)
else:
click.echo("Database migration skipped")

View File

@@ -1,3 +1,4 @@
import logging
import uuid
from datetime import datetime
from typing import Any, Literal, TypeAlias
@@ -54,6 +55,8 @@ ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "co
register_enum_models(console_ns, IconType)
_logger = logging.getLogger(__name__)
class AppListQuery(BaseModel):
page: int = Field(default=1, ge=1, le=99999, description="Page number (1-99999)")
@@ -499,6 +502,7 @@ class AppListApi(Resource):
select(Workflow).where(
Workflow.version == Workflow.VERSION_DRAFT,
Workflow.app_id.in_(workflow_capable_app_ids),
Workflow.tenant_id == current_tenant_id,
)
)
.scalars()
@@ -510,12 +514,14 @@
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
node_id = None
try:
for _, node_data in workflow.walk_nodes():
for node_id, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
except Exception:
_logger.exception("error while walking nodes, workflow_id=%s, node_id=%s", workflow.id, node_id)
continue
for app in app_pagination.items:
@@ -654,6 +660,19 @@ class AppCopyApi(Resource):
)
session.commit()
# Inherit web app permission from original app
if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:
try:
# Get the original app's access mode
original_settings = EnterpriseService.WebAppAuth.get_app_access_mode_by_id(app_model.id)
access_mode = original_settings.access_mode
except Exception:
# If original app has no settings (old app), default to public to match fallback behavior
access_mode = "public"
# Apply the same access mode to the copied app
EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, access_mode)
stmt = select(App).where(App.id == result.app_id)
app = session.scalar(stmt)

View File

@@ -1,16 +1,15 @@
import logging
from typing import Any, Literal, cast
from typing import Any, cast
from flask import request
from flask_restx import Resource, fields, marshal, marshal_with
from pydantic import BaseModel
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.common.fields import Parameters as ParametersResponse
from controllers.common.fields import Site as SiteResponse
from controllers.common.schema import get_or_create_model
from controllers.console import api, console_ns
from controllers.console import api
from controllers.console.app.error import (
AppUnavailableError,
AudioTooLargeError,
@@ -118,56 +117,7 @@ workflow_fields_copy["rag_pipeline_variables"] = fields.List(fields.Nested(pipel
workflow_model = get_or_create_model("TrialWorkflow", workflow_fields_copy)
# Pydantic models for request validation
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
class WorkflowRunRequest(BaseModel):
inputs: dict
files: list | None = None
class ChatRequest(BaseModel):
inputs: dict
query: str
files: list | None = None
conversation_id: str | None = None
parent_message_id: str | None = None
retriever_from: str = "explore_app"
class TextToSpeechRequest(BaseModel):
message_id: str | None = None
voice: str | None = None
text: str | None = None
streaming: bool | None = None
class CompletionRequest(BaseModel):
inputs: dict
query: str = ""
files: list | None = None
response_mode: Literal["blocking", "streaming"] | None = None
retriever_from: str = "explore_app"
# Register schemas for Swagger documentation
console_ns.schema_model(
WorkflowRunRequest.__name__, WorkflowRunRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)
console_ns.schema_model(
ChatRequest.__name__, ChatRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)
console_ns.schema_model(
TextToSpeechRequest.__name__, TextToSpeechRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)
console_ns.schema_model(
CompletionRequest.__name__, CompletionRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)
class TrialAppWorkflowRunApi(TrialAppResource):
@console_ns.expect(console_ns.models[WorkflowRunRequest.__name__])
def post(self, trial_app):
"""
Run workflow
@@ -179,8 +129,10 @@ class TrialAppWorkflowRunApi(TrialAppResource):
if app_mode != AppMode.WORKFLOW:
raise NotWorkflowAppError()
request_data = WorkflowRunRequest.model_validate(console_ns.payload)
args = request_data.model_dump()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
parser.add_argument("files", type=list, required=False, location="json")
args = parser.parse_args()
assert current_user is not None
try:
app_id = app_model.id
@@ -231,7 +183,6 @@ class TrialAppWorkflowTaskStopApi(TrialAppResource):
class TrialChatApi(TrialAppResource):
@console_ns.expect(console_ns.models[ChatRequest.__name__])
@trial_feature_enable
def post(self, trial_app):
app_model = trial_app
@@ -239,14 +190,14 @@
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
raise NotChatAppError()
request_data = ChatRequest.model_validate(console_ns.payload)
args = request_data.model_dump()
# Validate UUID values if provided
if args.get("conversation_id"):
args["conversation_id"] = uuid_value(args["conversation_id"])
if args.get("parent_message_id"):
args["parent_message_id"] = uuid_value(args["parent_message_id"])
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, location="json")
parser.add_argument("query", type=str, required=True, location="json")
parser.add_argument("files", type=list, required=False, location="json")
parser.add_argument("conversation_id", type=uuid_value, location="json")
parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
args = parser.parse_args()
args["auto_generate_name"] = False
@@ -369,16 +320,20 @@ class TrialChatAudioApi(TrialAppResource):
class TrialChatTextApi(TrialAppResource):
@console_ns.expect(console_ns.models[TextToSpeechRequest.__name__])
@trial_feature_enable
def post(self, trial_app):
app_model = trial_app
try:
request_data = TextToSpeechRequest.model_validate(console_ns.payload)
parser = reqparse.RequestParser()
parser.add_argument("message_id", type=str, required=False, location="json")
parser.add_argument("voice", type=str, location="json")
parser.add_argument("text", type=str, location="json")
parser.add_argument("streaming", type=bool, location="json")
args = parser.parse_args()
message_id = request_data.message_id
text = request_data.text
voice = request_data.voice
message_id = args.get("message_id", None)
text = args.get("text", None)
voice = args.get("voice", None)
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
@@ -416,15 +371,19 @@ class TrialChatTextApi(TrialAppResource):
class TrialCompletionApi(TrialAppResource):
@console_ns.expect(console_ns.models[CompletionRequest.__name__])
@trial_feature_enable
def post(self, trial_app):
app_model = trial_app
if app_model.mode != "completion":
raise NotCompletionAppError()
request_data = CompletionRequest.model_validate(console_ns.payload)
args = request_data.model_dump()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, location="json")
parser.add_argument("query", type=str, location="json", default="")
parser.add_argument("files", type=list, required=False, location="json")
parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
args = parser.parse_args()
streaming = args["response_mode"] == "streaming"
args["auto_generate_name"] = False

View File

@@ -878,7 +878,11 @@ class ToolBuiltinProviderSetDefaultApi(Resource):
current_user, current_tenant_id = current_account_with_tenant()
payload = BuiltinProviderDefaultCredentialPayload.model_validate(console_ns.payload or {})
return BuiltinToolManageService.set_default_provider(
tenant_id=current_tenant_id, user_id=current_user.id, provider=provider, id=payload.id
tenant_id=current_tenant_id,
user_id=current_user.id,
provider=provider,
id=args["id"],
account=current_user,
)

View File

@@ -45,6 +45,8 @@ from core.app.entities.task_entities import (
from core.app.task_pipeline.based_generate_task_pipeline import BasedGenerateTaskPipeline
from core.app.task_pipeline.message_cycle_manager import MessageCycleManager
from core.base.tts import AppGeneratorTTSPublisher, AudioTrunk
from core.file import helpers as file_helpers
from core.file.enums import FileTransferMethod
from core.model_manager import ModelInstance
from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta, LLMUsage
from core.model_runtime.entities.message_entities import (
@@ -56,10 +58,11 @@ from core.ops.entities.trace_entity import TraceTaskName
from core.ops.ops_trace_manager import TraceQueueManager, TraceTask
from core.prompt.utils.prompt_message_util import PromptMessageUtil
from core.prompt.utils.prompt_template_parser import PromptTemplateParser
from core.tools.signature import sign_tool_file
from events.message_event import message_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.model import AppMode, Conversation, Message, MessageAgentThought
from models.model import AppMode, Conversation, Message, MessageAgentThought, MessageFile, UploadFile
logger = logging.getLogger(__name__)
@@ -463,6 +466,85 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
metadata=metadata_dict,
)
def _record_files(self):
with Session(db.engine, expire_on_commit=False) as session:
message_files = session.scalars(select(MessageFile).where(MessageFile.message_id == self._message_id)).all()
if not message_files:
return None
files_list = []
upload_file_ids = [
mf.upload_file_id
for mf in message_files
if mf.transfer_method == FileTransferMethod.LOCAL_FILE and mf.upload_file_id
]
upload_files_map = {}
if upload_file_ids:
upload_files = session.scalars(select(UploadFile).where(UploadFile.id.in_(upload_file_ids))).all()
upload_files_map = {uf.id: uf for uf in upload_files}
for message_file in message_files:
upload_file = None
if message_file.transfer_method == FileTransferMethod.LOCAL_FILE and message_file.upload_file_id:
upload_file = upload_files_map.get(message_file.upload_file_id)
url = None
filename = "file"
mime_type = "application/octet-stream"
size = 0
extension = ""
if message_file.transfer_method == FileTransferMethod.REMOTE_URL:
url = message_file.url
if message_file.url:
filename = message_file.url.split("/")[-1].split("?")[0] # Remove query params
elif message_file.transfer_method == FileTransferMethod.LOCAL_FILE:
if upload_file:
url = file_helpers.get_signed_file_url(upload_file_id=str(upload_file.id))
filename = upload_file.name
mime_type = upload_file.mime_type or "application/octet-stream"
size = upload_file.size or 0
extension = f".{upload_file.extension}" if upload_file.extension else ""
elif message_file.upload_file_id:
# Fallback: generate URL even if upload_file not found
url = file_helpers.get_signed_file_url(upload_file_id=str(message_file.upload_file_id))
elif message_file.transfer_method == FileTransferMethod.TOOL_FILE and message_file.url:
# For tool files, use URL directly if it's HTTP, otherwise sign it
if message_file.url.startswith("http"):
url = message_file.url
filename = message_file.url.split("/")[-1].split("?")[0]
else:
# Extract tool file id and extension from URL
url_parts = message_file.url.split("/")
if url_parts:
file_part = url_parts[-1].split("?")[0] # Remove query params first
# Use rsplit to correctly handle filenames with multiple dots
if "." in file_part:
tool_file_id, ext = file_part.rsplit(".", 1)
extension = f".{ext}"
else:
tool_file_id = file_part
extension = ".bin"
url = sign_tool_file(tool_file_id=tool_file_id, extension=extension)
filename = file_part
transfer_method_value = message_file.transfer_method
remote_url = message_file.url if message_file.transfer_method == FileTransferMethod.REMOTE_URL else ""
file_dict = {
"related_id": message_file.id,
"extension": extension,
"filename": filename,
"size": size,
"mime_type": mime_type,
"transfer_method": transfer_method_value,
"type": message_file.type,
"url": url or "",
"upload_file_id": message_file.upload_file_id or message_file.id,
"remote_url": remote_url,
}
files_list.append(file_dict)
return files_list or None
def _agent_message_to_stream_response(self, answer: str, message_id: str) -> AgentMessageStreamResponse:
"""
Agent message to stream response.

View File

@@ -64,7 +64,13 @@ class MessageCycleManager:
# Use SQLAlchemy 2.x style session.scalar(select(...))
with session_factory.create_session() as session:
message_file = session.scalar(select(MessageFile).where(MessageFile.message_id == message_id))
message_file = session.scalar(
select(MessageFile)
.where(
MessageFile.message_id == message_id,
)
.where(MessageFile.belongs_to == "assistant")
)
if message_file:
self._message_has_file.add(message_id)

View File

@@ -0,0 +1,213 @@
"""
DB migration Redis lock with heartbeat renewal.
This is intentionally migration-specific. Background renewal is a trade-off that makes sense
for unbounded, blocking operations like DB migrations (DDL/DML) where the main thread cannot
periodically refresh the lock TTL.
Do NOT use this as a general-purpose lock primitive for normal application code. Prefer explicit
lock lifecycle management (e.g. redis-py Lock context manager + `extend()` / `reacquire()` from
the same thread) when execution flow is under control.
"""
from __future__ import annotations
import logging
import threading
from typing import Any
from redis.exceptions import LockNotOwnedError, RedisError
logger = logging.getLogger(__name__)
MIN_RENEW_INTERVAL_SECONDS = 0.1
DEFAULT_RENEW_INTERVAL_DIVISOR = 3
MIN_JOIN_TIMEOUT_SECONDS = 0.5
MAX_JOIN_TIMEOUT_SECONDS = 5.0
JOIN_TIMEOUT_MULTIPLIER = 2.0
class DbMigrationAutoRenewLock:
"""
Redis lock wrapper that automatically renews TTL while held (migration-only).
Notes:
- We force `thread_local=False` when creating the underlying redis-py lock, because the
lock token must be accessible from the heartbeat thread for `reacquire()` to work.
- `release_safely()` is best-effort: it never raises, so it won't mask the caller's
primary error/exit code.
"""
_redis_client: Any
_name: str
_ttl_seconds: float
_renew_interval_seconds: float
_log_context: str | None
_logger: logging.Logger
_lock: Any
_stop_event: threading.Event | None
_thread: threading.Thread | None
_acquired: bool
def __init__(
self,
redis_client: Any,
name: str,
ttl_seconds: float = 60,
renew_interval_seconds: float | None = None,
*,
logger: logging.Logger | None = None,
log_context: str | None = None,
) -> None:
self._redis_client = redis_client
self._name = name
self._ttl_seconds = float(ttl_seconds)
self._renew_interval_seconds = (
float(renew_interval_seconds)
if renew_interval_seconds is not None
else max(MIN_RENEW_INTERVAL_SECONDS, self._ttl_seconds / DEFAULT_RENEW_INTERVAL_DIVISOR)
)
self._logger = logger or logging.getLogger(__name__)
self._log_context = log_context
self._lock = None
self._stop_event = None
self._thread = None
self._acquired = False
@property
def name(self) -> str:
return self._name
def acquire(self, *args: Any, **kwargs: Any) -> bool:
"""
Acquire the lock and start heartbeat renewal on success.
Accepts the same args/kwargs as redis-py `Lock.acquire()`.
"""
# Prevent accidental double-acquire which could leave the previous heartbeat thread running.
if self._acquired:
raise RuntimeError("DB migration lock is already acquired; call release_safely() before acquiring again.")
# Reuse the lock object if we already created one.
if self._lock is None:
self._lock = self._redis_client.lock(
name=self._name,
timeout=self._ttl_seconds,
thread_local=False,
)
acquired = bool(self._lock.acquire(*args, **kwargs))
self._acquired = acquired
if acquired:
self._start_heartbeat()
return acquired
def owned(self) -> bool:
if self._lock is None:
return False
try:
return bool(self._lock.owned())
except Exception:
# Ownership checks are best-effort and must not break callers.
return False
def _start_heartbeat(self) -> None:
if self._lock is None:
return
if self._stop_event is not None:
return
self._stop_event = threading.Event()
self._thread = threading.Thread(
target=self._heartbeat_loop,
args=(self._lock, self._stop_event),
daemon=True,
name=f"DbMigrationAutoRenewLock({self._name})",
)
self._thread.start()
def _heartbeat_loop(self, lock: Any, stop_event: threading.Event) -> None:
while not stop_event.wait(self._renew_interval_seconds):
try:
lock.reacquire()
except LockNotOwnedError:
self._logger.warning(
"DB migration lock is no longer owned during heartbeat; stop renewing. log_context=%s",
self._log_context,
exc_info=True,
)
return
except RedisError:
self._logger.warning(
"Failed to renew DB migration lock due to Redis error; will retry. log_context=%s",
self._log_context,
exc_info=True,
)
except Exception:
self._logger.warning(
"Unexpected error while renewing DB migration lock; will retry. log_context=%s",
self._log_context,
exc_info=True,
)
def release_safely(self, *, status: str | None = None) -> None:
"""
Stop heartbeat and release lock. Never raises.
Args:
status: Optional caller-provided status (e.g. 'successful'/'failed') to add context to logs.
"""
lock = self._lock
if lock is None:
return
self._stop_heartbeat()
# Lock release errors should never mask the real error/exit code.
try:
lock.release()
except LockNotOwnedError:
self._logger.warning(
"DB migration lock not owned on release; ignoring. status=%s log_context=%s",
status,
self._log_context,
exc_info=True,
)
except RedisError:
self._logger.warning(
"Failed to release DB migration lock due to Redis error; ignoring. status=%s log_context=%s",
status,
self._log_context,
exc_info=True,
)
except Exception:
self._logger.warning(
"Unexpected error while releasing DB migration lock; ignoring. status=%s log_context=%s",
status,
self._log_context,
exc_info=True,
)
finally:
self._acquired = False
self._lock = None
def _stop_heartbeat(self) -> None:
if self._stop_event is None:
return
self._stop_event.set()
if self._thread is not None:
# Best-effort join: if Redis calls are blocked, the daemon thread may remain alive.
join_timeout_seconds = max(
MIN_JOIN_TIMEOUT_SECONDS,
min(MAX_JOIN_TIMEOUT_SECONDS, self._renew_interval_seconds * JOIN_TIMEOUT_MULTIPLIER),
)
self._thread.join(timeout=join_timeout_seconds)
if self._thread.is_alive():
self._logger.warning(
"DB migration lock heartbeat thread did not stop within %.2fs; ignoring. log_context=%s",
join_timeout_seconds,
self._log_context,
)
self._stop_event = None
self._thread = None
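For contrast with the module docstring above, the explicit same-thread lifecycle it recommends for normal application code looks roughly like this (a hedged sketch using redis-py's public Lock API; do_chunk_of_work is hypothetical):

from redis import Redis

redis_client = Redis()
lock = redis_client.lock("some_app_lock", timeout=60)
if lock.acquire(blocking=False):
    try:
        for chunk in range(10):
            do_chunk_of_work(chunk)  # hypothetical unit of work
            lock.reacquire()  # reset the TTL from the owning thread between chunks
    finally:
        lock.release()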

View File

@@ -8,7 +8,6 @@ Create Date: 2025-12-25 10:39:15.139304
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '7df29de0f6be'
@@ -20,7 +19,7 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tenant_credit_pools',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('pool_type', sa.String(length=40), server_default='trial', nullable=False),
sa.Column('quota_limit', sa.BigInteger(), nullable=False),

View File

@@ -8,7 +8,6 @@ Create Date: 2026-01-017 11:10:18.079355
from alembic import op
import models as models
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = 'f9f6d18a37f9'
@@ -20,7 +19,7 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('account_trial_app_records',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('account_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('count', sa.Integer(), nullable=False),
@@ -33,17 +32,17 @@
batch_op.create_index('account_trial_app_record_app_id_idx', ['app_id'], unique=False)
op.create_table('exporle_banners',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('content', sa.JSON(), nullable=False),
sa.Column('link', sa.String(length=255), nullable=False),
sa.Column('sort', sa.Integer(), nullable=False),
sa.Column('status', sa.String(length=255), server_default=sa.text("'enabled'::character varying"), nullable=False),
sa.Column('status', sa.String(length=255), server_default='enabled', nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('language', sa.String(length=255), server_default=sa.text("'en-US'::character varying"), nullable=False),
sa.Column('language', sa.String(length=255), server_default='en-US', nullable=False),
sa.PrimaryKeyConstraint('id', name='exporler_banner_pkey')
)
op.create_table('trial_apps',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuid_generate_v4()'), nullable=False),
sa.Column('id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),

View File

@@ -620,7 +620,7 @@ class TrialApp(Base):
sa.UniqueConstraint("app_id", name="unique_trail_app_id"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id = mapped_column(StringUUID, default=lambda: str(uuid4()))
app_id = mapped_column(StringUUID, nullable=False)
tenant_id = mapped_column(StringUUID, nullable=False)
created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp())
@@ -640,7 +640,7 @@ class AccountTrialAppRecord(Base):
sa.Index("account_trial_app_record_app_id_idx", "app_id"),
sa.UniqueConstraint("account_id", "app_id", name="unique_account_trial_app_record"),
)
id = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"))
id = mapped_column(StringUUID, default=lambda: str(uuid4()))
account_id = mapped_column(StringUUID, nullable=False)
app_id = mapped_column(StringUUID, nullable=False)
count = mapped_column(sa.Integer, nullable=False, default=0)
@@ -660,18 +660,18 @@
class ExporleBanner(TypeBase):
__tablename__ = "exporle_banners"
__table_args__ = (sa.PrimaryKeyConstraint("id", name="exporler_banner_pkey"),)
id: Mapped[str] = mapped_column(StringUUID, server_default=sa.text("uuid_generate_v4()"), init=False)
id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()), init=False)
content: Mapped[dict[str, Any]] = mapped_column(sa.JSON, nullable=False)
link: Mapped[str] = mapped_column(String(255), nullable=False)
sort: Mapped[int] = mapped_column(sa.Integer, nullable=False)
status: Mapped[str] = mapped_column(
sa.String(255), nullable=False, server_default=sa.text("'enabled'::character varying"), default="enabled"
sa.String(255), nullable=False, server_default='enabled', default="enabled"
)
created_at: Mapped[datetime] = mapped_column(
sa.DateTime, nullable=False, server_default=func.current_timestamp(), init=False
)
language: Mapped[str] = mapped_column(
String(255), nullable=False, server_default=sa.text("'en-US'::character varying"), default="en-US"
String(255), nullable=False, server_default='en-US', default="en-US"
)
@@ -2166,7 +2166,7 @@ class TenantCreditPool(TypeBase):
sa.Index("tenant_credit_pool_pool_type_idx", "pool_type"),
)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, server_default=text("uuid_generate_v4()"), init=False)
id: Mapped[str] = mapped_column(StringUUID, primary_key=True, default=lambda: str(uuid4()), init=False)
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial")
quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0)

View File

@@ -22,14 +22,14 @@ dependencies = [
"flask-sqlalchemy~=3.1.1",
"gevent~=25.9.1",
"gmpy2~=2.2.1",
"google-api-core==2.18.0",
"google-api-core>=2.19.1",
"google-api-python-client==2.90.0",
"google-auth==2.29.0",
"google-auth>=2.47.0",
"google-auth-httplib2==0.2.0",
"google-cloud-aiplatform==1.49.0",
"googleapis-common-protos==1.63.0",
"google-cloud-aiplatform>=1.123.0",
"googleapis-common-protos>=1.65.0",
"gunicorn~=23.0.0",
"httpx[socks]~=0.27.0",
"httpx[socks]~=0.28.0",
"jieba==0.42.1",
"json-repair>=0.55.1",
"jsonschema>=4.25.1",
@@ -41,26 +41,23 @@ dependencies = [
"openpyxl~=3.1.5",
"opik~=1.8.72",
"litellm==1.77.1", # Pinned to avoid madoka dependency issue
"opentelemetry-api==1.27.0",
"opentelemetry-distro==0.48b0",
"opentelemetry-exporter-otlp==1.27.0",
"opentelemetry-exporter-otlp-proto-common==1.27.0",
"opentelemetry-exporter-otlp-proto-grpc==1.27.0",
"opentelemetry-exporter-otlp-proto-http==1.27.0",
"opentelemetry-instrumentation==0.48b0",
"opentelemetry-instrumentation-celery==0.48b0",
"opentelemetry-instrumentation-flask==0.48b0",
"opentelemetry-instrumentation-httpx==0.48b0",
"opentelemetry-instrumentation-redis==0.48b0",
"opentelemetry-instrumentation-httpx==0.48b0",
"opentelemetry-instrumentation-sqlalchemy==0.48b0",
"opentelemetry-propagator-b3==1.27.0",
# opentelemetry-proto1.28.0 depends on protobuf (>=5.0,<6.0),
# which is conflict with googleapis-common-protos (1.63.0)
"opentelemetry-proto==1.27.0",
"opentelemetry-sdk==1.27.0",
"opentelemetry-semantic-conventions==0.48b0",
"opentelemetry-util-http==0.48b0",
"opentelemetry-api==1.28.0",
"opentelemetry-distro==0.49b0",
"opentelemetry-exporter-otlp==1.28.0",
"opentelemetry-exporter-otlp-proto-common==1.28.0",
"opentelemetry-exporter-otlp-proto-grpc==1.28.0",
"opentelemetry-exporter-otlp-proto-http==1.28.0",
"opentelemetry-instrumentation==0.49b0",
"opentelemetry-instrumentation-celery==0.49b0",
"opentelemetry-instrumentation-flask==0.49b0",
"opentelemetry-instrumentation-httpx==0.49b0",
"opentelemetry-instrumentation-redis==0.49b0",
"opentelemetry-instrumentation-sqlalchemy==0.49b0",
"opentelemetry-propagator-b3==1.28.0",
"opentelemetry-proto==1.28.0",
"opentelemetry-sdk==1.28.0",
"opentelemetry-semantic-conventions==0.49b0",
"opentelemetry-util-http==0.49b0",
"pandas[excel,output-formatting,performance]~=2.2.2",
"psycogreen~=1.0.2",
"psycopg2-binary~=2.9.6",
@@ -81,7 +78,7 @@ dependencies = [
"starlette==0.49.1",
"tiktoken~=0.9.0",
"transformers~=4.56.1",
"unstructured[docx,epub,md,ppt,pptx]~=0.16.1",
"unstructured[docx,epub,md,ppt,pptx]~=0.18.18",
"yarl~=1.18.3",
"webvtt-py~=0.5.1",
"sseclient-py~=1.8.0",

View File

@@ -327,6 +327,12 @@ class AccountService:
@staticmethod
def delete_account(account: Account):
"""Delete account. This method only adds a task to the queue for deletion."""
# Queue account deletion sync tasks for all workspaces BEFORE account deletion (enterprise only)
from services.enterprise.account_deletion_sync import sync_account_deletion
sync_account_deletion(account_id=account.id, source="account_deleted")
# Now proceed with async account deletion
delete_account_task.delay(account.id)
@staticmethod
@@ -1230,6 +1236,11 @@
if dify_config.BILLING_ENABLED:
BillingService.clean_billing_info_cache(tenant.id)
# Queue account deletion sync task for enterprise backend to reassign resources (enterprise only)
from services.enterprise.account_deletion_sync import sync_workspace_member_removal
sync_workspace_member_removal(workspace_id=tenant.id, member_id=account.id, source="workspace_member_removed")
@staticmethod
def update_member_role(tenant: Tenant, member: Account, new_role: str, operator: Account):
"""Update member role"""

View File

@@ -0,0 +1,115 @@
import json
import logging
import uuid
from datetime import UTC, datetime
from redis import RedisError
from configs import dify_config
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.account import TenantAccountJoin
logger = logging.getLogger(__name__)
ACCOUNT_DELETION_SYNC_QUEUE = "enterprise:member:sync:queue"
ACCOUNT_DELETION_SYNC_TASK_TYPE = "sync_member_deletion_from_workspace"
def _queue_task(workspace_id: str, member_id: str, *, source: str) -> bool:
"""
Queue an account deletion sync task to Redis.
Internal helper function. Do not call directly - use the public functions instead.
Args:
workspace_id: The workspace/tenant ID to sync
member_id: The member/account ID that was removed
source: Source of the sync request (for debugging/tracking)
Returns:
bool: True if task was queued successfully, False otherwise
"""
try:
task = {
"task_id": str(uuid.uuid4()),
"workspace_id": workspace_id,
"member_id": member_id,
"retry_count": 0,
"created_at": datetime.now(UTC).isoformat(),
"source": source,
"type": ACCOUNT_DELETION_SYNC_TASK_TYPE,
}
# Push to Redis list (queue) - LPUSH adds to the head, worker consumes from tail with RPOP
redis_client.lpush(ACCOUNT_DELETION_SYNC_QUEUE, json.dumps(task))
logger.info(
"Queued account deletion sync task for workspace %s, member %s, task_id: %s, source: %s",
workspace_id,
member_id,
task["task_id"],
source,
)
return True
except (RedisError, TypeError) as e:
logger.error(
"Failed to queue account deletion sync for workspace %s, member %s: %s",
workspace_id,
member_id,
str(e),
exc_info=True,
)
# Don't raise - we don't want to fail member deletion if queueing fails
return False
def sync_workspace_member_removal(workspace_id: str, member_id: str, *, source: str) -> bool:
"""
Sync a single workspace member removal (enterprise only).
Queues a task for the enterprise backend to reassign resources from the removed member.
Handles enterprise edition check internally. Safe to call in community edition (no-op).
Args:
workspace_id: The workspace/tenant ID
member_id: The member/account ID that was removed
source: Source of the sync request (e.g., "workspace_member_removed")
Returns:
bool: True if task was queued (or skipped in community), False if queueing failed
"""
if not dify_config.ENTERPRISE_ENABLED:
return True
return _queue_task(workspace_id=workspace_id, member_id=member_id, source=source)
def sync_account_deletion(account_id: str, *, source: str) -> bool:
"""
Sync full account deletion across all workspaces (enterprise only).
Fetches all workspace memberships for the account and queues a sync task for each.
Handles enterprise edition check internally. Safe to call in community edition (no-op).
Args:
account_id: The account ID being deleted
source: Source of the sync request (e.g., "account_deleted")
Returns:
bool: True if all tasks were queued (or skipped in community), False if any queueing failed
"""
if not dify_config.ENTERPRISE_ENABLED:
return True
# Fetch all workspaces the account belongs to
workspace_joins = db.session.query(TenantAccountJoin).filter_by(account_id=account_id).all()
# Queue sync task for each workspace
success = True
for join in workspace_joins:
if not _queue_task(workspace_id=join.tenant_id, member_id=account_id, source=source):
success = False
return success
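The consumer side implied by the LPUSH/RPOP comment above is not part of this compare; a hedged sketch of what the enterprise worker loop might look like:

import json

def drain_one(redis_client) -> bool:
    # Pop from the tail (RPOP) to pair with the producer's LPUSH above.
    raw = redis_client.rpop(ACCOUNT_DELETION_SYNC_QUEUE)
    if raw is None:
        return False
    task = json.loads(raw)
    handle_member_deletion(task["workspace_id"], task["member_id"])  # hypothetical handler
    return True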

View File

@@ -4,6 +4,8 @@ from pydantic import BaseModel, Field
from services.enterprise.base import EnterpriseRequest
ALLOWED_ACCESS_MODES = ["public", "private", "private_all", "sso_verified"]
class WebAppSettings(BaseModel):
access_mode: str = Field(
@@ -123,8 +125,8 @@
def update_app_access_mode(cls, app_id: str, access_mode: str):
if not app_id:
raise ValueError("app_id must be provided.")
if access_mode not in ["public", "private", "private_all"]:
raise ValueError("access_mode must be either 'public', 'private', or 'private_all'")
if access_mode not in ALLOWED_ACCESS_MODES:
raise ValueError(f"access_mode must be one of: {', '.join(ALLOWED_ACCESS_MODES)}")
data = {"appId": app_id, "accessMode": access_mode}

View File

@@ -2,7 +2,10 @@ import json
import logging
from collections.abc import Mapping
from pathlib import Path
from typing import Any
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
from models.account import Account
from sqlalchemy import exists, select
from sqlalchemy.orm import Session
@@ -406,20 +409,37 @@ class BuiltinToolManageService:
return {"result": "success"}
@staticmethod
def set_default_provider(tenant_id: str, user_id: str, provider: str, id: str):
def set_default_provider(tenant_id: str, user_id: str, provider: str, id: str, account: "Account | None" = None):
"""
set default provider
"""
with Session(db.engine) as session:
# get provider
target_provider = session.query(BuiltinToolProvider).filter_by(id=id).first()
# get provider (verify tenant ownership to prevent IDOR)
target_provider = session.query(BuiltinToolProvider).filter_by(id=id, tenant_id=tenant_id).first()
if target_provider is None:
raise ValueError("provider not found")
# clear default provider
session.query(BuiltinToolProvider).filter_by(
tenant_id=tenant_id, user_id=user_id, provider=provider, is_default=True
).update({"is_default": False})
if dify_config.ENTERPRISE_ENABLED:
# Enterprise: verify admin permission for tenant-wide operation
from models.account import TenantAccountRole
if account is None:
# In enterprise mode, an account context is required to perform permission checks
raise ValueError("Account is required to set default credentials in enterprise mode")
if not TenantAccountRole.is_privileged_role(account.current_role):
raise ValueError("Only workspace admins/owners can set default credentials in enterprise mode")
# Enterprise: clear ALL defaults for this provider in the tenant
# (regardless of user_id, since enterprise credentials may have different user_id)
session.query(BuiltinToolProvider).filter_by(
tenant_id=tenant_id, provider=provider, is_default=True
).update({"is_default": False})
else:
# Non-enterprise: only clear defaults for the current user
session.query(BuiltinToolProvider).filter_by(
tenant_id=tenant_id, user_id=user_id, provider=provider, is_default=True
).update({"is_default": False})
# set new default provider
target_provider.is_default = True

View File

@@ -6,7 +6,6 @@ from celery import shared_task
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.models.document import Document
from extensions.ext_database import db
from models.dataset import Dataset
from services.dataset_service import DatasetCollectionBindingService
@@ -58,5 +57,3 @@ def add_annotation_to_index_task(
)
except Exception:
logger.exception("Build index for annotation failed")
finally:
db.session.close()

View File

@@ -5,7 +5,6 @@ import click
from celery import shared_task
from core.rag.datasource.vdb.vector_factory import Vector
from extensions.ext_database import db
from models.dataset import Dataset
from services.dataset_service import DatasetCollectionBindingService
@@ -40,5 +39,3 @@ def delete_annotation_index_task(annotation_id: str, app_id: str, tenant_id: str
logger.info(click.style(f"App annotations index deleted : {app_id} latency: {end_at - start_at}", fg="green"))
except Exception:
logger.exception("Annotation deleted index failed")
finally:
db.session.close()

View File

@@ -6,7 +6,6 @@ from celery import shared_task
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.models.document import Document
from extensions.ext_database import db
from models.dataset import Dataset
from services.dataset_service import DatasetCollectionBindingService
@@ -59,5 +58,3 @@ def update_annotation_to_index_task(
)
except Exception:
logger.exception("Build index for annotation failed")
finally:
db.session.close()

View File

@@ -14,6 +14,9 @@ from models.model import UploadFile
logger = logging.getLogger(__name__)
# Batch size for database operations to keep transactions short
BATCH_SIZE = 1000
@shared_task(queue="dataset")
def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form: str | None, file_ids: list[str]):
@@ -31,63 +34,179 @@ def batch_clean_document_task(document_ids: list[str], dataset_id: str, doc_form
if not doc_form:
raise ValueError("doc_form is required")
with session_factory.create_session() as session:
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Document has no dataset")
session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
).delete(synchronize_session=False)
storage_keys_to_delete: list[str] = []
index_node_ids: list[str] = []
segment_ids: list[str] = []
total_image_upload_file_ids: list[str] = []
try:
# ============ Step 1: Query segment and file data (short read-only transaction) ============
with session_factory.create_session() as session:
# Get segments info
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
).all()
# check segment is exist
if segments:
index_node_ids = [segment.index_node_id for segment in segments]
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
segment_ids = [segment.id for segment in segments]
# Collect image file IDs from segment content
for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
image_files = session.query(UploadFile).where(UploadFile.id.in_(image_upload_file_ids)).all()
for image_file in image_files:
try:
if image_file and image_file.key:
storage.delete(image_file.key)
except Exception:
logger.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_is: %s",
image_file.id,
)
stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
session.execute(stmt)
session.delete(segment)
total_image_upload_file_ids.extend(image_upload_file_ids)
# Query storage keys for image files
if total_image_upload_file_ids:
image_files = session.scalars(
select(UploadFile).where(UploadFile.id.in_(total_image_upload_file_ids))
).all()
storage_keys_to_delete.extend([f.key for f in image_files if f and f.key])
# Query storage keys for document files
if file_ids:
files = session.scalars(select(UploadFile).where(UploadFile.id.in_(file_ids))).all()
for file in files:
try:
storage.delete(file.key)
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file.id)
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
storage_keys_to_delete.extend([f.key for f in files if f and f.key])
session.commit()
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at}",
fg="green",
# ============ Step 2: Clean vector index (external service, fresh session for dataset) ============
if index_node_ids:
try:
# Fetch dataset in a fresh session to avoid DetachedInstanceError
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
logger.warning("Dataset not found for vector index cleanup, dataset_id: %s", dataset_id)
else:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
except Exception:
logger.exception(
"Failed to clean vector index for dataset_id: %s, document_ids: %s, index_node_ids count: %d",
dataset_id,
document_ids,
len(index_node_ids),
)
)
# ============ Step 3: Delete metadata binding (separate short transaction) ============
try:
with session_factory.create_session() as session:
deleted_count = (
session.query(DatasetMetadataBinding)
.where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id.in_(document_ids),
)
.delete(synchronize_session=False)
)
session.commit()
logger.debug("Deleted %d metadata bindings for dataset_id: %s", deleted_count, dataset_id)
except Exception:
logger.exception("Cleaned documents when documents deleted failed")
logger.exception(
"Failed to delete metadata bindings for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)
# ============ Step 4: Batch delete UploadFile records (multiple short transactions) ============
if total_image_upload_file_ids:
failed_batches = 0
total_batches = (len(total_image_upload_file_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(total_image_upload_file_ids), BATCH_SIZE):
batch = total_image_upload_file_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(batch))
session.execute(stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete image UploadFile batch %d-%d for dataset_id: %s",
i,
i + len(batch),
dataset_id,
)
if failed_batches > 0:
logger.warning(
"Image UploadFile deletion: %d/%d batches failed for dataset_id: %s",
failed_batches,
total_batches,
dataset_id,
)
# ============ Step 5: Batch delete DocumentSegment records (multiple short transactions) ============
if segment_ids:
failed_batches = 0
total_batches = (len(segment_ids) + BATCH_SIZE - 1) // BATCH_SIZE
for i in range(0, len(segment_ids), BATCH_SIZE):
batch = segment_ids[i : i + BATCH_SIZE]
try:
with session_factory.create_session() as session:
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(batch))
session.execute(segment_delete_stmt)
session.commit()
except Exception:
failed_batches += 1
logger.exception(
"Failed to delete DocumentSegment batch %d-%d for dataset_id: %s, document_ids: %s",
i,
i + len(batch),
dataset_id,
document_ids,
)
if failed_batches > 0:
logger.warning(
"DocumentSegment deletion: %d/%d batches failed, document_ids: %s",
failed_batches,
total_batches,
document_ids,
)
# ============ Step 6: Delete document-associated files (separate short transaction) ============
if file_ids:
try:
with session_factory.create_session() as session:
stmt = delete(UploadFile).where(UploadFile.id.in_(file_ids))
session.execute(stmt)
session.commit()
except Exception:
logger.exception(
"Failed to delete document UploadFile records for dataset_id: %s, file_ids: %s",
dataset_id,
file_ids,
)
# ============ Step 7: Delete storage files (I/O operations, no DB transaction) ============
storage_delete_failures = 0
for storage_key in storage_keys_to_delete:
try:
storage.delete(storage_key)
except Exception:
storage_delete_failures += 1
logger.exception("Failed to delete file from storage, key: %s", storage_key)
if storage_delete_failures > 0:
logger.warning(
"Storage file deletion completed with %d failures out of %d total files for dataset_id: %s",
storage_delete_failures,
len(storage_keys_to_delete),
dataset_id,
)
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned documents when documents deleted latency: {end_at - start_at:.2f}s, "
f"dataset_id: {dataset_id}, document_ids: {document_ids}, "
f"segments: {len(segment_ids)}, image_files: {len(total_image_upload_file_ids)}, "
f"storage_files: {len(storage_keys_to_delete)}",
fg="green",
)
)
except Exception:
logger.exception(
"Batch clean documents failed for dataset_id: %s, document_ids: %s",
dataset_id,
document_ids,
)

View File

@@ -48,6 +48,11 @@ def batch_create_segment_to_index_task(
indexing_cache_key = f"segment_batch_import_{job_id}"
# Initialize variables with default values
upload_file_key: str | None = None
dataset_config: dict | None = None
document_config: dict | None = None
with session_factory.create_session() as session:
try:
dataset = session.get(Dataset, dataset_id)
@@ -69,86 +74,115 @@
if not upload_file:
raise ValueError("UploadFile not found.")
with tempfile.TemporaryDirectory() as temp_dir:
suffix = Path(upload_file.key).suffix
file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore
storage.download(upload_file.key, file_path)
dataset_config = {
"id": dataset.id,
"indexing_technique": dataset.indexing_technique,
"tenant_id": dataset.tenant_id,
"embedding_model_provider": dataset.embedding_model_provider,
"embedding_model": dataset.embedding_model,
}
df = pd.read_csv(file_path)
content = []
for _, row in df.iterrows():
if dataset_document.doc_form == "qa_model":
data = {"content": row.iloc[0], "answer": row.iloc[1]}
else:
data = {"content": row.iloc[0]}
content.append(data)
if len(content) == 0:
raise ValueError("The CSV file is empty.")
document_config = {
"id": dataset_document.id,
"doc_form": dataset_document.doc_form,
"word_count": dataset_document.word_count or 0,
}
document_segments = []
embedding_model = None
if dataset.indexing_technique == "high_quality":
model_manager = ModelManager()
embedding_model = model_manager.get_model_instance(
tenant_id=dataset.tenant_id,
provider=dataset.embedding_model_provider,
model_type=ModelType.TEXT_EMBEDDING,
model=dataset.embedding_model,
)
upload_file_key = upload_file.key
word_count_change = 0
if embedding_model:
tokens_list = embedding_model.get_text_embedding_num_tokens(
texts=[segment["content"] for segment in content]
)
except Exception:
logger.exception("Segments batch created index failed")
redis_client.setex(indexing_cache_key, 600, "error")
return
# Ensure required variables are set before proceeding
if upload_file_key is None or dataset_config is None or document_config is None:
logger.error("Required configuration not set due to session error")
redis_client.setex(indexing_cache_key, 600, "error")
return
with tempfile.TemporaryDirectory() as temp_dir:
suffix = Path(upload_file_key).suffix
file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore
storage.download(upload_file_key, file_path)
df = pd.read_csv(file_path)
content = []
for _, row in df.iterrows():
if document_config["doc_form"] == "qa_model":
data = {"content": row.iloc[0], "answer": row.iloc[1]}
else:
tokens_list = [0] * len(content)
data = {"content": row.iloc[0]}
content.append(data)
if len(content) == 0:
raise ValueError("The CSV file is empty.")
for segment, tokens in zip(content, tokens_list):
content = segment["content"]
doc_id = str(uuid.uuid4())
segment_hash = helper.generate_text_hash(content)
max_position = (
session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == dataset_document.id)
.scalar()
)
segment_document = DocumentSegment(
tenant_id=tenant_id,
dataset_id=dataset_id,
document_id=document_id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=max_position + 1 if max_position else 1,
content=content,
word_count=len(content),
tokens=tokens,
created_by=user_id,
indexing_at=naive_utc_now(),
status="completed",
completed_at=naive_utc_now(),
)
if dataset_document.doc_form == "qa_model":
segment_document.answer = segment["answer"]
segment_document.word_count += len(segment["answer"])
word_count_change += segment_document.word_count
session.add(segment_document)
document_segments.append(segment_document)
document_segments = []
embedding_model = None
if dataset_config["indexing_technique"] == "high_quality":
model_manager = ModelManager()
embedding_model = model_manager.get_model_instance(
tenant_id=dataset_config["tenant_id"],
provider=dataset_config["embedding_model_provider"],
model_type=ModelType.TEXT_EMBEDDING,
model=dataset_config["embedding_model"],
)
word_count_change = 0
if embedding_model:
tokens_list = embedding_model.get_text_embedding_num_tokens(texts=[segment["content"] for segment in content])
else:
tokens_list = [0] * len(content)
with session_factory.create_session() as session, session.begin():
for segment, tokens in zip(content, tokens_list):
content = segment["content"]
doc_id = str(uuid.uuid4())
segment_hash = helper.generate_text_hash(content)
max_position = (
session.query(func.max(DocumentSegment.position))
.where(DocumentSegment.document_id == document_config["id"])
.scalar()
)
segment_document = DocumentSegment(
tenant_id=tenant_id,
dataset_id=dataset_id,
document_id=document_id,
index_node_id=doc_id,
index_node_hash=segment_hash,
position=max_position + 1 if max_position else 1,
content=content,
word_count=len(content),
tokens=tokens,
created_by=user_id,
indexing_at=naive_utc_now(),
status="completed",
completed_at=naive_utc_now(),
)
if document_config["doc_form"] == "qa_model":
segment_document.answer = segment["answer"]
segment_document.word_count += len(segment["answer"])
word_count_change += segment_document.word_count
session.add(segment_document)
document_segments.append(segment_document)
with session_factory.create_session() as session, session.begin():
dataset_document = session.get(Document, document_id)
if dataset_document:
assert dataset_document.word_count is not None
dataset_document.word_count += word_count_change
session.add(dataset_document)
VectorService.create_segments_vector(None, document_segments, dataset, dataset_document.doc_form)
session.commit()
redis_client.setex(indexing_cache_key, 600, "completed")
end_at = time.perf_counter()
logger.info(
click.style(
f"Segment batch created job: {job_id} latency: {end_at - start_at}",
fg="green",
)
)
except Exception:
logger.exception("Segments batch created index failed")
redis_client.setex(indexing_cache_key, 600, "error")
with session_factory.create_session() as session:
dataset = session.get(Dataset, dataset_id)
if dataset:
VectorService.create_segments_vector(None, document_segments, dataset, document_config["doc_form"])
redis_client.setex(indexing_cache_key, 600, "completed")
end_at = time.perf_counter()
logger.info(
click.style(
f"Segment batch created job: {job_id} latency: {end_at - start_at}",
fg="green",
)
)
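
Several hunks in this file pair create_session() with session.begin(). Assuming create_session() returns an ordinary SQLAlchemy Session, the pairing gives commit-on-success, rollback-on-error semantics without explicit commit() calls; a minimal sketch with a plain sessionmaker standing in for the factory:

from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

engine = create_engine("sqlite://")
SessionLocal = sessionmaker(bind=engine)  # stand-in for session_factory.create_session

with SessionLocal() as session, session.begin():
    # session.begin() opens a transaction that commits when the block exits
    # normally and rolls back if it raises; no explicit session.commit() needed
    session.execute(text("CREATE TABLE demo (id INTEGER PRIMARY KEY)"))
    session.execute(text("INSERT INTO demo (id) VALUES (1)"))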

View File

@ -28,6 +28,7 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
"""
logger.info(click.style(f"Start clean document when document deleted: {document_id}", fg="green"))
start_at = time.perf_counter()
total_attachment_files = []
with session_factory.create_session() as session:
try:
@ -47,78 +48,91 @@ def clean_document_task(document_id: str, dataset_id: str, doc_form: str, file_i
SegmentAttachmentBinding.document_id == document_id,
)
).all()
# check segments exist
if segments:
index_node_ids = [segment.index_node_id for segment in segments]
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
attachment_ids = [attachment_file.id for _, attachment_file in attachments_with_bindings]
binding_ids = [binding.id for binding, _ in attachments_with_bindings]
total_attachment_files.extend([attachment_file.key for _, attachment_file in attachments_with_bindings])
index_node_ids = [segment.index_node_id for segment in segments]
segment_contents = [segment.content for segment in segments]
except Exception:
logger.exception("Cleaned document when document deleted failed")
return
# check segments exist
if index_node_ids:
index_processor = IndexProcessorFactory(doc_form).init_index_processor()
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if dataset:
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
for segment in segments:
image_upload_file_ids = get_image_upload_file_ids(segment.content)
image_files = session.scalars(
select(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
).all()
for image_file in image_files:
if image_file is None:
continue
try:
storage.delete(image_file.key)
except Exception:
logger.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_id: %s",
image_file.id,
)
total_image_files = []
with session_factory.create_session() as session, session.begin():
for segment_content in segment_contents:
image_upload_file_ids = get_image_upload_file_ids(segment_content)
image_files = session.scalars(select(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))).all()
total_image_files.extend([image_file.key for image_file in image_files])
image_file_delete_stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
session.execute(image_file_delete_stmt)
image_file_delete_stmt = delete(UploadFile).where(UploadFile.id.in_(image_upload_file_ids))
session.execute(image_file_delete_stmt)
session.delete(segment)
with session_factory.create_session() as session, session.begin():
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id == document_id)
session.execute(segment_delete_stmt)
session.commit()
if file_id:
file = session.query(UploadFile).where(UploadFile.id == file_id).first()
if file:
try:
storage.delete(file.key)
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file_id)
session.delete(file)
# delete segment attachments
if attachments_with_bindings:
attachment_ids = [attachment_file.id for _, attachment_file in attachments_with_bindings]
binding_ids = [binding.id for binding, _ in attachments_with_bindings]
for binding, attachment_file in attachments_with_bindings:
try:
storage.delete(attachment_file.key)
except Exception:
logger.exception(
"Delete attachment_file failed when storage deleted, \
attachment_file_id: %s",
binding.attachment_id,
)
attachment_file_delete_stmt = delete(UploadFile).where(UploadFile.id.in_(attachment_ids))
session.execute(attachment_file_delete_stmt)
binding_delete_stmt = delete(SegmentAttachmentBinding).where(
SegmentAttachmentBinding.id.in_(binding_ids)
)
session.execute(binding_delete_stmt)
# delete dataset metadata binding
session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id == document_id,
).delete()
session.commit()
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned document when document deleted: {document_id} latency: {end_at - start_at}",
fg="green",
)
)
for image_file_key in total_image_files:
try:
storage.delete(image_file_key)
except Exception:
logger.exception("Cleaned document when document deleted failed")
logger.exception(
"Delete image_files failed when storage deleted, \
image_upload_file_id: %s",
image_file_key,
)
with session_factory.create_session() as session, session.begin():
if file_id:
file = session.query(UploadFile).where(UploadFile.id == file_id).first()
if file:
try:
storage.delete(file.key)
except Exception:
logger.exception("Delete file failed when document deleted, file_id: %s", file_id)
session.delete(file)
with session_factory.create_session() as session, session.begin():
# delete segment attachments
if attachment_ids:
attachment_file_delete_stmt = delete(UploadFile).where(UploadFile.id.in_(attachment_ids))
session.execute(attachment_file_delete_stmt)
if binding_ids:
binding_delete_stmt = delete(SegmentAttachmentBinding).where(SegmentAttachmentBinding.id.in_(binding_ids))
session.execute(binding_delete_stmt)
for attachment_file_key in total_attachment_files:
try:
storage.delete(attachment_file_key)
except Exception:
logger.exception(
"Delete attachment_file failed when storage deleted, \
attachment_file_id: %s",
attachment_file_key,
)
with session_factory.create_session() as session, session.begin():
# delete dataset metadata binding
session.query(DatasetMetadataBinding).where(
DatasetMetadataBinding.dataset_id == dataset_id,
DatasetMetadataBinding.document_id == document_id,
).delete()
end_at = time.perf_counter()
logger.info(
click.style(
f"Cleaned document when document deleted: {document_id} latency: {end_at - start_at}",
fg="green",
)
)
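
The reshaped task above consistently defers object-storage deletes until every database transaction has committed. A compressed sketch of that ordering, with a stand-in storage client (the real client and key lists come from the task itself):

import logging
from collections.abc import Callable

logger = logging.getLogger(__name__)


class Storage:  # stand-in for the application's storage client
    def delete(self, key: str) -> None:
        print(f"deleted {key}")


storage = Storage()


def clean_with_deferred_io(collect_keys: Callable[[], list[str]]) -> int:
    """DB phase first, storage I/O last, outside any transaction."""
    keys = collect_keys()  # short DB transactions commit inside here
    failures = 0
    for key in keys:
        try:
            storage.delete(key)  # slow or failing I/O holds no DB transaction
        except Exception:
            failures += 1
            logger.exception("Failed to delete file from storage, key: %s", key)
    return failures


# usage sketch: clean_with_deferred_io(lambda: ["img/a.png", "img/b.png"])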

View File

@ -23,40 +23,40 @@ def clean_notion_document_task(document_ids: list[str], dataset_id: str):
"""
logger.info(click.style(f"Start clean document when import form notion document deleted: {dataset_id}", fg="green"))
start_at = time.perf_counter()
total_index_node_ids = []
with session_factory.create_session() as session:
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Document has no dataset")
index_type = dataset.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
if not dataset:
raise Exception("Document has no dataset")
index_type = dataset.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
document_delete_stmt = delete(Document).where(Document.id.in_(document_ids))
session.execute(document_delete_stmt)
document_delete_stmt = delete(Document).where(Document.id.in_(document_ids))
session.execute(document_delete_stmt)
for document_id in document_ids:
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id == document_id)
).all()
index_node_ids = [segment.index_node_id for segment in segments]
for document_id in document_ids:
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
total_index_node_ids.extend([segment.index_node_id for segment in segments])
index_processor.clean(
dataset, index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
session.commit()
end_at = time.perf_counter()
logger.info(
click.style(
"Clean document when import form notion document deleted end :: {} latency: {}".format(
dataset_id, end_at - start_at
),
fg="green",
)
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if dataset:
index_processor.clean(
dataset, total_index_node_ids, with_keywords=True, delete_child_chunks=True, delete_summaries=True
)
except Exception:
logger.exception("Cleaned document when import form notion document deleted failed")
with session_factory.create_session() as session, session.begin():
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id.in_(document_ids))
session.execute(segment_delete_stmt)
end_at = time.perf_counter()
logger.info(
click.style(
"Clean document when import form notion document deleted end :: {} latency: {}".format(
dataset_id, end_at - start_at
),
fg="green",
)
)
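
The notion cleanup now accumulates index node ids across all documents and issues a single clean() call instead of one per document. A minimal sketch of that reshaping, with a Protocol standing in for the factory-produced processor:

from typing import Protocol


class IndexProcessor(Protocol):  # stand-in for the factory-produced processor
    def clean(self, dataset: object, node_ids: list[str], **kwargs: bool) -> None: ...


def clean_all_documents(
    processor: IndexProcessor,
    dataset: object,
    node_ids_by_document: dict[str, list[str]],
) -> None:
    # accumulate across documents first, then one index round-trip,
    # instead of one clean() call per document as before
    total_index_node_ids: list[str] = []
    for node_ids in node_ids_by_document.values():
        total_index_node_ids.extend(node_ids)
    processor.clean(
        dataset,
        total_index_node_ids,
        with_keywords=True,
        delete_child_chunks=True,
        delete_summaries=True,
    )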

View File

@ -3,6 +3,7 @@ import time
import click
from celery import shared_task
from sqlalchemy import delete
from core.db.session_factory import session_factory
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
@ -67,8 +68,14 @@ def delete_segment_from_index_task(
if segment_attachment_bindings:
attachment_ids = [binding.attachment_id for binding in segment_attachment_bindings]
index_processor.clean(dataset=dataset, node_ids=attachment_ids, with_keywords=False)
for binding in segment_attachment_bindings:
session.delete(binding)
segment_attachment_bind_ids = [i.id for i in segment_attachment_bindings]
for i in range(0, len(segment_attachment_bind_ids), 1000):
segment_attachment_bind_delete_stmt = delete(SegmentAttachmentBinding).where(
SegmentAttachmentBinding.id.in_(segment_attachment_bind_ids[i : i + 1000])
)
session.execute(segment_attachment_bind_delete_stmt)
# delete upload file
session.query(UploadFile).where(UploadFile.id.in_(attachment_ids)).delete(synchronize_session=False)
session.commit()
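
The 1000-id slices above are easy to factor into a tiny helper; a sketch (the name chunked is ours, not the repository's):

from collections.abc import Iterator


def chunked(ids: list[str], size: int = 1000) -> Iterator[list[str]]:
    """Yield fixed-size slices, matching the 1000-id IN() batches above."""
    for i in range(0, len(ids), size):
        yield ids[i : i + size]


# usage sketch: for batch in chunked(segment_attachment_bind_ids): delete that batch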

View File

@ -27,104 +27,129 @@ def document_indexing_sync_task(dataset_id: str, document_id: str):
"""
logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
start_at = time.perf_counter()
tenant_id = None
with session_factory.create_session() as session:
with session_factory.create_session() as session, session.begin():
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
logger.info(click.style(f"Document not found: {document_id}", fg="red"))
return
if document.indexing_status == "parsing":
logger.info(click.style(f"Document {document_id} is already being processed, skipping", fg="yellow"))
return
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Dataset not found")
data_source_info = document.data_source_info_dict
if document.data_source_type == "notion_import":
if (
not data_source_info
or "notion_page_id" not in data_source_info
or "notion_workspace_id" not in data_source_info
):
raise ValueError("no notion page found")
workspace_id = data_source_info["notion_workspace_id"]
page_id = data_source_info["notion_page_id"]
page_type = data_source_info["type"]
page_edited_time = data_source_info["last_edited_time"]
credential_id = data_source_info.get("credential_id")
if document.data_source_type != "notion_import":
logger.info(click.style(f"Document {document_id} is not a notion_import, skipping", fg="yellow"))
return
# Get credentials from datasource provider
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=document.tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
)
if (
not data_source_info
or "notion_page_id" not in data_source_info
or "notion_workspace_id" not in data_source_info
):
raise ValueError("no notion page found")
if not credential:
logger.error(
"Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
document_id,
document.tenant_id,
credential_id,
)
workspace_id = data_source_info["notion_workspace_id"]
page_id = data_source_info["notion_page_id"]
page_type = data_source_info["type"]
page_edited_time = data_source_info["last_edited_time"]
credential_id = data_source_info.get("credential_id")
tenant_id = document.tenant_id
index_type = document.doc_form
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
index_node_ids = [segment.index_node_id for segment in segments]
# Get credentials from datasource provider
datasource_provider_service = DatasourceProviderService()
credential = datasource_provider_service.get_datasource_credentials(
tenant_id=tenant_id,
credential_id=credential_id,
provider="notion_datasource",
plugin_id="langgenius/notion_datasource",
)
if not credential:
logger.error(
"Datasource credential not found for document %s, tenant_id: %s, credential_id: %s",
document_id,
tenant_id,
credential_id,
)
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if document:
document.indexing_status = "error"
document.error = "Datasource credential not found. Please reconnect your Notion workspace."
document.stopped_at = naive_utc_now()
session.commit()
return
return
loader = NotionExtractor(
notion_workspace_id=workspace_id,
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=credential.get("integration_secret"),
tenant_id=document.tenant_id,
)
loader = NotionExtractor(
notion_workspace_id=workspace_id,
notion_obj_id=page_id,
notion_page_type=page_type,
notion_access_token=credential.get("integration_secret"),
tenant_id=tenant_id,
)
last_edited_time = loader.get_notion_last_edited_time()
last_edited_time = loader.get_notion_last_edited_time()
if last_edited_time == page_edited_time:
logger.info(click.style(f"Document {document_id} content unchanged, skipping sync", fg="yellow"))
return
# check whether the page was updated
if last_edited_time != page_edited_time:
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
logger.info(click.style(f"Document {document_id} content changed, starting sync", fg="green"))
# delete all document segments and index
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Dataset not found")
index_type = document.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
try:
index_processor = IndexProcessorFactory(index_type).init_index_processor()
with session_factory.create_session() as session:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if dataset:
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
logger.info(click.style(f"Cleaned vector index for document {document_id}", fg="green"))
except Exception:
logger.exception("Failed to clean vector index for document %s", document_id)
segments = session.scalars(
select(DocumentSegment).where(DocumentSegment.document_id == document_id)
).all()
index_node_ids = [segment.index_node_id for segment in segments]
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if not document:
logger.warning(click.style(f"Document {document_id} not found during sync", fg="yellow"))
return
# delete from vector index
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
data_source_info = document.data_source_info_dict
data_source_info["last_edited_time"] = last_edited_time
document.data_source_info = data_source_info
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
end_at = time.perf_counter()
logger.info(
click.style(
"Cleaned document when document update data source or process rule: {} latency: {}".format(
document_id, end_at - start_at
),
fg="green",
)
)
except Exception:
logger.exception("Cleaned document when document update data source or process rule failed")
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id == document_id)
session.execute(segment_delete_stmt)
try:
indexing_runner = IndexingRunner()
indexing_runner.run([document])
end_at = time.perf_counter()
logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception:
logger.exception("document_indexing_sync_task failed, document_id: %s", document_id)
logger.info(click.style(f"Deleted segments for document {document_id}", fg="green"))
try:
indexing_runner = IndexingRunner()
with session_factory.create_session() as session:
document = session.query(Document).filter_by(id=document_id).first()
if document:
indexing_runner.run([document])
end_at = time.perf_counter()
logger.info(click.style(f"Sync completed for document {document_id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception as e:
logger.exception("document_indexing_sync_task failed for document_id: %s", document_id)
with session_factory.create_session() as session, session.begin():
document = session.query(Document).filter_by(id=document_id).first()
if document:
document.indexing_status = "error"
document.error = str(e)
document.stopped_at = naive_utc_now()

View File

@ -81,26 +81,35 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
session.commit()
return
for document_id in document_ids:
logger.info(click.style(f"Start process document: {document_id}", fg="green"))
document = (
session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
)
# Phase 1: Update status to parsing (short transaction)
with session_factory.create_session() as session, session.begin():
documents = (
session.query(Document).where(Document.id.in_(document_ids), Document.dataset_id == dataset_id).all()
)
for document in documents:
if document:
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
documents.append(document)
session.add(document)
session.commit()
# Transaction committed and closed
try:
indexing_runner = IndexingRunner()
indexing_runner.run(documents)
end_at = time.perf_counter()
logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
# Phase 2: Execute indexing (no transaction - IndexingRunner creates its own sessions)
has_error = False
try:
indexing_runner = IndexingRunner()
indexing_runner.run(documents)
end_at = time.perf_counter()
logger.info(click.style(f"Processed dataset: {dataset_id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
has_error = True
except Exception:
logger.exception("Document indexing task failed, dataset_id: %s", dataset_id)
has_error = True
if not has_error:
with session_factory.create_session() as session:
# Trigger summary index generation for completed documents if enabled
# Only generate for high_quality indexing technique and when summary_index_setting is enabled
# Re-query dataset to get latest summary_index_setting (in case it was updated)
@ -115,17 +124,18 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
# expire all session objects to get the latest document indexing status
session.expire_all()
# Check each document's indexing status and trigger summary generation if completed
for document_id in document_ids:
# Re-query document to get latest status (IndexingRunner may have updated it)
document = (
session.query(Document)
.where(Document.id == document_id, Document.dataset_id == dataset_id)
.first()
)
documents = (
session.query(Document)
.where(Document.id.in_(document_ids), Document.dataset_id == dataset_id)
.all()
)
for document in documents:
if document:
logger.info(
"Checking document %s for summary generation: status=%s, doc_form=%s, need_summary=%s",
document_id,
document.id,
document.indexing_status,
document.doc_form,
document.need_summary,
@ -136,46 +146,36 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]):
and document.need_summary is True
):
try:
generate_summary_index_task.delay(dataset.id, document_id, None)
generate_summary_index_task.delay(dataset.id, document.id, None)
logger.info(
"Queued summary index generation task for document %s in dataset %s "
"after indexing completed",
document_id,
document.id,
dataset.id,
)
except Exception:
logger.exception(
"Failed to queue summary index generation task for document %s",
document_id,
document.id,
)
# Don't fail the entire indexing process if summary task queuing fails
else:
logger.info(
"Skipping summary generation for document %s: "
"status=%s, doc_form=%s, need_summary=%s",
document_id,
document.id,
document.indexing_status,
document.doc_form,
document.need_summary,
)
else:
logger.warning("Document %s not found after indexing", document_id)
else:
logger.info(
"Summary index generation skipped for dataset %s: summary_index_setting.enable=%s",
dataset.id,
summary_index_setting.get("enable") if summary_index_setting else None,
)
logger.warning("Document %s not found after indexing", document.id)
else:
logger.info(
"Summary index generation skipped for dataset %s: indexing_technique=%s (not 'high_quality')",
dataset.id,
dataset.indexing_technique,
)
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception:
logger.exception("Document indexing task failed, dataset_id: %s", dataset_id)
def _document_indexing_with_tenant_queue(
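
Stripped of the summary-generation details, the reshaped _document_indexing reduces to a two-phase pattern; a compressed sketch where mark_parsing and run_indexer stand in for the short status transaction and IndexingRunner respectively:

import logging
from collections.abc import Callable

logger = logging.getLogger(__name__)


def two_phase_indexing(
    documents: list[object],
    mark_parsing: Callable[[list[object]], None],
    run_indexer: Callable[[list[object]], None],
) -> bool:
    """Phase 1: flip statuses in one short transaction; phase 2: run the
    indexer with no transaction held (it opens its own sessions). Returns
    True only when indexing succeeded, gating any follow-up work."""
    mark_parsing(documents)  # short transaction, committed before phase 2
    try:
        run_indexer(documents)
    except Exception:
        logger.exception("Indexing failed")
        return False
    return True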

View File

@ -8,7 +8,6 @@ from sqlalchemy import delete, select
from core.db.session_factory import session_factory
from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
@ -27,7 +26,7 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
logger.info(click.style(f"Start update document: {document_id}", fg="green"))
start_at = time.perf_counter()
with session_factory.create_session() as session:
with session_factory.create_session() as session, session.begin():
document = session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()
if not document:
@ -36,27 +35,20 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
document.indexing_status = "parsing"
document.processing_started_at = naive_utc_now()
session.commit()
# delete all document segments and index
try:
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
raise Exception("Dataset not found")
dataset = session.query(Dataset).where(Dataset.id == dataset_id).first()
if not dataset:
return
index_type = document.doc_form
index_processor = IndexProcessorFactory(index_type).init_index_processor()
index_type = document.doc_form
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
index_node_ids = [segment.index_node_id for segment in segments]
segments = session.scalars(select(DocumentSegment).where(DocumentSegment.document_id == document_id)).all()
if segments:
index_node_ids = [segment.index_node_id for segment in segments]
# delete from vector index
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
segment_ids = [segment.id for segment in segments]
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.id.in_(segment_ids))
session.execute(segment_delete_stmt)
db.session.commit()
clean_success = False
try:
index_processor = IndexProcessorFactory(index_type).init_index_processor()
if index_node_ids:
index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)
end_at = time.perf_counter()
logger.info(
click.style(
@ -66,15 +58,21 @@ def document_indexing_update_task(dataset_id: str, document_id: str):
fg="green",
)
)
except Exception:
logger.exception("Cleaned document when document update data source or process rule failed")
clean_success = True
except Exception:
logger.exception("Failed to clean document index during update, document_id: %s", document_id)
try:
indexing_runner = IndexingRunner()
indexing_runner.run([document])
end_at = time.perf_counter()
logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception:
logger.exception("document_indexing_update_task failed, document_id: %s", document_id)
if clean_success:
with session_factory.create_session() as session, session.begin():
segment_delete_stmt = delete(DocumentSegment).where(DocumentSegment.document_id == document_id)
session.execute(segment_delete_stmt)
try:
indexing_runner = IndexingRunner()
indexing_runner.run([document])
end_at = time.perf_counter()
logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
except DocumentIsPausedError as ex:
logger.info(click.style(str(ex), fg="yellow"))
except Exception:
logger.exception("document_indexing_update_task failed, document_id: %s", document_id)

View File

@ -6,9 +6,8 @@ improving performance by offloading storage operations to background workers.
"""
from celery import shared_task # type: ignore[import-untyped]
from sqlalchemy.orm import Session
from extensions.ext_database import db
from core.db.session_factory import session_factory
from services.workflow_draft_variable_service import DraftVarFileDeletion, WorkflowDraftVariableService
@ -17,6 +16,6 @@ def save_workflow_execution_task(
self,
deletions: list[DraftVarFileDeletion],
):
with Session(bind=db.engine) as session, session.begin():
with session_factory.create_session() as session, session.begin():
srv = WorkflowDraftVariableService(session=session)
srv.delete_workflow_draft_variable_file(deletions=deletions)
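
These hunks repeatedly swap Session(bind=db.engine) for session_factory.create_session(). A hypothetical factory shape consistent with that call site; the real core.db.session_factory may differ:

from sqlalchemy import create_engine
from sqlalchemy.orm import Session, sessionmaker


class SessionFactory:
    """Hypothetical wrapper: one engine, a fresh session per unit of work."""

    def __init__(self, url: str) -> None:
        self._engine = create_engine(url)
        self._maker = sessionmaker(bind=self._engine, expire_on_commit=False)

    def create_session(self) -> Session:
        return self._maker()


session_factory = SessionFactory("sqlite://")

with session_factory.create_session() as session, session.begin():
    pass  # ORM work goes here; commits on clean exit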

View File

@ -0,0 +1,38 @@
"""
Integration tests for DbMigrationAutoRenewLock using real Redis via TestContainers.
"""
import time
import uuid
import pytest
from extensions.ext_redis import redis_client
from libs.db_migration_lock import DbMigrationAutoRenewLock
@pytest.mark.usefixtures("flask_app_with_containers")
def test_db_migration_lock_renews_ttl_and_releases():
lock_name = f"test:db_migration_auto_renew_lock:{uuid.uuid4().hex}"
# Keep base TTL very small, and renew frequently so the test is stable even on slower CI.
lock = DbMigrationAutoRenewLock(
redis_client=redis_client,
name=lock_name,
ttl_seconds=1.0,
renew_interval_seconds=0.2,
log_context="test_db_migration_lock",
)
acquired = lock.acquire(blocking=True, blocking_timeout=5)
assert acquired is True
# Wait beyond the base TTL; key should still exist due to renewal.
time.sleep(1.5)
ttl = redis_client.ttl(lock_name)
assert ttl > 0
lock.release_safely(status="successful")
# After release, the key should not exist.
assert redis_client.exists(lock_name) == 0
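
The test above exercises acquire, TTL renewal, and release. A minimal sketch of what such a lock can look like, using only redis-py Lock calls known to exist (acquire, reacquire, release); the internals are an assumption, and the real DbMigrationAutoRenewLock adds details this sketch omits, such as interval clamping, contextual logging, and double-acquire guards:

import threading

import redis


class AutoRenewLock:
    """Sketch only: renew a Redis lock's TTL on a background thread."""

    def __init__(self, client: redis.Redis, name: str, ttl: float, interval: float) -> None:
        self._lock = client.lock(name, timeout=ttl, thread_local=False)
        self._interval = interval
        self._stop = threading.Event()
        self._renewer: threading.Thread | None = None

    def acquire(self, blocking: bool = True, blocking_timeout: float | None = None) -> bool:
        if not self._lock.acquire(blocking=blocking, blocking_timeout=blocking_timeout):
            return False
        self._renewer = threading.Thread(target=self._renew_loop, daemon=True)
        self._renewer.start()
        return True

    def _renew_loop(self) -> None:
        while not self._stop.wait(self._interval):
            try:
                self._lock.reacquire()  # resets the key's TTL while work runs
            except redis.RedisError:
                pass  # renewal errors are non-fatal here; the real lock logs them

    def release(self) -> None:
        self._stop.set()
        if self._renewer is not None:
            self._renewer.join()
        self._lock.release()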

View File

@ -605,26 +605,20 @@ class TestBatchCreateSegmentToIndexTask:
mock_storage.download.side_effect = mock_download
# Execute the task
# Execute the task - should raise ValueError for empty CSV
job_id = str(uuid.uuid4())
batch_create_segment_to_index_task(
job_id=job_id,
upload_file_id=upload_file.id,
dataset_id=dataset.id,
document_id=document.id,
tenant_id=tenant.id,
user_id=account.id,
)
with pytest.raises(ValueError, match="The CSV file is empty"):
batch_create_segment_to_index_task(
job_id=job_id,
upload_file_id=upload_file.id,
dataset_id=dataset.id,
document_id=document.id,
tenant_id=tenant.id,
user_id=account.id,
)
# Verify error handling
# Check Redis cache was set to error status
from extensions.ext_redis import redis_client
cache_key = f"segment_batch_import_{job_id}"
cache_value = redis_client.get(cache_key)
assert cache_value == b"error"
# Verify no segments were created
# Since exception was raised, no segments should be created
from extensions.ext_database import db
segments = db.session.query(DocumentSegment).all()

View File

@ -153,8 +153,7 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task(document_ids, dataset.id)
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id.in_(document_ids)).count() == 0
# Verify segments are deleted
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id.in_(document_ids))
@ -162,9 +161,9 @@ class TestCleanNotionDocumentTask:
== 0
)
# Verify index processor was called for each document
# Verify index processor was called
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
assert mock_processor.clean.call_count == len(document_ids)
mock_processor.clean.assert_called_once()
# This test successfully verifies:
# 1. Document records are properly deleted from the database
@ -186,12 +185,12 @@ class TestCleanNotionDocumentTask:
non_existent_dataset_id = str(uuid.uuid4())
document_ids = [str(uuid.uuid4()), str(uuid.uuid4())]
# Execute cleanup task with non-existent dataset
clean_notion_document_task(document_ids, non_existent_dataset_id)
# Execute cleanup task with non-existent dataset - expect exception
with pytest.raises(Exception, match="Document has no dataset"):
clean_notion_document_task(document_ids, non_existent_dataset_id)
# Verify that the index processor was not called
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_processor.clean.assert_not_called()
# Verify that the index processor factory was not used
mock_index_processor_factory.return_value.init_index_processor.assert_not_called()
def test_clean_notion_document_task_empty_document_list(
self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@ -229,9 +228,13 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task with empty document list
clean_notion_document_task([], dataset.id)
# Verify that the index processor was not called
# Verify that the index processor was called once with empty node list
mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_processor.clean.assert_not_called()
assert mock_processor.clean.call_count == 1
args, kwargs = mock_processor.clean.call_args
# args: (dataset, total_index_node_ids)
assert isinstance(args[0], Dataset)
assert args[1] == []
def test_clean_notion_document_task_with_different_index_types(
self, db_session_with_containers, mock_index_processor_factory, mock_external_service_dependencies
@ -315,8 +318,7 @@ class TestCleanNotionDocumentTask:
# Note: This test successfully verifies cleanup with different document types.
# The task properly handles various index types and document configurations.
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
# Verify segments are deleted
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id == document.id)
@ -404,8 +406,7 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task([document.id], dataset.id)
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
# Verify segments are deleted
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).count()
== 0
@ -508,8 +509,7 @@ class TestCleanNotionDocumentTask:
clean_notion_document_task(documents_to_clean, dataset.id)
# Verify only specified documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id.in_(documents_to_clean)).count() == 0
# Verify only specified documents' segments are deleted
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id.in_(documents_to_clean))
@ -697,11 +697,12 @@ class TestCleanNotionDocumentTask:
db_session_with_containers.commit()
# Mock index processor to raise an exception
mock_index_processor = mock_index_processor_factory.init_index_processor.return_value
mock_index_processor = mock_index_processor_factory.return_value.init_index_processor.return_value
mock_index_processor.clean.side_effect = Exception("Index processor error")
# Execute cleanup task - it should handle the exception gracefully
clean_notion_document_task([document.id], dataset.id)
# Execute cleanup task - current implementation propagates the exception
with pytest.raises(Exception, match="Index processor error"):
clean_notion_document_task([document.id], dataset.id)
# Note: This test demonstrates the task's error handling capability.
# Even with external service errors, the database operations complete successfully.
@ -803,8 +804,7 @@ class TestCleanNotionDocumentTask:
all_document_ids = [doc.id for doc in documents]
clean_notion_document_task(all_document_ids, dataset.id)
# Verify all documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.dataset_id == dataset.id).count() == 0
# Verify all segments are deleted
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset.id).count()
== 0
@ -914,8 +914,7 @@ class TestCleanNotionDocumentTask:
clean_notion_document_task([target_document.id], target_dataset.id)
# Verify only documents from target dataset are deleted
assert db_session_with_containers.query(Document).filter(Document.id == target_document.id).count() == 0
# Verify only documents' segments from target dataset are deleted
assert (
db_session_with_containers.query(DocumentSegment)
.filter(DocumentSegment.document_id == target_document.id)
@ -1030,8 +1029,7 @@ class TestCleanNotionDocumentTask:
all_document_ids = [doc.id for doc in documents]
clean_notion_document_task(all_document_ids, dataset.id)
# Verify all documents and segments are deleted regardless of status
assert db_session_with_containers.query(Document).filter(Document.dataset_id == dataset.id).count() == 0
# Verify all segments are deleted regardless of status
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.dataset_id == dataset.id).count()
== 0
@ -1142,8 +1140,7 @@ class TestCleanNotionDocumentTask:
# Execute cleanup task
clean_notion_document_task([document.id], dataset.id)
# Verify documents and segments are deleted
assert db_session_with_containers.query(Document).filter(Document.id == document.id).count() == 0
# Verify segments are deleted
assert (
db_session_with_containers.query(DocumentSegment).filter(DocumentSegment.document_id == document.id).count()
== 0

View File

@ -0,0 +1,182 @@
from unittest.mock import MagicMock, patch
import pytest
from faker import Faker
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document, DocumentSegment
from tasks.document_indexing_update_task import document_indexing_update_task
class TestDocumentIndexingUpdateTask:
@pytest.fixture
def mock_external_dependencies(self):
"""Patch external collaborators used by the update task.
- IndexProcessorFactory.init_index_processor().clean(...)
- IndexingRunner.run([...])
"""
with (
patch("tasks.document_indexing_update_task.IndexProcessorFactory") as mock_factory,
patch("tasks.document_indexing_update_task.IndexingRunner") as mock_runner,
):
processor_instance = MagicMock()
mock_factory.return_value.init_index_processor.return_value = processor_instance
runner_instance = MagicMock()
mock_runner.return_value = runner_instance
yield {
"factory": mock_factory,
"processor": processor_instance,
"runner": mock_runner,
"runner_instance": runner_instance,
}
def _create_dataset_document_with_segments(self, db_session_with_containers, *, segment_count: int = 2):
fake = Faker()
# Account and tenant
account = Account(
email=fake.email(),
name=fake.name(),
interface_language="en-US",
status="active",
)
db_session_with_containers.add(account)
db_session_with_containers.commit()
tenant = Tenant(name=fake.company(), status="normal")
db_session_with_containers.add(tenant)
db_session_with_containers.commit()
join = TenantAccountJoin(
tenant_id=tenant.id,
account_id=account.id,
role=TenantAccountRole.OWNER,
current=True,
)
db_session_with_containers.add(join)
db_session_with_containers.commit()
# Dataset and document
dataset = Dataset(
tenant_id=tenant.id,
name=fake.company(),
description=fake.text(max_nb_chars=64),
data_source_type="upload_file",
indexing_technique="high_quality",
created_by=account.id,
)
db_session_with_containers.add(dataset)
db_session_with_containers.commit()
document = Document(
tenant_id=tenant.id,
dataset_id=dataset.id,
position=0,
data_source_type="upload_file",
batch="test_batch",
name=fake.file_name(),
created_from="upload_file",
created_by=account.id,
indexing_status="waiting",
enabled=True,
doc_form="text_model",
)
db_session_with_containers.add(document)
db_session_with_containers.commit()
# Segments
node_ids = []
for i in range(segment_count):
node_id = f"node-{i + 1}"
seg = DocumentSegment(
tenant_id=tenant.id,
dataset_id=dataset.id,
document_id=document.id,
position=i,
content=fake.text(max_nb_chars=32),
answer=None,
word_count=10,
tokens=5,
index_node_id=node_id,
status="completed",
created_by=account.id,
)
db_session_with_containers.add(seg)
node_ids.append(node_id)
db_session_with_containers.commit()
# Refresh to ensure ORM state
db_session_with_containers.refresh(dataset)
db_session_with_containers.refresh(document)
return dataset, document, node_ids
def test_cleans_segments_and_reindexes(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Act
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Assert document status updated before reindex
updated = db_session_with_containers.query(Document).where(Document.id == document.id).first()
assert updated.indexing_status == "parsing"
assert updated.processing_started_at is not None
# Segments should be deleted
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining == 0
# Assert index processor clean was called with expected args
clean_call = mock_external_dependencies["processor"].clean.call_args
assert clean_call is not None
args, kwargs = clean_call
# args[0] is a Dataset instance (from another session) — validate by id
assert getattr(args[0], "id", None) == dataset.id
# args[1] should contain our node_ids
assert set(args[1]) == set(node_ids)
assert kwargs.get("with_keywords") is True
assert kwargs.get("delete_child_chunks") is True
# Assert indexing runner invoked with the updated document
run_call = mock_external_dependencies["runner_instance"].run.call_args
assert run_call is not None
run_docs = run_call[0][0]
assert len(run_docs) == 1
first = run_docs[0]
assert getattr(first, "id", None) == document.id
def test_clean_error_is_logged_and_indexing_continues(self, db_session_with_containers, mock_external_dependencies):
dataset, document, node_ids = self._create_dataset_document_with_segments(db_session_with_containers)
# Force clean to raise; task should continue to indexing
mock_external_dependencies["processor"].clean.side_effect = Exception("boom")
document_indexing_update_task(dataset.id, document.id)
# Ensure we see committed changes from another session
db_session_with_containers.expire_all()
# Indexing should still be triggered
mock_external_dependencies["runner_instance"].run.assert_called_once()
# Segments should remain (since clean failed before DB delete)
remaining = (
db_session_with_containers.query(DocumentSegment).where(DocumentSegment.document_id == document.id).count()
)
assert remaining > 0
def test_document_not_found_noop(self, db_session_with_containers, mock_external_dependencies):
fake = Faker()
# Act with non-existent document id
document_indexing_update_task(dataset_id=fake.uuid4(), document_id=fake.uuid4())
# Neither processor nor runner should be called
mock_external_dependencies["processor"].clean.assert_not_called()
mock_external_dependencies["runner_instance"].run.assert_not_called()

View File

@ -0,0 +1,146 @@
import sys
import threading
import types
from unittest.mock import MagicMock
import commands
from libs.db_migration_lock import LockNotOwnedError, RedisError
HEARTBEAT_WAIT_TIMEOUT_SECONDS = 5.0
def _install_fake_flask_migrate(monkeypatch, upgrade_impl) -> None:
module = types.ModuleType("flask_migrate")
module.upgrade = upgrade_impl
monkeypatch.setitem(sys.modules, "flask_migrate", module)
def _invoke_upgrade_db() -> int:
try:
commands.upgrade_db.callback()
except SystemExit as e:
return int(e.code or 0)
return 0
def test_upgrade_db_skips_when_lock_not_acquired(monkeypatch, capsys):
monkeypatch.setattr(commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 1234)
lock = MagicMock()
lock.acquire.return_value = False
commands.redis_client.lock.return_value = lock
exit_code = _invoke_upgrade_db()
captured = capsys.readouterr()
assert exit_code == 0
assert "Database migration skipped" in captured.out
commands.redis_client.lock.assert_called_once_with(name="db_upgrade_lock", timeout=1234, thread_local=False)
lock.acquire.assert_called_once_with(blocking=False)
lock.release.assert_not_called()
def test_upgrade_db_failure_not_masked_by_lock_release(monkeypatch, capsys):
monkeypatch.setattr(commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 321)
lock = MagicMock()
lock.acquire.return_value = True
lock.release.side_effect = LockNotOwnedError("simulated")
commands.redis_client.lock.return_value = lock
def _upgrade():
raise RuntimeError("boom")
_install_fake_flask_migrate(monkeypatch, _upgrade)
exit_code = _invoke_upgrade_db()
captured = capsys.readouterr()
assert exit_code == 1
assert "Database migration failed: boom" in captured.out
commands.redis_client.lock.assert_called_once_with(name="db_upgrade_lock", timeout=321, thread_local=False)
lock.acquire.assert_called_once_with(blocking=False)
lock.release.assert_called_once()
def test_upgrade_db_success_ignores_lock_not_owned_on_release(monkeypatch, capsys):
monkeypatch.setattr(commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 999)
lock = MagicMock()
lock.acquire.return_value = True
lock.release.side_effect = LockNotOwnedError("simulated")
commands.redis_client.lock.return_value = lock
_install_fake_flask_migrate(monkeypatch, lambda: None)
exit_code = _invoke_upgrade_db()
captured = capsys.readouterr()
assert exit_code == 0
assert "Database migration successful!" in captured.out
commands.redis_client.lock.assert_called_once_with(name="db_upgrade_lock", timeout=999, thread_local=False)
lock.acquire.assert_called_once_with(blocking=False)
lock.release.assert_called_once()
def test_upgrade_db_renews_lock_during_migration(monkeypatch, capsys):
"""
Ensure the lock is renewed while migrations are running, so the base TTL can stay short.
"""
# Use a small TTL so the heartbeat interval triggers quickly.
monkeypatch.setattr(commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 0.3)
lock = MagicMock()
lock.acquire.return_value = True
commands.redis_client.lock.return_value = lock
renewed = threading.Event()
def _reacquire():
renewed.set()
return True
lock.reacquire.side_effect = _reacquire
def _upgrade():
assert renewed.wait(HEARTBEAT_WAIT_TIMEOUT_SECONDS)
_install_fake_flask_migrate(monkeypatch, _upgrade)
exit_code = _invoke_upgrade_db()
_ = capsys.readouterr()
assert exit_code == 0
assert lock.reacquire.call_count >= 1
def test_upgrade_db_ignores_reacquire_errors(monkeypatch, capsys):
# Use a small TTL so heartbeat runs during the upgrade call.
monkeypatch.setattr(commands, "DB_UPGRADE_LOCK_TTL_SECONDS", 0.3)
lock = MagicMock()
lock.acquire.return_value = True
commands.redis_client.lock.return_value = lock
attempted = threading.Event()
def _reacquire():
attempted.set()
raise RedisError("simulated")
lock.reacquire.side_effect = _reacquire
def _upgrade():
assert attempted.wait(HEARTBEAT_WAIT_TIMEOUT_SECONDS)
_install_fake_flask_migrate(monkeypatch, _upgrade)
exit_code = _invoke_upgrade_db()
_ = capsys.readouterr()
assert exit_code == 0
assert lock.reacquire.call_count >= 1

View File

@ -25,15 +25,19 @@ class TestMessageCycleManagerOptimization:
task_state = Mock()
return MessageCycleManager(application_generate_entity=mock_application_generate_entity, task_state=task_state)
def test_get_message_event_type_with_message_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE_FILE when message has files."""
def test_get_message_event_type_with_assistant_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE_FILE when message has assistant-generated files.
This ensures that AI-generated images (belongs_to='assistant') trigger the MESSAGE_FILE event,
allowing the frontend to properly display generated image files with url field.
"""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
# Setup mock session and message file
mock_session = Mock()
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
mock_message_file = Mock()
# Current implementation uses session.scalar(select(...))
mock_message_file.belongs_to = "assistant"
mock_session.scalar.return_value = mock_message_file
# Execute
@ -44,6 +48,31 @@ class TestMessageCycleManagerOptimization:
assert result == StreamEvent.MESSAGE_FILE
mock_session.scalar.assert_called_once()
def test_get_message_event_type_with_user_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE when message only has user-uploaded files.
This is a regression test for the issue where user-uploaded images (belongs_to='user')
caused the LLM text response to be incorrectly tagged with MESSAGE_FILE event,
resulting in broken images in the chat UI. The query filters for belongs_to='assistant',
so when only user files exist, the database query returns None, resulting in MESSAGE event type.
"""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
# Setup mock session and message file
mock_session = Mock()
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
# When querying for assistant files with only user files present, return None
# (simulates database query with belongs_to='assistant' filter returning no results)
mock_session.scalar.return_value = None
# Execute
with current_app.app_context():
result = message_cycle_manager.get_message_event_type("test-message-id")
# Assert
assert result == StreamEvent.MESSAGE
mock_session.scalar.assert_called_once()
def test_get_message_event_type_without_message_file(self, message_cycle_manager):
"""Test get_message_event_type returns MESSAGE when message has no files."""
with patch("core.app.task_pipeline.message_cycle_manager.session_factory") as mock_session_factory:
@ -69,7 +98,7 @@ class TestMessageCycleManagerOptimization:
mock_session_factory.create_session.return_value.__enter__.return_value = mock_session
mock_message_file = Mock()
# Current implementation uses session.scalar(select(...))
mock_message_file.belongs_to = "assistant"
mock_session.scalar.return_value = mock_message_file
# Execute: compute event type once, then pass to message_to_stream_response

View File

@ -4,7 +4,7 @@ from typing import Any
from uuid import uuid4
import pytest
from hypothesis import given, settings
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from core.file import File, FileTransferMethod, FileType
@ -493,7 +493,7 @@ def _scalar_value() -> st.SearchStrategy[int | float | str | File | None]:
)
@settings(max_examples=50)
@settings(max_examples=30, suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much], deadline=None)
@given(_scalar_value())
def test_build_segment_and_extract_values_for_scalar_types(value):
seg = variable_factory.build_segment(value)
@ -504,7 +504,7 @@ def test_build_segment_and_extract_values_for_scalar_types(value):
assert seg.value == value
@settings(max_examples=50)
@settings(max_examples=30, suppress_health_check=[HealthCheck.too_slow, HealthCheck.filter_too_much], deadline=None)
@given(values=st.lists(_scalar_value(), max_size=20))
def test_build_segment_and_extract_values_for_array_types(values):
seg = variable_factory.build_segment(values)

View File

@ -83,23 +83,127 @@ def mock_documents(document_ids, dataset_id):
def mock_db_session():
"""Mock database session via session_factory.create_session()."""
with patch("tasks.document_indexing_task.session_factory") as mock_sf:
session = MagicMock()
# Ensure tests that expect session.close() to be called can observe it via the context manager
session.close = MagicMock()
cm = MagicMock()
cm.__enter__.return_value = session
# Link __exit__ to session.close so "close" expectations reflect context manager teardown
sessions = [] # Track all created sessions
# Shared mock data that all sessions will access
shared_mock_data = {"dataset": None, "documents": None, "doc_iter": None}
def _exit_side_effect(*args, **kwargs):
session.close()
def create_session_side_effect():
session = MagicMock()
session.close = MagicMock()
cm.__exit__.side_effect = _exit_side_effect
mock_sf.create_session.return_value = cm
# Track commit calls
commit_mock = MagicMock()
session.commit = commit_mock
cm = MagicMock()
cm.__enter__.return_value = session
query = MagicMock()
session.query.return_value = query
query.where.return_value = query
yield session
def _exit_side_effect(*args, **kwargs):
session.close()
cm.__exit__.side_effect = _exit_side_effect
# Support session.begin() for transactions
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
def begin_exit_side_effect(*args, **kwargs):
# Auto-commit on transaction exit (like SQLAlchemy)
session.commit()
# Also mark wrapper's commit as called
if sessions:
sessions[0].commit()
begin_cm.__exit__ = MagicMock(side_effect=begin_exit_side_effect)
session.begin = MagicMock(return_value=begin_cm)
sessions.append(session)
# Setup query with side_effect to handle both Dataset and Document queries
def query_side_effect(*args):
query = MagicMock()
if args and args[0] == Dataset and shared_mock_data["dataset"] is not None:
where_result = MagicMock()
where_result.first.return_value = shared_mock_data["dataset"]
query.where = MagicMock(return_value=where_result)
elif args and args[0] == Document and shared_mock_data["documents"] is not None:
# Support both .first() and .all() calls with chaining
where_result = MagicMock()
where_result.where = MagicMock(return_value=where_result)
# Create an iterator for .first() calls if not exists
if shared_mock_data["doc_iter"] is None:
docs = shared_mock_data["documents"] or [None]
shared_mock_data["doc_iter"] = iter(docs)
where_result.first = lambda: next(shared_mock_data["doc_iter"], None)
docs_or_empty = shared_mock_data["documents"] or []
where_result.all = MagicMock(return_value=docs_or_empty)
query.where = MagicMock(return_value=where_result)
else:
query.where = MagicMock(return_value=query)
return query
session.query = MagicMock(side_effect=query_side_effect)
return cm
mock_sf.create_session.side_effect = create_session_side_effect
# Create a wrapper that behaves like the first session but has access to all sessions
class SessionWrapper:
def __init__(self):
self._sessions = sessions
self._shared_data = shared_mock_data
# Create a default session for setup phase
self._default_session = MagicMock()
self._default_session.close = MagicMock()
self._default_session.commit = MagicMock()
# Support session.begin() for default session too
begin_cm = MagicMock()
begin_cm.__enter__.return_value = self._default_session
def default_begin_exit_side_effect(*args, **kwargs):
self._default_session.commit()
begin_cm.__exit__ = MagicMock(side_effect=default_begin_exit_side_effect)
self._default_session.begin = MagicMock(return_value=begin_cm)
def default_query_side_effect(*args):
query = MagicMock()
if args and args[0] == Dataset and shared_mock_data["dataset"] is not None:
where_result = MagicMock()
where_result.first.return_value = shared_mock_data["dataset"]
query.where = MagicMock(return_value=where_result)
elif args and args[0] == Document and shared_mock_data["documents"] is not None:
where_result = MagicMock()
where_result.where = MagicMock(return_value=where_result)
if shared_mock_data["doc_iter"] is None:
docs = shared_mock_data["documents"] or [None]
shared_mock_data["doc_iter"] = iter(docs)
where_result.first = lambda: next(shared_mock_data["doc_iter"], None)
docs_or_empty = shared_mock_data["documents"] or []
where_result.all = MagicMock(return_value=docs_or_empty)
query.where = MagicMock(return_value=where_result)
else:
query.where = MagicMock(return_value=query)
return query
self._default_session.query = MagicMock(side_effect=default_query_side_effect)
def __getattr__(self, name):
# Forward all attribute access to the first session, or default if none created yet
target_session = self._sessions[0] if self._sessions else self._default_session
return getattr(target_session, name)
@property
def all_sessions(self):
"""Access all created sessions for testing."""
return self._sessions
wrapper = SessionWrapper()
yield wrapper
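Stripped of the suite-specific query plumbing, the fixture's core move is a create_session() side_effect that mints a fresh mock session per call, records it, and routes context-manager teardown through close() — a minimal sketch, with make_mock_session_factory and created as illustrative names:

from unittest.mock import MagicMock

def make_mock_session_factory(created):
    """Mock session_factory whose create_session() yields a new session per call."""
    factory = MagicMock()

    def _create_session():
        session = MagicMock()

        # session.begin() returns a context manager that commits on clean exit,
        # mimicking SQLAlchemy's transactional behavior.
        begin_cm = MagicMock()
        begin_cm.__enter__.return_value = session

        def _begin_exit(exc_type, exc, tb):
            if exc_type is None:
                session.commit()
            return False  # never suppress exceptions

        begin_cm.__exit__.side_effect = _begin_exit
        session.begin.return_value = begin_cm

        # create_session() is itself a context manager; leaving it closes the
        # session, so close() assertions observe the teardown.
        cm = MagicMock()
        cm.__enter__.return_value = session

        def _cm_exit(exc_type, exc, tb):
            session.close()
            return False

        cm.__exit__.side_effect = _cm_exit

        created.append(session)
        return cm

    factory.create_session.side_effect = _create_session
    return factory

With session_factory patched to this, created collects every session the task opens, so teardown assertions can range over all of them rather than over a single shared mock.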
@pytest.fixture
@@ -252,18 +356,9 @@ class TestTaskEnqueuing:
use the deprecated function.
"""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
# Return documents one by one for each call
mock_query.where.return_value.first.side_effect = mock_documents
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -304,21 +399,9 @@ class TestBatchProcessing:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
# Create an iterator for documents
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
# Return documents one by one for each call
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -357,19 +440,9 @@ class TestBatchProcessing:
doc.stopped_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
mock_feature_service.get_features.return_value.billing.enabled = True
mock_feature_service.get_features.return_value.billing.subscription.plan = CloudPlan.PROFESSIONAL
@@ -407,19 +480,9 @@ class TestBatchProcessing:
doc.stopped_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
mock_feature_service.get_features.return_value.billing.enabled = True
mock_feature_service.get_features.return_value.billing.subscription.plan = CloudPlan.SANDBOX
@@ -444,7 +507,10 @@ class TestBatchProcessing:
"""
# Arrange
document_ids = []
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
# Set shared mock data with empty documents list
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = []
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -482,19 +548,9 @@ class TestProgressTracking:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -528,19 +584,9 @@ class TestProgressTracking:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -635,19 +681,9 @@ class TestErrorHandling:
doc.stopped_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Set up to trigger vector space limit error
mock_feature_service.get_features.return_value.billing.enabled = True
@@ -674,17 +710,9 @@ class TestErrorHandling:
Errors during indexing should be caught and logged, but not crash the task.
"""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first.side_effect = mock_documents
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Make IndexingRunner raise an exception
mock_indexing_runner.run.side_effect = Exception("Indexing failed")
@@ -708,17 +736,9 @@ class TestErrorHandling:
but not treated as a failure.
"""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first.side_effect = mock_documents
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Make IndexingRunner raise DocumentIsPausedError
mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document is paused")
@@ -853,17 +873,9 @@ class TestTaskCancellation:
Session cleanup should happen in finally block.
"""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first.side_effect = mock_documents
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -883,17 +895,9 @@ class TestTaskCancellation:
Session cleanup should happen even when errors occur.
"""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first.side_effect = mock_documents
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Make IndexingRunner raise an exception
mock_indexing_runner.run.side_effect = Exception("Test error")
@@ -962,6 +966,7 @@ class TestAdvancedScenarios:
document_ids = [str(uuid.uuid4()) for _ in range(3)]
# Create only 2 documents (simulate one missing)
# The new code uses .all() which will only return existing documents
mock_documents = []
for i, doc_id in enumerate([document_ids[0], document_ids[2]]): # Skip middle one
doc = MagicMock(spec=Document)
@@ -971,21 +976,9 @@
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
# Create iterator that returns None for missing document
doc_responses = [mock_documents[0], None, mock_documents[1]]
doc_iter = iter(doc_responses)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data - .all() will only return existing documents
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -1075,19 +1068,9 @@ class TestAdvancedScenarios:
doc.stopped_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Set vector space exactly at limit
mock_feature_service.get_features.return_value.billing.enabled = True
@@ -1219,19 +1202,9 @@ class TestAdvancedScenarios:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Billing disabled - limits should not be checked
mock_feature_service.get_features.return_value.billing.enabled = False
@@ -1273,19 +1246,9 @@ class TestIntegration:
# Set up rpop to return None for concurrency check (no more tasks)
mock_redis.rpop.side_effect = [None]
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -1321,19 +1284,9 @@ class TestIntegration:
# Set up rpop to return None for concurrency check (no more tasks)
mock_redis.rpop.side_effect = [None]
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -1415,17 +1368,9 @@ class TestEdgeCases:
mock_document.indexing_status = "waiting"
mock_document.processing_started_at = None
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: mock_document
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = [mock_document]
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -1465,17 +1410,9 @@ class TestEdgeCases:
mock_document.indexing_status = "waiting"
mock_document.processing_started_at = None
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: mock_document
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = [mock_document]
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -1555,19 +1492,9 @@ class TestEdgeCases:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Set vector space limit to 0 (unlimited)
mock_feature_service.get_features.return_value.billing.enabled = True
@@ -1612,19 +1539,9 @@ class TestEdgeCases:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Set negative vector space limit
mock_feature_service.get_features.return_value.billing.enabled = True
@@ -1675,19 +1592,9 @@ class TestPerformanceScenarios:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Configure billing with sufficient limits
mock_feature_service.get_features.return_value.billing.enabled = True
@@ -1826,19 +1733,9 @@ class TestRobustness:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
# Make IndexingRunner raise an exception
mock_indexing_runner.run.side_effect = RuntimeError("Unexpected indexing error")
@@ -1866,7 +1763,7 @@ class TestRobustness:
- No exceptions occur
Expected behavior:
- Database session is closed
- All database sessions are closed
- No connection leaks
"""
# Arrange
@@ -1879,19 +1776,9 @@ class TestRobustness:
doc.processing_started_at = None
mock_documents.append(doc)
mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset
doc_iter = iter(mock_documents)
def mock_query_side_effect(*args):
mock_query = MagicMock()
if args[0] == Dataset:
mock_query.where.return_value.first.return_value = mock_dataset
elif args[0] == Document:
mock_query.where.return_value.first = lambda: next(doc_iter, None)
return mock_query
mock_db_session.query.side_effect = mock_query_side_effect
# Set shared mock data so all sessions can access it
mock_db_session._shared_data["dataset"] = mock_dataset
mock_db_session._shared_data["documents"] = mock_documents
with patch("tasks.document_indexing_task.FeatureService.get_features") as mock_features:
mock_features.return_value.billing.enabled = False
@@ -1899,10 +1786,11 @@ class TestRobustness:
# Act
_document_indexing(dataset_id, document_ids)
# Assert
assert mock_db_session.close.called
# Verify close is called exactly once
assert mock_db_session.close.call_count == 1
# Assert - All created sessions should be closed
# The code creates multiple sessions: validation, Phase 1 (parsing), Phase 3 (summary)
assert len(mock_db_session.all_sessions) >= 1
for session in mock_db_session.all_sessions:
assert session.close.called, "All sessions should be closed"
def test_task_proxy_handles_feature_service_failure(self, tenant_id, dataset_id, document_ids, mock_redis):
"""

View File

@@ -109,25 +109,87 @@ def mock_document_segments(document_id):
@pytest.fixture
def mock_db_session():
"""Mock database session via session_factory.create_session()."""
"""Mock database session via session_factory.create_session().
After the session-split refactor, the code calls create_session() multiple times.
This fixture creates shared query mocks so all sessions use the same
query configuration, simulating database persistence across sessions.
The fixture automatically converts list side_effects into infinite cycles to prevent StopIteration.
Tests configure mocks the same way as before, but behind the scenes the values
are cycled infinitely for all sessions.
"""
from itertools import cycle
with patch("tasks.document_indexing_sync_task.session_factory") as mock_sf:
session = MagicMock()
# Ensure tests can observe session.close() via context manager teardown
session.close = MagicMock()
cm = MagicMock()
cm.__enter__.return_value = session
sessions = []
def _exit_side_effect(*args, **kwargs):
session.close()
# Shared query mocks - all sessions use these
shared_query = MagicMock()
shared_filter_by = MagicMock()
shared_scalars_result = MagicMock()
cm.__exit__.side_effect = _exit_side_effect
mock_sf.create_session.return_value = cm
# Custom mock class for .first() that auto-cycles list side_effects
class CyclicMock(MagicMock):
def __setattr__(self, name, value):
if name == "side_effect" and value is not None:
# Convert list/tuple to infinite cycle
if isinstance(value, (list, tuple)):
value = cycle(value)
super().__setattr__(name, value)
query = MagicMock()
session.query.return_value = query
query.where.return_value = query
session.scalars.return_value = MagicMock()
yield session
shared_query.where.return_value.first = CyclicMock()
shared_filter_by.first = CyclicMock()
def _create_session():
"""Create a new mock session for each create_session() call."""
session = MagicMock()
session.close = MagicMock()
session.commit = MagicMock()
# Mock session.begin() context manager
begin_cm = MagicMock()
begin_cm.__enter__.return_value = session
def _begin_exit_side_effect(exc_type, exc, tb):
# commit on success
if exc_type is None:
session.commit()
# return False to propagate exceptions
return False
begin_cm.__exit__.side_effect = _begin_exit_side_effect
session.begin.return_value = begin_cm
# Mock create_session() context manager
cm = MagicMock()
cm.__enter__.return_value = session
def _exit_side_effect(exc_type, exc, tb):
session.close()
return False
cm.__exit__.side_effect = _exit_side_effect
# All sessions use the same shared query mocks
session.query.return_value = shared_query
shared_query.where.return_value = shared_query
shared_query.filter_by.return_value = shared_filter_by
session.scalars.return_value = shared_scalars_result
sessions.append(session)
# Attach helpers on the first created session for assertions across all sessions
if len(sessions) == 1:
session.get_all_sessions = lambda: sessions
session.any_close_called = lambda: any(s.close.called for s in sessions)
session.any_commit_called = lambda: any(s.commit.called for s in sessions)
return cm
mock_sf.create_session.side_effect = _create_session
# Eagerly create the first session, then yield it for tests to configure
_create_session()
yield sessions[0]
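The CyclicMock trick stands on its own: a plain MagicMock raises StopIteration once a list side_effect is exhausted, while intercepting __setattr__ to wrap lists in itertools.cycle makes the sequence repeat forever — a self-contained demo of the idea:

from itertools import cycle
from unittest.mock import MagicMock

class CyclicMock(MagicMock):
    """MagicMock that turns list/tuple side_effects into infinite cycles."""

    def __setattr__(self, name, value):
        if name == "side_effect" and isinstance(value, (list, tuple)):
            value = cycle(value)
        super().__setattr__(name, value)

first = CyclicMock()
first.side_effect = ["document", "dataset"]
# The third call would raise StopIteration on a plain MagicMock;
# here the sequence simply wraps around.
assert [first() for _ in range(5)] == ["document", "dataset", "document", "dataset", "document"]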
@pytest.fixture
@@ -186,8 +248,8 @@ class TestDocumentIndexingSyncTask:
# Act
document_indexing_sync_task(dataset_id, document_id)
# Assert
mock_db_session.close.assert_called_once()
# Assert - at least one session should have been closed
assert mock_db_session.any_close_called()
def test_missing_notion_workspace_id(self, mock_db_session, mock_document, dataset_id, document_id):
"""Test that task raises error when notion_workspace_id is missing."""
@@ -230,6 +292,7 @@ class TestDocumentIndexingSyncTask:
"""Test that task handles missing credentials by updating document status."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_datasource_provider_service.get_datasource_credentials.return_value = None
# Act
@@ -239,8 +302,8 @@ class TestDocumentIndexingSyncTask:
assert mock_document.indexing_status == "error"
assert "Datasource credential not found" in mock_document.error
assert mock_document.stopped_at is not None
mock_db_session.commit.assert_called()
mock_db_session.close.assert_called()
assert mock_db_session.any_commit_called()
assert mock_db_session.any_close_called()
def test_page_not_updated(
self,
@@ -254,6 +317,7 @@ class TestDocumentIndexingSyncTask:
"""Test that task does nothing when page has not been updated."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.return_value = mock_document
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
# Return same time as stored in document
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-01T00:00:00Z"
@@ -263,8 +327,8 @@ class TestDocumentIndexingSyncTask:
# Assert
# Document status should remain unchanged
assert mock_document.indexing_status == "completed"
# Session should still be closed via context manager teardown
assert mock_db_session.close.called
# At least one session should have been closed via context manager teardown
assert mock_db_session.any_close_called()
def test_successful_sync_when_page_updated(
self,
@@ -281,7 +345,20 @@
):
"""Test successful sync flow when Notion page has been updated."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
# Set exact sequence of returns across calls to `.first()`:
# 1) document (initial fetch)
# 2) dataset (pre-check)
# 3) dataset (cleaning phase)
# 4) document (pre-indexing update)
# 5) document (indexing runner fetch)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
mock_dataset,
mock_document,
mock_document,
]
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
# NotionExtractor returns updated time
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
@@ -299,28 +376,40 @@
mock_processor.clean.assert_called_once()
# Verify segments were deleted from database in batch (DELETE FROM document_segments)
execute_sqls = [" ".join(str(c[0][0]).split()) for c in mock_db_session.execute.call_args_list]
# Aggregate execute calls across all created sessions
execute_sqls = []
for s in mock_db_session.get_all_sessions():
execute_sqls.extend([" ".join(str(c[0][0]).split()) for c in s.execute.call_args_list])
assert any("DELETE FROM document_segments" in sql for sql in execute_sqls)
# Verify indexing runner was called
mock_indexing_runner.run.assert_called_once_with([mock_document])
# Verify session operations
assert mock_db_session.commit.called
mock_db_session.close.assert_called_once()
# Verify session operations (across any created session)
assert mock_db_session.any_commit_called()
assert mock_db_session.any_close_called()
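The whitespace-normalized aggregation above is worth naming; an equivalent helper (executed_sql is an illustrative name, not part of the suite):

def executed_sql(sessions):
    """Whitespace-normalized SQL from every execute() call across mock sessions."""
    sqls = []
    for s in sessions:
        for call in s.execute.call_args_list:
            sqls.append(" ".join(str(call.args[0]).split()))
    return sqls

# The assertion then reads:
# assert any("DELETE FROM document_segments" in sql
#            for sql in executed_sql(mock_db_session.get_all_sessions()))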
def test_dataset_not_found_during_cleaning(
self,
mock_db_session,
mock_datasource_provider_service,
mock_notion_extractor,
mock_indexing_runner,
mock_document,
mock_dataset,
dataset_id,
document_id,
):
"""Test that task handles dataset not found during cleaning phase."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, None]
# Sequence: document (initial), dataset (pre-check), None (cleaning), document (update), document (indexing)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
None,
mock_document,
mock_document,
]
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
# Act
@@ -329,8 +418,8 @@
# Assert
# Document should still be set to parsing
assert mock_document.indexing_status == "parsing"
# Session should be closed after error
mock_db_session.close.assert_called_once()
# At least one session should be closed after error
assert mock_db_session.any_close_called()
def test_cleaning_error_continues_to_indexing(
self,
@@ -346,8 +435,14 @@
):
"""Test that indexing continues even if cleaning fails."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
mock_db_session.scalars.return_value.all.side_effect = Exception("Cleaning error")
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
# Make the cleaning step fail but not the segment fetch
processor = mock_index_processor_factory.return_value.init_index_processor.return_value
processor.clean.side_effect = Exception("Cleaning error")
mock_db_session.scalars.return_value.all.return_value = []
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
# Act
@@ -356,7 +451,7 @@
# Assert
# Indexing should still be attempted despite cleaning error
mock_indexing_runner.run.assert_called_once_with([mock_document])
mock_db_session.close.assert_called_once()
assert mock_db_session.any_close_called()
def test_indexing_runner_document_paused_error(
self,
@@ -373,7 +468,10 @@
):
"""Test that DocumentIsPausedError is handled gracefully."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document paused")
@@ -383,7 +481,7 @@
# Assert
# Session should be closed after handling error
mock_db_session.close.assert_called_once()
assert mock_db_session.any_close_called()
def test_indexing_runner_general_error(
self,
@@ -400,7 +498,10 @@
):
"""Test that general exceptions during indexing are handled."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
from itertools import cycle
mock_db_session.query.return_value.where.return_value.first.side_effect = cycle([mock_document, mock_dataset])
mock_db_session.query.return_value.filter_by.return_value.first.return_value = mock_document
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"
mock_indexing_runner.run.side_effect = Exception("Indexing error")
@@ -410,7 +511,7 @@
# Assert
# Session should be closed after error
mock_db_session.close.assert_called_once()
assert mock_db_session.any_close_called()
def test_notion_extractor_initialized_with_correct_params(
self,
@@ -517,7 +618,14 @@
):
"""Test that index processor clean is called with correct parameters."""
# Arrange
mock_db_session.query.return_value.where.return_value.first.side_effect = [mock_document, mock_dataset]
# Sequence: document (initial), dataset (pre-check), dataset (cleaning), document (update), document (indexing)
mock_db_session.query.return_value.where.return_value.first.side_effect = [
mock_document,
mock_dataset,
mock_dataset,
mock_document,
mock_document,
]
mock_db_session.scalars.return_value.all.return_value = mock_document_segments
mock_notion_extractor.get_notion_last_edited_time.return_value = "2024-01-02T00:00:00Z"

api/uv.lock generated
View File

@@ -1593,14 +1593,14 @@ requires-dist = [
{ name = "flask-sqlalchemy", specifier = "~=3.1.1" },
{ name = "gevent", specifier = "~=25.9.1" },
{ name = "gmpy2", specifier = "~=2.2.1" },
{ name = "google-api-core", specifier = "==2.18.0" },
{ name = "google-api-core", specifier = ">=2.19.1" },
{ name = "google-api-python-client", specifier = "==2.90.0" },
{ name = "google-auth", specifier = "==2.29.0" },
{ name = "google-auth", specifier = ">=2.47.0" },
{ name = "google-auth-httplib2", specifier = "==0.2.0" },
{ name = "google-cloud-aiplatform", specifier = "==1.49.0" },
{ name = "googleapis-common-protos", specifier = "==1.63.0" },
{ name = "google-cloud-aiplatform", specifier = ">=1.123.0" },
{ name = "googleapis-common-protos", specifier = ">=1.65.0" },
{ name = "gunicorn", specifier = "~=23.0.0" },
{ name = "httpx", extras = ["socks"], specifier = "~=0.27.0" },
{ name = "httpx", extras = ["socks"], specifier = "~=0.28.0" },
{ name = "httpx-sse", specifier = "~=0.4.0" },
{ name = "jieba", specifier = "==0.42.1" },
{ name = "json-repair", specifier = ">=0.55.1" },
@@ -1612,23 +1612,23 @@ requires-dist = [
{ name = "mlflow-skinny", specifier = ">=3.0.0" },
{ name = "numpy", specifier = "~=1.26.4" },
{ name = "openpyxl", specifier = "~=3.1.5" },
{ name = "opentelemetry-api", specifier = "==1.27.0" },
{ name = "opentelemetry-distro", specifier = "==0.48b0" },
{ name = "opentelemetry-exporter-otlp", specifier = "==1.27.0" },
{ name = "opentelemetry-exporter-otlp-proto-common", specifier = "==1.27.0" },
{ name = "opentelemetry-exporter-otlp-proto-grpc", specifier = "==1.27.0" },
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = "==1.27.0" },
{ name = "opentelemetry-instrumentation", specifier = "==0.48b0" },
{ name = "opentelemetry-instrumentation-celery", specifier = "==0.48b0" },
{ name = "opentelemetry-instrumentation-flask", specifier = "==0.48b0" },
{ name = "opentelemetry-instrumentation-httpx", specifier = "==0.48b0" },
{ name = "opentelemetry-instrumentation-redis", specifier = "==0.48b0" },
{ name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.48b0" },
{ name = "opentelemetry-propagator-b3", specifier = "==1.27.0" },
{ name = "opentelemetry-proto", specifier = "==1.27.0" },
{ name = "opentelemetry-sdk", specifier = "==1.27.0" },
{ name = "opentelemetry-semantic-conventions", specifier = "==0.48b0" },
{ name = "opentelemetry-util-http", specifier = "==0.48b0" },
{ name = "opentelemetry-api", specifier = "==1.28.0" },
{ name = "opentelemetry-distro", specifier = "==0.49b0" },
{ name = "opentelemetry-exporter-otlp", specifier = "==1.28.0" },
{ name = "opentelemetry-exporter-otlp-proto-common", specifier = "==1.28.0" },
{ name = "opentelemetry-exporter-otlp-proto-grpc", specifier = "==1.28.0" },
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = "==1.28.0" },
{ name = "opentelemetry-instrumentation", specifier = "==0.49b0" },
{ name = "opentelemetry-instrumentation-celery", specifier = "==0.49b0" },
{ name = "opentelemetry-instrumentation-flask", specifier = "==0.49b0" },
{ name = "opentelemetry-instrumentation-httpx", specifier = "==0.49b0" },
{ name = "opentelemetry-instrumentation-redis", specifier = "==0.49b0" },
{ name = "opentelemetry-instrumentation-sqlalchemy", specifier = "==0.49b0" },
{ name = "opentelemetry-propagator-b3", specifier = "==1.28.0" },
{ name = "opentelemetry-proto", specifier = "==1.28.0" },
{ name = "opentelemetry-sdk", specifier = "==1.28.0" },
{ name = "opentelemetry-semantic-conventions", specifier = "==0.49b0" },
{ name = "opentelemetry-util-http", specifier = "==0.49b0" },
{ name = "opik", specifier = "~=1.8.72" },
{ name = "packaging", specifier = "~=23.2" },
{ name = "pandas", extras = ["excel", "output-formatting", "performance"], specifier = "~=2.2.2" },
@@ -1653,7 +1653,7 @@ requires-dist = [
{ name = "starlette", specifier = "==0.49.1" },
{ name = "tiktoken", specifier = "~=0.9.0" },
{ name = "transformers", specifier = "~=4.56.1" },
{ name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.16.1" },
{ name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.18.18" },
{ name = "weave", specifier = ">=0.52.16" },
{ name = "weaviate-client", specifier = "==4.17.0" },
{ name = "webvtt-py", specifier = "~=0.5.1" },
@@ -2284,7 +2284,7 @@ wheels = [
[[package]]
name = "google-api-core"
version = "2.18.0"
version = "2.30.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-auth" },
@@ -2293,9 +2293,9 @@ dependencies = [
{ name = "protobuf" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b2/8f/ecd68579bd2bf5e9321df60dcdee6e575adf77fedacb1d8378760b2b16b6/google-api-core-2.18.0.tar.gz", hash = "sha256:62d97417bfc674d6cef251e5c4d639a9655e00c45528c4364fbfebb478ce72a9", size = 148047, upload-time = "2024-03-21T20:16:56.269Z" }
sdist = { url = "https://files.pythonhosted.org/packages/22/98/586ec94553b569080caef635f98a3723db36a38eac0e3d7eb3ea9d2e4b9a/google_api_core-2.30.0.tar.gz", hash = "sha256:02edfa9fab31e17fc0befb5f161b3bf93c9096d99aed584625f38065c511ad9b", size = 176959, upload-time = "2026-02-18T20:28:11.926Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/86/75/59a3ad90d9b4ff5b3e0537611dbe885aeb96124521c9d35aa079f1e0f2c9/google_api_core-2.18.0-py3-none-any.whl", hash = "sha256:5a63aa102e0049abe85b5b88cb9409234c1f70afcda21ce1e40b285b9629c1d6", size = 138293, upload-time = "2024-03-21T20:16:53.645Z" },
{ url = "https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl", hash = "sha256:80be49ee937ff9aba0fd79a6eddfde35fe658b9953ab9b79c57dd7061afa8df5", size = 173288, upload-time = "2026-02-18T20:28:10.367Z" },
]
[package.optional-dependencies]
@@ -2322,16 +2322,21 @@ wheels = [
[[package]]
name = "google-auth"
version = "2.29.0"
version = "2.48.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cachetools" },
{ name = "cryptography" },
{ name = "pyasn1-modules" },
{ name = "rsa" },
]
sdist = { url = "https://files.pythonhosted.org/packages/18/b2/f14129111cfd61793609643a07ecb03651a71dd65c6974f63b0310ff4b45/google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360", size = 244326, upload-time = "2024-03-20T17:24:27.72Z" }
sdist = { url = "https://files.pythonhosted.org/packages/0c/41/242044323fbd746615884b1c16639749e73665b718209946ebad7ba8a813/google_auth-2.48.0.tar.gz", hash = "sha256:4f7e706b0cd3208a3d940a19a822c37a476ddba5450156c3e6624a71f7c841ce", size = 326522, upload-time = "2026-01-26T19:22:47.157Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9e/8d/ddbcf81ec751d8ee5fd18ac11ff38a0e110f39dfbf105e6d9db69d556dd0/google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415", size = 189186, upload-time = "2024-03-20T17:24:24.292Z" },
{ url = "https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f", size = 236499, upload-time = "2026-01-26T19:22:45.099Z" },
]
[package.optional-dependencies]
requests = [
{ name = "requests" },
]
[[package]]
@@ -2349,7 +2354,7 @@ wheels = [
[[package]]
name = "google-cloud-aiplatform"
version = "1.49.0"
version = "1.139.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docstring-parser" },
@@ -2358,15 +2363,16 @@ dependencies = [
{ name = "google-cloud-bigquery" },
{ name = "google-cloud-resource-manager" },
{ name = "google-cloud-storage" },
{ name = "google-genai" },
{ name = "packaging" },
{ name = "proto-plus" },
{ name = "protobuf" },
{ name = "pydantic" },
{ name = "shapely" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/47/21/5930a1420f82bec246ae09e1b7cc8458544f3befe669193b33a7b5c0691c/google-cloud-aiplatform-1.49.0.tar.gz", hash = "sha256:e6e6d01079bb5def49e4be4db4d12b13c624b5c661079c869c13c855e5807429", size = 5766450, upload-time = "2024-04-29T17:25:31.646Z" }
sdist = { url = "https://files.pythonhosted.org/packages/20/40/6767bd4d694354fd55842990da66f7b6ccfdce283d10f65d4a82d9a8e8df/google_cloud_aiplatform-1.139.0.tar.gz", hash = "sha256:cfaa95375bfb79a97b8c949c3ec1600505a4a9c08ca2b01c36ed659a5e05e37c", size = 9964138, upload-time = "2026-02-25T00:51:06.976Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/39/6a/7d9e1c03c814e760361fe8b0ffd373ead4124ace66ed33bb16d526ae1ecf/google_cloud_aiplatform-1.49.0-py2.py3-none-any.whl", hash = "sha256:8072d9e0c18d8942c704233d1a93b8d6312fc7b278786a283247950e28ae98df", size = 4914049, upload-time = "2024-04-29T17:25:27.625Z" },
{ url = "https://files.pythonhosted.org/packages/1d/20/a8a77dfdbf2a8169a3cce2d4e9cfbbfc168454ddd435891e59908ea8bf33/google_cloud_aiplatform-1.139.0-py2.py3-none-any.whl", hash = "sha256:3190b255cf510bce9e4b1adc8162ab0b3f9eca48801657d7af058d8e1d5ad9d0", size = 8209776, upload-time = "2026-02-25T00:51:03.526Z" },
]
[[package]]
@ -2454,6 +2460,27 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048, upload-time = "2025-03-26T14:41:46.696Z" },
]
[[package]]
name = "google-genai"
version = "1.65.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "distro" },
{ name = "google-auth", extra = ["requests"] },
{ name = "httpx" },
{ name = "pydantic" },
{ name = "requests" },
{ name = "sniffio" },
{ name = "tenacity" },
{ name = "typing-extensions" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/79/f9/cc1191c2540d6a4e24609a586c4ed45d2db57cfef47931c139ee70e5874a/google_genai-1.65.0.tar.gz", hash = "sha256:d470eb600af802d58a79c7f13342d9ea0d05d965007cae8f76c7adff3d7a4750", size = 497206, upload-time = "2026-02-26T00:20:33.824Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/3c/3fea4e7c91357c71782d7dcaad7a2577d636c90317e003386893c25bc62c/google_genai-1.65.0-py3-none-any.whl", hash = "sha256:68c025205856919bc03edb0155c11b4b833810b7ce17ad4b7a9eeba5158f6c44", size = 724429, upload-time = "2026-02-26T00:20:32.186Z" },
]
[[package]]
name = "google-resumable-media"
version = "2.8.0"
@@ -2468,14 +2495,14 @@ wheels = [
[[package]]
name = "googleapis-common-protos"
version = "1.63.0"
version = "1.72.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "protobuf" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d2/dc/291cebf3c73e108ef8210f19cb83d671691354f4f7dd956445560d778715/googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e", size = 121646, upload-time = "2024-03-11T12:33:15.765Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/dc/a6/12a0c976140511d8bc8a16ad15793b2aef29ac927baa0786ccb7ddbb6e1c/googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632", size = 229141, upload-time = "2024-03-11T12:33:14.052Z" },
{ url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" },
]
[package.optional-dependencies]
@@ -2665,31 +2692,35 @@ wheels = [
[[package]]
name = "grpcio-tools"
version = "1.62.3"
version = "1.71.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "grpcio" },
{ name = "protobuf" },
{ name = "setuptools" },
]
sdist = { url = "https://files.pythonhosted.org/packages/54/fa/b69bd8040eafc09b88bb0ec0fea59e8aacd1a801e688af087cead213b0d0/grpcio-tools-1.62.3.tar.gz", hash = "sha256:7c7136015c3d62c3eef493efabaf9e3380e3e66d24ee8e94c01cb71377f57833", size = 4538520, upload-time = "2024-08-06T00:37:11.035Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ad/9a/edfefb47f11ef6b0f39eea4d8f022c5bb05ac1d14fcc7058e84a51305b73/grpcio_tools-1.71.2.tar.gz", hash = "sha256:b5304d65c7569b21270b568e404a5a843cf027c66552a6a0978b23f137679c09", size = 5330655, upload-time = "2025-06-28T04:22:00.308Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/23/52/2dfe0a46b63f5ebcd976570aa5fc62f793d5a8b169e211c6a5aede72b7ae/grpcio_tools-1.62.3-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:703f46e0012af83a36082b5f30341113474ed0d91e36640da713355cd0ea5d23", size = 5147623, upload-time = "2024-08-06T00:30:54.894Z" },
{ url = "https://files.pythonhosted.org/packages/f0/2e/29fdc6c034e058482e054b4a3c2432f84ff2e2765c1342d4f0aa8a5c5b9a/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:7cc83023acd8bc72cf74c2edbe85b52098501d5b74d8377bfa06f3e929803492", size = 2719538, upload-time = "2024-08-06T00:30:57.928Z" },
{ url = "https://files.pythonhosted.org/packages/f9/60/abe5deba32d9ec2c76cdf1a2f34e404c50787074a2fee6169568986273f1/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ff7d58a45b75df67d25f8f144936a3e44aabd91afec833ee06826bd02b7fbe7", size = 3070964, upload-time = "2024-08-06T00:31:00.267Z" },
{ url = "https://files.pythonhosted.org/packages/bc/ad/e2b066684c75f8d9a48508cde080a3a36618064b9cadac16d019ca511444/grpcio_tools-1.62.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f2483ea232bd72d98a6dc6d7aefd97e5bc80b15cd909b9e356d6f3e326b6e43", size = 2805003, upload-time = "2024-08-06T00:31:02.565Z" },
{ url = "https://files.pythonhosted.org/packages/9c/3f/59bf7af786eae3f9d24ee05ce75318b87f541d0950190ecb5ffb776a1a58/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:962c84b4da0f3b14b3cdb10bc3837ebc5f136b67d919aea8d7bb3fd3df39528a", size = 3685154, upload-time = "2024-08-06T00:31:05.339Z" },
{ url = "https://files.pythonhosted.org/packages/f1/79/4dd62478b91e27084c67b35a2316ce8a967bd8b6cb8d6ed6c86c3a0df7cb/grpcio_tools-1.62.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8ad0473af5544f89fc5a1ece8676dd03bdf160fb3230f967e05d0f4bf89620e3", size = 3297942, upload-time = "2024-08-06T00:31:08.456Z" },
{ url = "https://files.pythonhosted.org/packages/b8/cb/86449ecc58bea056b52c0b891f26977afc8c4464d88c738f9648da941a75/grpcio_tools-1.62.3-cp311-cp311-win32.whl", hash = "sha256:db3bc9fa39afc5e4e2767da4459df82b095ef0cab2f257707be06c44a1c2c3e5", size = 910231, upload-time = "2024-08-06T00:31:11.464Z" },
{ url = "https://files.pythonhosted.org/packages/45/a4/9736215e3945c30ab6843280b0c6e1bff502910156ea2414cd77fbf1738c/grpcio_tools-1.62.3-cp311-cp311-win_amd64.whl", hash = "sha256:e0898d412a434e768a0c7e365acabe13ff1558b767e400936e26b5b6ed1ee51f", size = 1052496, upload-time = "2024-08-06T00:31:13.665Z" },
{ url = "https://files.pythonhosted.org/packages/2a/a5/d6887eba415ce318ae5005e8dfac3fa74892400b54b6d37b79e8b4f14f5e/grpcio_tools-1.62.3-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:d102b9b21c4e1e40af9a2ab3c6d41afba6bd29c0aa50ca013bf85c99cdc44ac5", size = 5147690, upload-time = "2024-08-06T00:31:16.436Z" },
{ url = "https://files.pythonhosted.org/packages/8a/7c/3cde447a045e83ceb4b570af8afe67ffc86896a2fe7f59594dc8e5d0a645/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:0a52cc9444df978438b8d2332c0ca99000521895229934a59f94f37ed896b133", size = 2720538, upload-time = "2024-08-06T00:31:18.905Z" },
{ url = "https://files.pythonhosted.org/packages/88/07/f83f2750d44ac4f06c07c37395b9c1383ef5c994745f73c6bfaf767f0944/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141d028bf5762d4a97f981c501da873589df3f7e02f4c1260e1921e565b376fa", size = 3071571, upload-time = "2024-08-06T00:31:21.684Z" },
{ url = "https://files.pythonhosted.org/packages/37/74/40175897deb61e54aca716bc2e8919155b48f33aafec8043dda9592d8768/grpcio_tools-1.62.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47a5c093ab256dec5714a7a345f8cc89315cb57c298b276fa244f37a0ba507f0", size = 2806207, upload-time = "2024-08-06T00:31:24.208Z" },
{ url = "https://files.pythonhosted.org/packages/ec/ee/d8de915105a217cbcb9084d684abdc032030dcd887277f2ef167372287fe/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6831fdec2b853c9daa3358535c55eed3694325889aa714070528cf8f92d7d6d", size = 3685815, upload-time = "2024-08-06T00:31:26.917Z" },
{ url = "https://files.pythonhosted.org/packages/fd/d9/4360a6c12be3d7521b0b8c39e5d3801d622fbb81cc2721dbd3eee31e28c8/grpcio_tools-1.62.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e02d7c1a02e3814c94ba0cfe43d93e872c758bd8fd5c2797f894d0c49b4a1dfc", size = 3298378, upload-time = "2024-08-06T00:31:30.401Z" },
{ url = "https://files.pythonhosted.org/packages/29/3b/7cdf4a9e5a3e0a35a528b48b111355cd14da601413a4f887aa99b6da468f/grpcio_tools-1.62.3-cp312-cp312-win32.whl", hash = "sha256:b881fd9505a84457e9f7e99362eeedd86497b659030cf57c6f0070df6d9c2b9b", size = 910416, upload-time = "2024-08-06T00:31:33.118Z" },
{ url = "https://files.pythonhosted.org/packages/6c/66/dd3ec249e44c1cc15e902e783747819ed41ead1336fcba72bf841f72c6e9/grpcio_tools-1.62.3-cp312-cp312-win_amd64.whl", hash = "sha256:11c625eebefd1fd40a228fc8bae385e448c7e32a6ae134e43cf13bbc23f902b7", size = 1052856, upload-time = "2024-08-06T00:31:36.519Z" },
{ url = "https://files.pythonhosted.org/packages/17/e4/0568d38b8da6237ea8ea15abb960fb7ab83eb7bb51e0ea5926dab3d865b1/grpcio_tools-1.71.2-cp311-cp311-linux_armv7l.whl", hash = "sha256:0acb8151ea866be5b35233877fbee6445c36644c0aa77e230c9d1b46bf34b18b", size = 2385557, upload-time = "2025-06-28T04:20:54.323Z" },
{ url = "https://files.pythonhosted.org/packages/76/fb/700d46f72b0f636cf0e625f3c18a4f74543ff127471377e49a071f64f1e7/grpcio_tools-1.71.2-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:b28f8606f4123edb4e6da281547465d6e449e89f0c943c376d1732dc65e6d8b3", size = 5447590, upload-time = "2025-06-28T04:20:55.836Z" },
{ url = "https://files.pythonhosted.org/packages/12/69/d9bb2aec3de305162b23c5c884b9f79b1a195d42b1e6dabcc084cc9d0804/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:cbae6f849ad2d1f5e26cd55448b9828e678cb947fa32c8729d01998238266a6a", size = 2348495, upload-time = "2025-06-28T04:20:57.33Z" },
{ url = "https://files.pythonhosted.org/packages/d5/83/f840aba1690461b65330efbca96170893ee02fae66651bcc75f28b33a46c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4d1027615cfb1e9b1f31f2f384251c847d68c2f3e025697e5f5c72e26ed1316", size = 2742333, upload-time = "2025-06-28T04:20:59.051Z" },
{ url = "https://files.pythonhosted.org/packages/30/34/c02cd9b37de26045190ba665ee6ab8597d47f033d098968f812d253bbf8c/grpcio_tools-1.71.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bac95662dc69338edb9eb727cc3dd92342131b84b12b3e8ec6abe973d4cbf1b", size = 2473490, upload-time = "2025-06-28T04:21:00.614Z" },
{ url = "https://files.pythonhosted.org/packages/4d/c7/375718ae091c8f5776828ce97bdcb014ca26244296f8b7f70af1a803ed2f/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c50250c7248055040f89eb29ecad39d3a260a4b6d3696af1575945f7a8d5dcdc", size = 2850333, upload-time = "2025-06-28T04:21:01.95Z" },
{ url = "https://files.pythonhosted.org/packages/19/37/efc69345bd92a73b2bc80f4f9e53d42dfdc234b2491ae58c87da20ca0ea5/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6ab1ad955e69027ef12ace4d700c5fc36341bdc2f420e87881e9d6d02af3d7b8", size = 3300748, upload-time = "2025-06-28T04:21:03.451Z" },
{ url = "https://files.pythonhosted.org/packages/d2/1f/15f787eb25ae42086f55ed3e4260e85f385921c788debf0f7583b34446e3/grpcio_tools-1.71.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dd75dde575781262b6b96cc6d0b2ac6002b2f50882bf5e06713f1bf364ee6e09", size = 2913178, upload-time = "2025-06-28T04:21:04.879Z" },
{ url = "https://files.pythonhosted.org/packages/12/aa/69cb3a9dff7d143a05e4021c3c9b5cde07aacb8eb1c892b7c5b9fb4973e3/grpcio_tools-1.71.2-cp311-cp311-win32.whl", hash = "sha256:9a3cb244d2bfe0d187f858c5408d17cb0e76ca60ec9a274c8fd94cc81457c7fc", size = 946256, upload-time = "2025-06-28T04:21:06.518Z" },
{ url = "https://files.pythonhosted.org/packages/1e/df/fb951c5c87eadb507a832243942e56e67d50d7667b0e5324616ffd51b845/grpcio_tools-1.71.2-cp311-cp311-win_amd64.whl", hash = "sha256:00eb909997fd359a39b789342b476cbe291f4dd9c01ae9887a474f35972a257e", size = 1117661, upload-time = "2025-06-28T04:21:08.18Z" },
{ url = "https://files.pythonhosted.org/packages/9c/d3/3ed30a9c5b2424627b4b8411e2cd6a1a3f997d3812dbc6a8630a78bcfe26/grpcio_tools-1.71.2-cp312-cp312-linux_armv7l.whl", hash = "sha256:bfc0b5d289e383bc7d317f0e64c9dfb59dc4bef078ecd23afa1a816358fb1473", size = 2385479, upload-time = "2025-06-28T04:21:10.413Z" },
{ url = "https://files.pythonhosted.org/packages/54/61/e0b7295456c7e21ef777eae60403c06835160c8d0e1e58ebfc7d024c51d3/grpcio_tools-1.71.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b4669827716355fa913b1376b1b985855d5cfdb63443f8d18faf210180199006", size = 5431521, upload-time = "2025-06-28T04:21:12.261Z" },
{ url = "https://files.pythonhosted.org/packages/75/d7/7bcad6bcc5f5b7fab53e6bce5db87041f38ef3e740b1ec2d8c49534fa286/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:d4071f9b44564e3f75cdf0f05b10b3e8c7ea0ca5220acbf4dc50b148552eef2f", size = 2350289, upload-time = "2025-06-28T04:21:13.625Z" },
{ url = "https://files.pythonhosted.org/packages/b2/8a/e4c1c4cb8c9ff7f50b7b2bba94abe8d1e98ea05f52a5db476e7f1c1a3c70/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a28eda8137d587eb30081384c256f5e5de7feda34776f89848b846da64e4be35", size = 2743321, upload-time = "2025-06-28T04:21:15.007Z" },
{ url = "https://files.pythonhosted.org/packages/fd/aa/95bc77fda5c2d56fb4a318c1b22bdba8914d5d84602525c99047114de531/grpcio_tools-1.71.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b19c083198f5eb15cc69c0a2f2c415540cbc636bfe76cea268e5894f34023b40", size = 2474005, upload-time = "2025-06-28T04:21:16.443Z" },
{ url = "https://files.pythonhosted.org/packages/c9/ff/ca11f930fe1daa799ee0ce1ac9630d58a3a3deed3dd2f465edb9a32f299d/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:784c284acda0d925052be19053d35afbf78300f4d025836d424cf632404f676a", size = 2851559, upload-time = "2025-06-28T04:21:18.139Z" },
{ url = "https://files.pythonhosted.org/packages/64/10/c6fc97914c7e19c9bb061722e55052fa3f575165da9f6510e2038d6e8643/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:381e684d29a5d052194e095546eef067201f5af30fd99b07b5d94766f44bf1ae", size = 3300622, upload-time = "2025-06-28T04:21:20.291Z" },
{ url = "https://files.pythonhosted.org/packages/e5/d6/965f36cfc367c276799b730d5dd1311b90a54a33726e561393b808339b04/grpcio_tools-1.71.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3e4b4801fabd0427fc61d50d09588a01b1cfab0ec5e8a5f5d515fbdd0891fd11", size = 2913863, upload-time = "2025-06-28T04:21:22.196Z" },
{ url = "https://files.pythonhosted.org/packages/8d/f0/c05d5c3d0c1d79ac87df964e9d36f1e3a77b60d948af65bec35d3e5c75a3/grpcio_tools-1.71.2-cp312-cp312-win32.whl", hash = "sha256:84ad86332c44572305138eafa4cc30040c9a5e81826993eae8227863b700b490", size = 945744, upload-time = "2025-06-28T04:21:23.463Z" },
{ url = "https://files.pythonhosted.org/packages/e2/e9/c84c1078f0b7af7d8a40f5214a9bdd8d2a567ad6c09975e6e2613a08d29d/grpcio_tools-1.71.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e1108d37eecc73b1c4a27350a6ed921b5dda25091700c1da17cfe30761cd462", size = 1117695, upload-time = "2025-06-28T04:21:25.22Z" },
]
[[package]]
@@ -2846,18 +2877,17 @@ wheels = [
[[package]]
name = "httpx"
version = "0.27.2"
version = "0.28.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "certifi" },
{ name = "httpcore" },
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189, upload-time = "2024-08-27T12:54:01.334Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395, upload-time = "2024-08-27T12:53:59.653Z" },
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
]
[package.optional-dependencies]
@@ -3941,59 +3971,59 @@ wheels = [
[[package]]
name = "opentelemetry-api"
version = "1.27.0"
version = "1.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecated" },
{ name = "importlib-metadata" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/83/93114b6de85a98963aec218a51509a52ed3f8de918fe91eb0f7299805c3f/opentelemetry_api-1.27.0.tar.gz", hash = "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342", size = 62693, upload-time = "2024-08-28T21:35:31.445Z" }
sdist = { url = "https://files.pythonhosted.org/packages/79/36/260eaea0f74fdd0c0d8f22ed3a3031109ea1c85531f94f4fde266c29e29a/opentelemetry_api-1.28.0.tar.gz", hash = "sha256:578610bcb8aa5cdcb11169d136cc752958548fb6ccffb0969c1036b0ee9e5353", size = 62803, upload-time = "2024-11-05T19:14:45.497Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/1f/737dcdbc9fea2fa96c1b392ae47275165a7c641663fbb08a8d252968eed2/opentelemetry_api-1.27.0-py3-none-any.whl", hash = "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", size = 63970, upload-time = "2024-08-28T21:35:00.598Z" },
{ url = "https://files.pythonhosted.org/packages/22/e4/3b25d8b856791c04d8a62b1257b5fc09dc41a057800db06885af8ddcdce1/opentelemetry_api-1.28.0-py3-none-any.whl", hash = "sha256:8457cd2c59ea1bd0988560f021656cecd254ad7ef6be4ba09dbefeca2409ce52", size = 64314, upload-time = "2024-11-05T19:14:21.659Z" },
]
[[package]]
name = "opentelemetry-distro"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-sdk" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f4/09/423e17c439ed24c45110affe84aad886a536b7871a42637d2ad14a179b47/opentelemetry_distro-0.48b0.tar.gz", hash = "sha256:5cb15915780ac4972583286a56683d43bd4ca95371d72f5f3f179c8b0b2ddc91", size = 2556, upload-time = "2024-08-28T21:27:40.455Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4d/75/7cb7c33899e66bb366d40a889111a78c22df0951038b6699f1663e715a9f/opentelemetry_distro-0.49b0.tar.gz", hash = "sha256:1bafa274f9e83baa0d2a5d47ed02caffcf9bcca60107b389b145400d82b07513", size = 2560, upload-time = "2024-11-05T19:21:39.379Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/82/cf/fa9a5fe954f1942e03b319ae0e319ebc93d9f984b548bcd9b3f232a1434d/opentelemetry_distro-0.48b0-py3-none-any.whl", hash = "sha256:b2f8fce114325b020769af3b9bf503efb8af07efc190bd1b9deac7843171664a", size = 3321, upload-time = "2024-08-28T21:26:26.584Z" },
{ url = "https://files.pythonhosted.org/packages/4c/db/806172b6a4933966eee518db814b375e620602f7fe776b74ef795690f135/opentelemetry_distro-0.49b0-py3-none-any.whl", hash = "sha256:1af4074702f605ea210753dd41947dc2fd61b39724f23cdcf15d5654867cd3c2", size = 3318, upload-time = "2024-11-05T19:20:34.065Z" },
]
[[package]]
name = "opentelemetry-exporter-otlp"
version = "1.27.0"
version = "1.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-exporter-otlp-proto-grpc" },
{ name = "opentelemetry-exporter-otlp-proto-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fc/d3/8156cc14e8f4573a3572ee7f30badc7aabd02961a09acc72ab5f2c789ef1/opentelemetry_exporter_otlp-1.27.0.tar.gz", hash = "sha256:4a599459e623868cc95d933c301199c2367e530f089750e115599fccd67cb2a1", size = 6166, upload-time = "2024-08-28T21:35:33.746Z" }
sdist = { url = "https://files.pythonhosted.org/packages/eb/16/14e3fc163930ea68f0980a4cdd4ae5796e60aeb898965990e13263d64baf/opentelemetry_exporter_otlp-1.28.0.tar.gz", hash = "sha256:31ae7495831681dd3da34ac457f6970f147465ae4b9aae3a888d7a581c7cd868", size = 6170, upload-time = "2024-11-05T19:14:47.349Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/59/6d/95e1fc2c8d945a734db32e87a5aa7a804f847c1657a21351df9338bd1c9c/opentelemetry_exporter_otlp-1.27.0-py3-none-any.whl", hash = "sha256:7688791cbdd951d71eb6445951d1cfbb7b6b2d7ee5948fac805d404802931145", size = 7001, upload-time = "2024-08-28T21:35:04.02Z" },
{ url = "https://files.pythonhosted.org/packages/c2/82/3f521b3c1f2a411ed60a24a8c9f486c1beeaf8c6c55337c87d3ae1642151/opentelemetry_exporter_otlp-1.28.0-py3-none-any.whl", hash = "sha256:1fd02d70f2c1b7ac5579c81e78de4594b188d3317c8ceb69e8b53900fb7b40fd", size = 7024, upload-time = "2024-11-05T19:14:24.534Z" },
]
[[package]]
name = "opentelemetry-exporter-otlp-proto-common"
version = "1.27.0"
version = "1.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-proto" },
]
sdist = { url = "https://files.pythonhosted.org/packages/cd/2e/7eaf4ba595fb5213cf639c9158dfb64aacb2e4c7d74bfa664af89fa111f4/opentelemetry_exporter_otlp_proto_common-1.27.0.tar.gz", hash = "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8", size = 17860, upload-time = "2024-08-28T21:35:34.896Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c2/8d/5d411084ac441052f4c9bae03a1aec65ae5d16b439fea7b9c5ac3842c013/opentelemetry_exporter_otlp_proto_common-1.28.0.tar.gz", hash = "sha256:5fa0419b0c8e291180b0fc8430a20dd44a3f3236f8e0827992145914f273ec4f", size = 18505, upload-time = "2024-11-05T19:14:48.204Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/41/27/4610ab3d9bb3cde4309b6505f98b3aabca04a26aa480aa18cede23149837/opentelemetry_exporter_otlp_proto_common-1.27.0-py3-none-any.whl", hash = "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a", size = 17848, upload-time = "2024-08-28T21:35:05.412Z" },
{ url = "https://files.pythonhosted.org/packages/e1/72/3c44aabc74db325aaba09361b6a0d80f6d601f0ff86ecea8ee655c9538fc/opentelemetry_exporter_otlp_proto_common-1.28.0-py3-none-any.whl", hash = "sha256:467e6437d24e020156dffecece8c0a4471a8a60f6a34afeda7386df31a092410", size = 18403, upload-time = "2024-11-05T19:14:25.798Z" },
]
[[package]]
name = "opentelemetry-exporter-otlp-proto-grpc"
version = "1.27.0"
version = "1.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecated" },
@@ -4004,14 +4034,14 @@ dependencies = [
{ name = "opentelemetry-proto" },
{ name = "opentelemetry-sdk" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a1/d0/c1e375b292df26e0ffebf194e82cd197e4c26cc298582bda626ce3ce74c5/opentelemetry_exporter_otlp_proto_grpc-1.27.0.tar.gz", hash = "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f", size = 26244, upload-time = "2024-08-28T21:35:36.314Z" }
sdist = { url = "https://files.pythonhosted.org/packages/43/4d/f215162e58041afb4bdf5dbd0d8faf0b7fc9bf7b3d3fc0e44e06f9e7e869/opentelemetry_exporter_otlp_proto_grpc-1.28.0.tar.gz", hash = "sha256:47a11c19dc7f4289e220108e113b7de90d59791cb4c37fc29f69a6a56f2c3735", size = 26237, upload-time = "2024-11-05T19:14:49.026Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8d/80/32217460c2c64c0568cea38410124ff680a9b65f6732867bbf857c4d8626/opentelemetry_exporter_otlp_proto_grpc-1.27.0-py3-none-any.whl", hash = "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e", size = 18541, upload-time = "2024-08-28T21:35:06.493Z" },
{ url = "https://files.pythonhosted.org/packages/1d/b5/afabc8106abc0f9cfeecf5b3e682622b3e04bba1d9b967dbfcd91b9c4ebe/opentelemetry_exporter_otlp_proto_grpc-1.28.0-py3-none-any.whl", hash = "sha256:edbdc53e7783f88d4535db5807cb91bd7b1ec9e9b9cdbfee14cd378f29a3b328", size = 18532, upload-time = "2024-11-05T19:14:26.853Z" },
]
[[package]]
name = "opentelemetry-exporter-otlp-proto-http"
version = "1.27.0"
version = "1.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecated" },
@@ -4022,28 +4052,29 @@ dependencies = [
{ name = "opentelemetry-sdk" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/31/0a/f05c55e8913bf58a033583f2580a0ec31a5f4cf2beacc9e286dcb74d6979/opentelemetry_exporter_otlp_proto_http-1.27.0.tar.gz", hash = "sha256:2103479092d8eb18f61f3fbff084f67cc7f2d4a7d37e75304b8b56c1d09ebef5", size = 15059, upload-time = "2024-08-28T21:35:37.079Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/2a/555f2845928086cd51aa6941c7a546470805b68ed631ec139ce7d841763d/opentelemetry_exporter_otlp_proto_http-1.28.0.tar.gz", hash = "sha256:d83a9a03a8367ead577f02a64127d827c79567de91560029688dd5cfd0152a8e", size = 15051, upload-time = "2024-11-05T19:14:49.813Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2d/8d/4755884afc0b1db6000527cac0ca17273063b6142c773ce4ecd307a82e72/opentelemetry_exporter_otlp_proto_http-1.27.0-py3-none-any.whl", hash = "sha256:688027575c9da42e179a69fe17e2d1eba9b14d81de8d13553a21d3114f3b4d75", size = 17203, upload-time = "2024-08-28T21:35:08.141Z" },
{ url = "https://files.pythonhosted.org/packages/b2/ce/80d5adabbf7ab4a0ca7b5e0f4039b24d273be370c3ba85fc05b13794411c/opentelemetry_exporter_otlp_proto_http-1.28.0-py3-none-any.whl", hash = "sha256:e8f3f7961b747edb6b44d51de4901a61e9c01d50debd747b120a08c4996c7e7b", size = 17228, upload-time = "2024-11-05T19:14:28.613Z" },
]
[[package]]
name = "opentelemetry-instrumentation"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "setuptools" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "packaging" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/04/0e/d9394839af5d55c8feb3b22cd11138b953b49739b20678ca96289e30f904/opentelemetry_instrumentation-0.48b0.tar.gz", hash = "sha256:94929685d906380743a71c3970f76b5f07476eea1834abd5dd9d17abfe23cc35", size = 24724, upload-time = "2024-08-28T21:27:42.82Z" }
sdist = { url = "https://files.pythonhosted.org/packages/de/6b/6c25b15063c92a011cf3f68375971e2c58a9c764690847edc97df2d94eeb/opentelemetry_instrumentation-0.49b0.tar.gz", hash = "sha256:398a93e0b9dc2d11cc8627e1761665c506fe08c6b2df252a2ab3ade53d751c46", size = 26478, upload-time = "2024-11-05T19:21:41.402Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/7f/405c41d4f359121376c9d5117dcf68149b8122d3f6c718996d037bd4d800/opentelemetry_instrumentation-0.48b0-py3-none-any.whl", hash = "sha256:a69750dc4ba6a5c3eb67986a337185a25b739966d80479befe37b546fc870b44", size = 29449, upload-time = "2024-08-28T21:26:31.288Z" },
{ url = "https://files.pythonhosted.org/packages/93/61/e0d21e958d6072ce25c4f5e26a1d22835fc86f80836660adf6badb6038ce/opentelemetry_instrumentation-0.49b0-py3-none-any.whl", hash = "sha256:68364d73a1ff40894574cbc6138c5f98674790cae1f3b0865e21cf702f24dcb3", size = 30694, upload-time = "2024-11-05T19:20:38.584Z" },
]
[[package]]
name = "opentelemetry-instrumentation-asgi"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "asgiref" },
@@ -4052,28 +4083,28 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/44/ac/fd3d40bab3234ec3f5c052a815100676baaae1832fa1067935f11e5c59c6/opentelemetry_instrumentation_asgi-0.48b0.tar.gz", hash = "sha256:04c32174b23c7fa72ddfe192dad874954968a6a924608079af9952964ecdf785", size = 23435, upload-time = "2024-08-28T21:27:47.276Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/55/693c3d0938ba5fead5c3aa4ac7022a992b4ff99a8e9979800d0feb843ff4/opentelemetry_instrumentation_asgi-0.49b0.tar.gz", hash = "sha256:959fd9b1345c92f20c6ef1d42f92ef6a76b3c3083fbc4104d59da6859b15b083", size = 24117, upload-time = "2024-11-05T19:21:46.769Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/db/74/a0e0d38622856597dd8e630f2bd793760485eb165708e11b8be1696bbb5a/opentelemetry_instrumentation_asgi-0.48b0-py3-none-any.whl", hash = "sha256:ddb1b5fc800ae66e85a4e2eca4d9ecd66367a8c7b556169d9e7b57e10676e44d", size = 15958, upload-time = "2024-08-28T21:26:38.139Z" },
{ url = "https://files.pythonhosted.org/packages/2c/0b/7900c782a1dfaa584588d724bc3bbdf8405a32497537dd96b3fcbf8461b9/opentelemetry_instrumentation_asgi-0.49b0-py3-none-any.whl", hash = "sha256:722a90856457c81956c88f35a6db606cc7db3231046b708aae2ddde065723dbe", size = 16326, upload-time = "2024-11-05T19:20:46.176Z" },
]
[[package]]
name = "opentelemetry-instrumentation-celery"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-semantic-conventions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/42/68/72975eff50cc22d8f65f96c425a2e8844f91488e78ffcfb603ac7cee0e5a/opentelemetry_instrumentation_celery-0.48b0.tar.gz", hash = "sha256:1d33aa6c4a1e6c5d17a64215245208a96e56c9d07611685dbae09a557704af26", size = 14445, upload-time = "2024-08-28T21:27:56.392Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4c/8b/9b8a9dda3ed53354c6f707a45cdb7a4730e1c109b50fc1b413525493f811/opentelemetry_instrumentation_celery-0.49b0.tar.gz", hash = "sha256:afbaee97cc9c75f29bcc9784f16f8e37c415d4fe9b334748c5b90a3d30d12473", size = 14702, upload-time = "2024-11-05T19:21:53.672Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/28/59/f09e8f9f596d375fd86b7677751525bbc485c8cc8c5388e39786a3d3b968/opentelemetry_instrumentation_celery-0.48b0-py3-none-any.whl", hash = "sha256:c1904e38cc58fb2a33cd657d6e296285c5ffb0dca3f164762f94b905e5abc88e", size = 13697, upload-time = "2024-08-28T21:26:50.01Z" },
{ url = "https://files.pythonhosted.org/packages/21/8c/d7d4adb36abbc0e517a69f7a069f32742122ae22d6017202f64570d9f4c5/opentelemetry_instrumentation_celery-0.49b0-py3-none-any.whl", hash = "sha256:38d4a78c78f33020032ef77ef0ead756bdf7838bcfb603de10f5925d39f14929", size = 13749, upload-time = "2024-11-05T19:20:54.98Z" },
]
[[package]]
name = "opentelemetry-instrumentation-fastapi"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@@ -4082,17 +4113,16 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/58/20/43477da5850ef2cd3792715d442aecd051e885e0603b6ee5783b2104ba8f/opentelemetry_instrumentation_fastapi-0.48b0.tar.gz", hash = "sha256:21a72563ea412c0b535815aeed75fc580240f1f02ebc72381cfab672648637a2", size = 18497, upload-time = "2024-08-28T21:28:01.14Z" }
sdist = { url = "https://files.pythonhosted.org/packages/fe/bf/8e6d2a4807360f2203192017eb4845f5628dbeaf0597adf3d141cc5c24e1/opentelemetry_instrumentation_fastapi-0.49b0.tar.gz", hash = "sha256:6d14935c41fd3e49328188b6a59dd4c37bd17a66b01c15b0c64afa9714a1f905", size = 19230, upload-time = "2024-11-05T19:21:59.361Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/50/745ab075a3041b7a5f29a579d2c28eaad54f64b4589d8f9fd364c62cf0f3/opentelemetry_instrumentation_fastapi-0.48b0-py3-none-any.whl", hash = "sha256:afeb820a59e139d3e5d96619600f11ce0187658b8ae9e3480857dd790bc024f2", size = 11777, upload-time = "2024-08-28T21:26:57.457Z" },
{ url = "https://files.pythonhosted.org/packages/b1/f4/0895b9410c10abf987c90dee1b7688a8f2214a284fe15e575648f6a1473a/opentelemetry_instrumentation_fastapi-0.49b0-py3-none-any.whl", hash = "sha256:646e1b18523cbe6860ae9711eb2c7b9c85466c3c7697cd6b8fb5180d85d3fe6e", size = 12101, upload-time = "2024-11-05T19:21:01.805Z" },
]
[[package]]
name = "opentelemetry-instrumentation-flask"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "importlib-metadata" },
{ name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-instrumentation-wsgi" },
@@ -4100,29 +4130,30 @@ dependencies = [
{ name = "opentelemetry-util-http" },
{ name = "packaging" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ed/2f/5c3af780a69f9ba78445fe0e5035c41f67281a31b08f3c3e7ec460bda726/opentelemetry_instrumentation_flask-0.48b0.tar.gz", hash = "sha256:e03a34428071aebf4864ea6c6a564acef64f88c13eb3818e64ea90da61266c3d", size = 19196, upload-time = "2024-08-28T21:28:01.986Z" }
sdist = { url = "https://files.pythonhosted.org/packages/17/12/dc72873fb1e35699941d8eb6a53ef25e8c5843dea37665dad33bd720f047/opentelemetry_instrumentation_flask-0.49b0.tar.gz", hash = "sha256:f7c5ab67753c4781a2e21c8f43dc5fc02ece74fdd819466c75d025db80aa7576", size = 19176, upload-time = "2024-11-05T19:22:00.816Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/3d/fcde4f8f0bf9fa1ee73a12304fa538076fb83fe0a2ae966ab0f0b7da5109/opentelemetry_instrumentation_flask-0.48b0-py3-none-any.whl", hash = "sha256:26b045420b9d76e85493b1c23fcf27517972423480dc6cf78fd6924248ba5808", size = 14588, upload-time = "2024-08-28T21:26:58.504Z" },
{ url = "https://files.pythonhosted.org/packages/a2/fc/354da8f33ef0daebfc8e4eac995d342ae13a35097bbad512cfe0d2f3c61a/opentelemetry_instrumentation_flask-0.49b0-py3-none-any.whl", hash = "sha256:f3ef330c3cee3e2c161f27f1e7017c8800b9bfb6f9204f2f7bfb0b274874be0e", size = 14582, upload-time = "2024-11-05T19:21:02.793Z" },
]
[[package]]
name = "opentelemetry-instrumentation-httpx"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-instrumentation" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d3/d9/c65d818607c16d1b7ea8d2de6111c6cecadf8d2fd38c1885a72733a7c6d3/opentelemetry_instrumentation_httpx-0.48b0.tar.gz", hash = "sha256:ee977479e10398931921fb995ac27ccdeea2e14e392cb27ef012fc549089b60a", size = 16931, upload-time = "2024-08-28T21:28:03.794Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a0/53/8b5e05e55a513d846ead5afb0509bec37a34a1c3e82f30b13d14156334b1/opentelemetry_instrumentation_httpx-0.49b0.tar.gz", hash = "sha256:07165b624f3e58638cee47ecf1c81939a8c2beb7e42ce9f69e25a9f21dc3f4cf", size = 17750, upload-time = "2024-11-05T19:22:02.911Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/fe/f2daa9d6d988c093b8c7b1d35df675761a8ece0b600b035dc04982746c9d/opentelemetry_instrumentation_httpx-0.48b0-py3-none-any.whl", hash = "sha256:d94f9d612c82d09fe22944d1904a30a464c19bea2ba76be656c99a28ad8be8e5", size = 13900, upload-time = "2024-08-28T21:27:01.566Z" },
{ url = "https://files.pythonhosted.org/packages/3b/9f/843391c6d645cd4f6914b27bc807fc1ff52b97f84cbe3ca675641976b23f/opentelemetry_instrumentation_httpx-0.49b0-py3-none-any.whl", hash = "sha256:e59e0d2fda5ef841630c68da1d78ff9192f63590a9099f12f0eab614abdf239a", size = 14110, upload-time = "2024-11-05T19:21:04.698Z" },
]
[[package]]
name = "opentelemetry-instrumentation-redis"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@@ -4130,14 +4161,14 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/70/be/92e98e4c7f275be3d373899a41b0a7d4df64266657d985dbbdb9a54de0d5/opentelemetry_instrumentation_redis-0.48b0.tar.gz", hash = "sha256:61e33e984b4120e1b980d9fba6e9f7ca0c8d972f9970654d8f6e9f27fa115a8c", size = 10511, upload-time = "2024-08-28T21:28:15.061Z" }
sdist = { url = "https://files.pythonhosted.org/packages/19/5b/1398eb2f92fd76787ccec28d24dc4c7dfaaf97a7557e7729e2f7c2c05d84/opentelemetry_instrumentation_redis-0.49b0.tar.gz", hash = "sha256:922542c3bd192ad4ba74e2c7e0a253c7c58a5cefbd6f89da2aba4d193a974703", size = 11353, upload-time = "2024-11-05T19:22:12.822Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/94/40/892f30d400091106309cc047fd3f6d76a828fedd984a953fd5386b78a2fb/opentelemetry_instrumentation_redis-0.48b0-py3-none-any.whl", hash = "sha256:48c7f2e25cbb30bde749dc0d8b9c74c404c851f554af832956b9630b27f5bcb7", size = 11610, upload-time = "2024-08-28T21:27:18.759Z" },
{ url = "https://files.pythonhosted.org/packages/24/e4/4f258fef0759629f2e8a0210d5533cfef3ecad69ff35be044637a3e2783e/opentelemetry_instrumentation_redis-0.49b0-py3-none-any.whl", hash = "sha256:b7d8f758bac53e77b7e7ca98ce80f91230577502dacb619ebe8e8b6058042067", size = 12453, upload-time = "2024-11-05T19:21:18.534Z" },
]
[[package]]
name = "opentelemetry-instrumentation-sqlalchemy"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@@ -4146,14 +4177,14 @@ dependencies = [
{ name = "packaging" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4c/77/3fcebbca8bd729da50dc2130d8ca869a235aa5483a85ef06c5dc8643476b/opentelemetry_instrumentation_sqlalchemy-0.48b0.tar.gz", hash = "sha256:dbf2d5a755b470e64e5e2762b56f8d56313787e4c7d71a87fe25c33f48eb3493", size = 13194, upload-time = "2024-08-28T21:28:18.122Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a0/a7/24f6cce3808ae1802dd1b60d752fbab877db5655198929cf4ee8ea416923/opentelemetry_instrumentation_sqlalchemy-0.49b0.tar.gz", hash = "sha256:32658e520fc8b35823c722f5d8831d3a410b76dd2724adb2887befc041ddef04", size = 13194, upload-time = "2024-11-05T19:22:14.92Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/84/4b6f1e9e9f83a52d966e91963f5a8424edc4a3d5ea32854c96c2d1618284/opentelemetry_instrumentation_sqlalchemy-0.48b0-py3-none-any.whl", hash = "sha256:625848a34aa5770cb4b1dcdbd95afce4307a0230338711101325261d739f391f", size = 13360, upload-time = "2024-08-28T21:27:22.102Z" },
{ url = "https://files.pythonhosted.org/packages/ec/6b/a1a3685fed593282999cdc374ece15efbd56f8d774bd368bf7ff2cf5923c/opentelemetry_instrumentation_sqlalchemy-0.49b0-py3-none-any.whl", hash = "sha256:d854052d2b02cd0562e5628a514c8153fceada7f585137e173165dfd0a46ef6a", size = 13358, upload-time = "2024-11-05T19:21:23.654Z" },
]
[[package]]
name = "opentelemetry-instrumentation-wsgi"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
@@ -4161,70 +4192,70 @@ dependencies = [
{ name = "opentelemetry-semantic-conventions" },
{ name = "opentelemetry-util-http" },
]
sdist = { url = "https://files.pythonhosted.org/packages/de/a5/f45cdfba18f22aefd2378eac8c07c1f8c9656d6bf7ce315ced48c67f3437/opentelemetry_instrumentation_wsgi-0.48b0.tar.gz", hash = "sha256:1a1e752367b0df4397e0b835839225ef5c2c3c053743a261551af13434fc4d51", size = 17974, upload-time = "2024-08-28T21:28:24.902Z" }
sdist = { url = "https://files.pythonhosted.org/packages/17/2b/91b022b004ac9e9ab0eefd10bc4257975291f88adc81b4ef2c601ddb1adf/opentelemetry_instrumentation_wsgi-0.49b0.tar.gz", hash = "sha256:0812a02e132f8fc3d5c897bba84e530c37b85c315b199bb97ca6508279e7eb23", size = 17733, upload-time = "2024-11-05T19:22:24.3Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/87/fa420007e0ba7e8cd43799ab204717ab515f000236fa2726a6be3299efdd/opentelemetry_instrumentation_wsgi-0.48b0-py3-none-any.whl", hash = "sha256:c6051124d741972090fe94b2fa302555e1e2a22e9cdda32dd39ed49a5b34e0c6", size = 13691, upload-time = "2024-08-28T21:27:33.257Z" },
{ url = "https://files.pythonhosted.org/packages/02/1d/59979665778ed8c85bc31c92b75571cd7afb8e3322fb513c87fe1bad6d78/opentelemetry_instrumentation_wsgi-0.49b0-py3-none-any.whl", hash = "sha256:8869ccf96611827e4448417718920e9eec6d25bffb5bf72c7952c7346ec33fbc", size = 13699, upload-time = "2024-11-05T19:21:35.039Z" },
]
[[package]]
name = "opentelemetry-propagator-b3"
version = "1.27.0"
version = "1.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecated" },
{ name = "opentelemetry-api" },
]
sdist = { url = "https://files.pythonhosted.org/packages/53/a3/3ceeb5ff5a1906371834d5c594e24e5b84f35528d219054833deca4ac44c/opentelemetry_propagator_b3-1.27.0.tar.gz", hash = "sha256:39377b6aa619234e08fbc6db79bf880aff36d7e2761efa9afa28b78d5937308f", size = 9590, upload-time = "2024-08-28T21:35:43.971Z" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/1d/225ea036785119964509e92f4e1bc0313ba6ec790fbf51bd363abafeafae/opentelemetry_propagator_b3-1.28.0.tar.gz", hash = "sha256:cf6f0d2a1881c4858898be47e8a94b11bc5b16fc73b6c37ebfa2121c4825adc6", size = 9592, upload-time = "2024-11-05T19:14:57.193Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/03/3f/75ba77b8d9938bae575bc457a5c56ca2246ff5367b54c7d4252a31d1c91f/opentelemetry_propagator_b3-1.27.0-py3-none-any.whl", hash = "sha256:1dd75e9801ba02e870df3830097d35771a64c123127c984d9b05c352a35aa9cc", size = 8899, upload-time = "2024-08-28T21:35:18.317Z" },
{ url = "https://files.pythonhosted.org/packages/4e/fa/438d53d73a6c45df5d416b56dc371a65d0b07859bc107ab632349a079d4a/opentelemetry_propagator_b3-1.28.0-py3-none-any.whl", hash = "sha256:9f6923a5da56d7da6724e4fdd758a67ede2a2732efb929e538cf6fea337700c5", size = 8917, upload-time = "2024-11-05T19:14:37.317Z" },
]
[[package]]
name = "opentelemetry-proto"
version = "1.27.0"
version = "1.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "protobuf" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9a/59/959f0beea798ae0ee9c979b90f220736fbec924eedbefc60ca581232e659/opentelemetry_proto-1.27.0.tar.gz", hash = "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6", size = 34749, upload-time = "2024-08-28T21:35:45.839Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c9/63/ac4cef4d30ea0ca1d2153ad2fc62d91d1cf3b89b0e4e5cbd61a8c567885f/opentelemetry_proto-1.28.0.tar.gz", hash = "sha256:4a45728dfefa33f7908b828b9b7c9f2c6de42a05d5ec7b285662ddae71c4c870", size = 34331, upload-time = "2024-11-05T19:14:59.503Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/94/56/3d2d826834209b19a5141eed717f7922150224d1a982385d19a9444cbf8d/opentelemetry_proto-1.27.0-py3-none-any.whl", hash = "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace", size = 52464, upload-time = "2024-08-28T21:35:21.434Z" },
{ url = "https://files.pythonhosted.org/packages/86/94/c0b43d16e1d96ee1e699373aa59f14a3aa2e7126af3f11d6adc5dcc531cd/opentelemetry_proto-1.28.0-py3-none-any.whl", hash = "sha256:d5ad31b997846543b8e15504657d9a8cf1ad3c71dcbbb6c4799b1ab29e38f7f9", size = 55832, upload-time = "2024-11-05T19:14:40.446Z" },
]
[[package]]
name = "opentelemetry-sdk"
version = "1.27.0"
version = "1.28.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/82a6ac0f06590f3d72241a587cb8b0b751bd98728e896cc4cbd4847248e6/opentelemetry_sdk-1.27.0.tar.gz", hash = "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f", size = 145019, upload-time = "2024-08-28T21:35:46.708Z" }
sdist = { url = "https://files.pythonhosted.org/packages/0c/5b/a509ccab93eacc6044591d5ec437d8266e76f893d0389bbf7e5592c7da32/opentelemetry_sdk-1.28.0.tar.gz", hash = "sha256:41d5420b2e3fb7716ff4981b510d551eff1fc60eb5a95cf7335b31166812a893", size = 156155, upload-time = "2024-11-05T19:15:00.451Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/bd/a6602e71e315055d63b2ff07172bd2d012b4cba2d4e00735d74ba42fc4d6/opentelemetry_sdk-1.27.0-py3-none-any.whl", hash = "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", size = 110505, upload-time = "2024-08-28T21:35:24.769Z" },
{ url = "https://files.pythonhosted.org/packages/c3/fe/c8decbebb5660529f1d6ba65e50a45b1294022dfcba2968fc9c8697c42b2/opentelemetry_sdk-1.28.0-py3-none-any.whl", hash = "sha256:4b37da81d7fad67f6683c4420288c97f4ed0d988845d5886435f428ec4b8429a", size = 118692, upload-time = "2024-11-05T19:14:41.669Z" },
]
[[package]]
name = "opentelemetry-semantic-conventions"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "deprecated" },
{ name = "opentelemetry-api" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0a/89/1724ad69f7411772446067cdfa73b598694c8c91f7f8c922e344d96d81f9/opentelemetry_semantic_conventions-0.48b0.tar.gz", hash = "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", size = 89445, upload-time = "2024-08-28T21:35:47.673Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ee/c8/433b0e54143f8c9369f5c4a7a83e73eec7eb2ee7d0b7e81a9243e78c8e80/opentelemetry_semantic_conventions-0.49b0.tar.gz", hash = "sha256:dbc7b28339e5390b6b28e022835f9bac4e134a80ebf640848306d3c5192557e8", size = 95227, upload-time = "2024-11-05T19:15:01.443Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/7a/4f0063dbb0b6c971568291a8bc19a4ca70d3c185db2d956230dd67429dfc/opentelemetry_semantic_conventions-0.48b0-py3-none-any.whl", hash = "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f", size = 149685, upload-time = "2024-08-28T21:35:25.983Z" },
{ url = "https://files.pythonhosted.org/packages/25/05/20104df4ef07d3bf5c3fd6bcc796ef70ab4ea4309378a9ba57bc4b4d01fa/opentelemetry_semantic_conventions-0.49b0-py3-none-any.whl", hash = "sha256:0458117f6ead0b12e3221813e3e511d85698c31901cac84682052adb9c17c7cd", size = 159214, upload-time = "2024-11-05T19:14:43.047Z" },
]
[[package]]
name = "opentelemetry-util-http"
version = "0.48b0"
version = "0.49b0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d6/d7/185c494754340e0a3928fd39fde2616ee78f2c9d66253affaad62d5b7935/opentelemetry_util_http-0.48b0.tar.gz", hash = "sha256:60312015153580cc20f322e5cdc3d3ecad80a71743235bdb77716e742814623c", size = 7863, upload-time = "2024-08-28T21:28:27.266Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a3/99/377ef446928808211b127b9ab31c348bc465c8da4514ebeec6e4a3de3d21/opentelemetry_util_http-0.49b0.tar.gz", hash = "sha256:02928496afcffd58a7c15baf99d2cedae9b8325a8ac52b0d0877b2e8f936dd1b", size = 7863, upload-time = "2024-11-05T19:22:26.973Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ad/2e/36097c0a4d0115b8c7e377c90bab7783ac183bc5cb4071308f8959454311/opentelemetry_util_http-0.48b0-py3-none-any.whl", hash = "sha256:76f598af93aab50328d2a69c786beaedc8b6a7770f7a818cc307eb353debfffb", size = 6946, upload-time = "2024-08-28T21:27:37.975Z" },
{ url = "https://files.pythonhosted.org/packages/66/0e/ab0a89b315d0bacdd355a345bb69b20c50fc1f0804b52b56fe1c35a60e68/opentelemetry_util_http-0.49b0-py3-none-any.whl", hash = "sha256:8661bbd6aea1839badc44de067ec9c15c05eab05f729f496c856c50a1203caf1", size = 6945, upload-time = "2024-11-05T19:21:37.81Z" },
]
[[package]]
@@ -4433,15 +4464,15 @@ wheels = [
[[package]]
name = "pdfminer-six"
version = "20251230"
version = "20260107"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "charset-normalizer" },
{ name = "cryptography" },
]
sdist = { url = "https://files.pythonhosted.org/packages/46/9a/d79d8fa6d47a0338846bb558b39b9963b8eb2dfedec61867c138c1b17eeb/pdfminer_six-20251230.tar.gz", hash = "sha256:e8f68a14c57e00c2d7276d26519ea64be1b48f91db1cdc776faa80528ca06c1e", size = 8511285, upload-time = "2025-12-30T15:49:13.104Z" }
sdist = { url = "https://files.pythonhosted.org/packages/34/a4/5cec1112009f0439a5ca6afa8ace321f0ab2f48da3255b7a1c8953014670/pdfminer_six-20260107.tar.gz", hash = "sha256:96bfd431e3577a55a0efd25676968ca4ce8fd5b53f14565f85716ff363889602", size = 8512094, upload-time = "2026-01-07T13:29:12.937Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/65/d7/b288ea32deb752a09aab73c75e1e7572ab2a2b56c3124a5d1eb24c62ceb3/pdfminer_six-20251230-py3-none-any.whl", hash = "sha256:9ff2e3466a7dfc6de6fd779478850b6b7c2d9e9405aa2a5869376a822771f485", size = 6591909, upload-time = "2025-12-30T15:49:10.76Z" },
{ url = "https://files.pythonhosted.org/packages/20/8b/28c4eaec9d6b036a52cb44720408f26b1a143ca9bce76cc19e8f5de00ab4/pdfminer_six-20260107-py3-none-any.whl", hash = "sha256:366585ba97e80dffa8f00cebe303d2f381884d8637af4ce422f1df3ef38111a9", size = 6592252, upload-time = "2026-01-07T13:29:10.742Z" },
]
[[package]]
@@ -4670,16 +4701,16 @@ wheels = [
[[package]]
name = "protobuf"
version = "4.25.8"
version = "5.29.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/df/01/34c8d2b6354906d728703cb9d546a0e534de479e25f1b581e4094c4a85cc/protobuf-4.25.8.tar.gz", hash = "sha256:6135cf8affe1fc6f76cced2641e4ea8d3e59518d1f24ae41ba97bcad82d397cd", size = 380920, upload-time = "2025-05-28T14:22:25.153Z" }
sdist = { url = "https://files.pythonhosted.org/packages/7e/57/394a763c103e0edf87f0938dafcd918d53b4c011dfc5c8ae80f3b0452dbb/protobuf-5.29.6.tar.gz", hash = "sha256:da9ee6a5424b6b30fd5e45c5ea663aef540ca95f9ad99d1e887e819cdf9b8723", size = 425623, upload-time = "2026-02-04T22:54:40.584Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/45/ff/05f34305fe6b85bbfbecbc559d423a5985605cad5eda4f47eae9e9c9c5c5/protobuf-4.25.8-cp310-abi3-win32.whl", hash = "sha256:504435d831565f7cfac9f0714440028907f1975e4bed228e58e72ecfff58a1e0", size = 392745, upload-time = "2025-05-28T14:22:10.524Z" },
{ url = "https://files.pythonhosted.org/packages/08/35/8b8a8405c564caf4ba835b1fdf554da869954712b26d8f2a98c0e434469b/protobuf-4.25.8-cp310-abi3-win_amd64.whl", hash = "sha256:bd551eb1fe1d7e92c1af1d75bdfa572eff1ab0e5bf1736716814cdccdb2360f9", size = 413736, upload-time = "2025-05-28T14:22:13.156Z" },
{ url = "https://files.pythonhosted.org/packages/28/d7/ab27049a035b258dab43445eb6ec84a26277b16105b277cbe0a7698bdc6c/protobuf-4.25.8-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:ca809b42f4444f144f2115c4c1a747b9a404d590f18f37e9402422033e464e0f", size = 394537, upload-time = "2025-05-28T14:22:14.768Z" },
{ url = "https://files.pythonhosted.org/packages/bd/6d/a4a198b61808dd3d1ee187082ccc21499bc949d639feb948961b48be9a7e/protobuf-4.25.8-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:9ad7ef62d92baf5a8654fbb88dac7fa5594cfa70fd3440488a5ca3bfc6d795a7", size = 294005, upload-time = "2025-05-28T14:22:16.052Z" },
{ url = "https://files.pythonhosted.org/packages/d6/c6/c9deaa6e789b6fc41b88ccbdfe7a42d2b82663248b715f55aa77fbc00724/protobuf-4.25.8-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:83e6e54e93d2b696a92cad6e6efc924f3850f82b52e1563778dfab8b355101b0", size = 294924, upload-time = "2025-05-28T14:22:17.105Z" },
{ url = "https://files.pythonhosted.org/packages/0c/c1/6aece0ab5209981a70cd186f164c133fdba2f51e124ff92b73de7fd24d78/protobuf-4.25.8-py3-none-any.whl", hash = "sha256:15a0af558aa3b13efef102ae6e4f3efac06f1eea11afb3a57db2901447d9fb59", size = 156757, upload-time = "2025-05-28T14:22:24.135Z" },
{ url = "https://files.pythonhosted.org/packages/d4/88/9ee58ff7863c479d6f8346686d4636dd4c415b0cbeed7a6a7d0617639c2a/protobuf-5.29.6-cp310-abi3-win32.whl", hash = "sha256:62e8a3114992c7c647bce37dcc93647575fc52d50e48de30c6fcb28a6a291eb1", size = 423357, upload-time = "2026-02-04T22:54:25.805Z" },
{ url = "https://files.pythonhosted.org/packages/1c/66/2dc736a4d576847134fb6d80bd995c569b13cdc7b815d669050bf0ce2d2c/protobuf-5.29.6-cp310-abi3-win_amd64.whl", hash = "sha256:7e6ad413275be172f67fdee0f43484b6de5a904cc1c3ea9804cb6fe2ff366eda", size = 435175, upload-time = "2026-02-04T22:54:28.592Z" },
{ url = "https://files.pythonhosted.org/packages/06/db/49b05966fd208ae3f44dcd33837b6243b4915c57561d730a43f881f24dea/protobuf-5.29.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:b5a169e664b4057183a34bdc424540e86eea47560f3c123a0d64de4e137f9269", size = 418619, upload-time = "2026-02-04T22:54:30.266Z" },
{ url = "https://files.pythonhosted.org/packages/b7/d7/48cbf6b0c3c39761e47a99cb483405f0fde2be22cf00d71ef316ce52b458/protobuf-5.29.6-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:a8866b2cff111f0f863c1b3b9e7572dc7eaea23a7fae27f6fc613304046483e6", size = 320284, upload-time = "2026-02-04T22:54:31.782Z" },
{ url = "https://files.pythonhosted.org/packages/e3/dd/cadd6ec43069247d91f6345fa7a0d2858bef6af366dbd7ba8f05d2c77d3b/protobuf-5.29.6-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:e3387f44798ac1106af0233c04fb8abf543772ff241169946f698b3a9a3d3ab9", size = 320478, upload-time = "2026-02-04T22:54:32.909Z" },
{ url = "https://files.pythonhosted.org/packages/5a/cb/e3065b447186cb70aa65acc70c86baf482d82bf75625bf5a2c4f6919c6a3/protobuf-5.29.6-py3-none-any.whl", hash = "sha256:6b9edb641441b2da9fa8f428760fc136a49cf97a52076010cf22a2ff73438a86", size = 173126, upload-time = "2026-02-04T22:54:39.462Z" },
]
[[package]]
@@ -5762,33 +5793,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" },
]
[[package]]
name = "shapely"
version = "2.1.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" },
{ url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" },
{ url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" },
{ url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" },
{ url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" },
{ url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" },
{ url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" },
{ url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, upload-time = "2025-09-24T13:50:28.497Z" },
{ url = "https://files.pythonhosted.org/packages/24/c0/f3b6453cf2dfa99adc0ba6675f9aaff9e526d2224cbd7ff9c1a879238693/shapely-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fe2533caae6a91a543dec62e8360fe86ffcdc42a7c55f9dfd0128a977a896b94", size = 1833550, upload-time = "2025-09-24T13:50:30.019Z" },
{ url = "https://files.pythonhosted.org/packages/86/07/59dee0bc4b913b7ab59ab1086225baca5b8f19865e6101db9ebb7243e132/shapely-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ba4d1333cc0bc94381d6d4308d2e4e008e0bd128bdcff5573199742ee3634359", size = 1643556, upload-time = "2025-09-24T13:50:32.291Z" },
{ url = "https://files.pythonhosted.org/packages/26/29/a5397e75b435b9895cd53e165083faed5d12fd9626eadec15a83a2411f0f/shapely-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0bd308103340030feef6c111d3eb98d50dc13feea33affc8a6f9fa549e9458a3", size = 2988308, upload-time = "2025-09-24T13:50:33.862Z" },
{ url = "https://files.pythonhosted.org/packages/b9/37/e781683abac55dde9771e086b790e554811a71ed0b2b8a1e789b7430dd44/shapely-2.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1e7d4d7ad262a48bb44277ca12c7c78cb1b0f56b32c10734ec9a1d30c0b0c54b", size = 3099844, upload-time = "2025-09-24T13:50:35.459Z" },
{ url = "https://files.pythonhosted.org/packages/d8/f3/9876b64d4a5a321b9dc482c92bb6f061f2fa42131cba643c699f39317cb9/shapely-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e9eddfe513096a71896441a7c37db72da0687b34752c4e193577a145c71736fc", size = 3988842, upload-time = "2025-09-24T13:50:37.478Z" },
{ url = "https://files.pythonhosted.org/packages/d1/a0/704c7292f7014c7e74ec84eddb7b109e1fbae74a16deae9c1504b1d15565/shapely-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:980c777c612514c0cf99bc8a9de6d286f5e186dcaf9091252fcd444e5638193d", size = 4152714, upload-time = "2025-09-24T13:50:39.9Z" },
{ url = "https://files.pythonhosted.org/packages/53/46/319c9dc788884ad0785242543cdffac0e6530e4d0deb6c4862bc4143dcf3/shapely-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9111274b88e4d7b54a95218e243282709b330ef52b7b86bc6aaf4f805306f454", size = 1542745, upload-time = "2025-09-24T13:50:41.414Z" },
{ url = "https://files.pythonhosted.org/packages/ec/bf/cb6c1c505cb31e818e900b9312d514f381fbfa5c4363edfce0fcc4f8c1a4/shapely-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:743044b4cfb34f9a67205cee9279feaf60ba7d02e69febc2afc609047cb49179", size = 1722861, upload-time = "2025-09-24T13:50:43.35Z" },
]
[[package]]
name = "shellingham"
version = "1.5.4"
@@ -6814,12 +6818,12 @@ wheels = [
[[package]]
name = "unstructured"
version = "0.16.25"
version = "0.18.31"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "backoff" },
{ name = "beautifulsoup4" },
{ name = "chardet" },
{ name = "charset-normalizer" },
{ name = "dataclasses-json" },
{ name = "emoji" },
{ name = "filetype" },
@@ -6827,6 +6831,7 @@ dependencies = [
{ name = "langdetect" },
{ name = "lxml" },
{ name = "nltk" },
{ name = "numba" },
{ name = "numpy" },
{ name = "psutil" },
{ name = "python-iso639" },
@@ -6839,9 +6844,9 @@ dependencies = [
{ name = "unstructured-client" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/64/31/98c4c78e305d1294888adf87fd5ee30577a4c393951341ca32b43f167f1e/unstructured-0.16.25.tar.gz", hash = "sha256:73b9b0f51dbb687af572ecdb849a6811710b9cac797ddeab8ee80fa07d8aa5e6", size = 1683097, upload-time = "2025-03-07T11:19:39.507Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a9/5f/64285bd69a538bc28753f1423fcaa9d64cd79a9e7c097171b1f0d27e9cdb/unstructured-0.18.31.tar.gz", hash = "sha256:af4bbe32d1894ae6e755f0da6fc0dd307a1d0adeebe0e7cc6278f6cf744339ca", size = 1707700, upload-time = "2026-01-27T15:33:05.378Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/12/4f/ad08585b5c8a33c82ea119494c4d3023f4796958c56e668b15cc282ec0a0/unstructured-0.16.25-py3-none-any.whl", hash = "sha256:14719ccef2830216cf1c5bf654f75e2bf07b17ca5dcee9da5ac74618130fd337", size = 1769286, upload-time = "2025-03-07T11:19:37.299Z" },
{ url = "https://files.pythonhosted.org/packages/c8/4a/9c43f39d9e443c9bc3f2e379b305bca27110adc653b071221b3132c18de5/unstructured-0.18.31-py3-none-any.whl", hash = "sha256:fab4641176cb9b192ed38048758aa0d9843121d03626d18f42275afb31e5b2d3", size = 1794889, upload-time = "2026-01-27T15:33:03.136Z" },
]
[package.optional-dependencies]


@@ -109,6 +109,7 @@ const AgentTools: FC = () => {
tool_parameters: paramsWithDefaultValue,
notAuthor: !tool.is_team_authorization,
enabled: true,
type: tool.provider_type as CollectionType,
}
}
const handleSelectTool = (tool: ToolDefaultValue) => {


@@ -19,19 +19,21 @@ import { useBoolean, useSessionStorageState } from 'ahooks'
import * as React from 'react'
import { useCallback, useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useShallow } from 'zustand/react/shallow'
import Button from '@/app/components/base/button'
import Confirm from '@/app/components/base/confirm'
import { Generator } from '@/app/components/base/icons/src/vender/other'
import Loading from '@/app/components/base/loading'
import Modal from '@/app/components/base/modal'
import Toast from '@/app/components/base/toast'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import ModelParameterModal from '@/app/components/header/account-setting/model-provider-page/model-parameter-modal'
import { generateBasicAppFirstTimeRule, generateRule } from '@/service/debug'
import { useGenerateRuleTemplate } from '@/service/use-apps'
import { useStore } from '../../../store'
import IdeaOutput from './idea-output'
import InstructionEditorInBasic from './instruction-editor'
import InstructionEditorInWorkflow from './instruction-editor-in-workflow'
@@ -83,6 +85,9 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
onFinished,
}) => {
const { t } = useTranslation()
const { appDetail } = useStore(useShallow(state => ({
appDetail: state.appDetail,
})))
const localModel = localStorage.getItem('auto-gen-model')
? JSON.parse(localStorage.getItem('auto-gen-model') as string) as Model
: null
@@ -235,6 +240,7 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
instruction,
model_config: model,
no_variable: false,
app_id: appDetail?.id,
})
apiRes = {
...res,
@@ -256,6 +262,7 @@ const GetAutomaticRes: FC<IGetAutomaticResProps> = ({
instruction,
ideal_output: ideaOutput,
model_config: model,
app_id: appDetail?.id,
})
apiRes = res
if (error) {


@@ -1,4 +1,3 @@
import type { App } from '@/types/app'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { useRouter } from 'next/navigation'
import { afterAll, beforeEach, describe, expect, it, vi } from 'vitest'
@@ -14,8 +13,8 @@ import { getRedirection } from '@/utils/app-redirection'
import CreateAppModal from './index'
vi.mock('ahooks', () => ({
useDebounceFn: <T extends (...args: unknown[]) => unknown>(fn: T) => {
const run = (...args: Parameters<T>) => fn(...args)
useDebounceFn: (fn: (...args: any[]) => any) => {
const run = (...args: any[]) => fn(...args)
const cancel = vi.fn()
const flush = vi.fn()
return { run, cancel, flush }
@@ -84,7 +83,7 @@ describe('CreateAppModal', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUseRouter.mockReturnValue({ push: mockPush } as unknown as ReturnType<typeof useRouter>)
mockUseRouter.mockReturnValue({ push: mockPush } as any)
mockUseProviderContext.mockReturnValue({
plan: {
type: AppModeEnum.ADVANCED_CHAT,
@@ -93,10 +92,10 @@ describe('CreateAppModal', () => {
reset: {},
},
enableBilling: true,
} as unknown as ReturnType<typeof useProviderContext>)
} as any)
mockUseAppContext.mockReturnValue({
isCurrentWorkspaceEditor: true,
} as unknown as ReturnType<typeof useAppContext>)
} as any)
mockSetItem.mockClear()
Object.defineProperty(window, 'localStorage', {
value: {
@@ -119,8 +118,8 @@ describe('CreateAppModal', () => {
})
it('creates an app, notifies success, and fires callbacks', async () => {
const mockApp: Partial<App> = { id: 'app-1', mode: AppModeEnum.ADVANCED_CHAT }
mockCreateApp.mockResolvedValue(mockApp as App)
const mockApp = { id: 'app-1', mode: AppModeEnum.ADVANCED_CHAT }
mockCreateApp.mockResolvedValue(mockApp as any)
const { onClose, onSuccess } = renderModal()
const nameInput = screen.getByPlaceholderText('app.newApp.appNamePlaceholder')


@@ -216,22 +216,13 @@ describe('image-uploader utils', () => {
type FileCallback = (file: MockFile) => void
type EntriesCallback = (entries: FileSystemEntry[]) => void
// Helper to create mock FileSystemEntry with required properties
const createMockEntry = (props: {
isFile: boolean
isDirectory: boolean
name?: string
file?: (callback: FileCallback) => void
createReader?: () => { readEntries: (callback: EntriesCallback) => void }
}): FileSystemEntry => props as unknown as FileSystemEntry
it('should resolve with file array for file entry', async () => {
const mockFile: MockFile = { name: 'test.png' }
const mockEntry = createMockEntry({
const mockEntry = {
isFile: true,
isDirectory: false,
file: (callback: FileCallback) => callback(mockFile),
})
}
const result = await traverseFileEntry(mockEntry)
expect(result).toHaveLength(1)
@@ -241,11 +232,11 @@ describe('image-uploader utils', () => {
it('should resolve with file array with prefix for nested file', async () => {
const mockFile: MockFile = { name: 'test.png' }
const mockEntry = createMockEntry({
const mockEntry = {
isFile: true,
isDirectory: false,
file: (callback: FileCallback) => callback(mockFile),
})
}
const result = await traverseFileEntry(mockEntry, 'folder/')
expect(result).toHaveLength(1)
@@ -253,24 +244,24 @@ describe('image-uploader utils', () => {
})
it('should resolve empty array for unknown entry type', async () => {
const mockEntry = createMockEntry({
const mockEntry = {
isFile: false,
isDirectory: false,
})
}
const result = await traverseFileEntry(mockEntry)
expect(result).toEqual([])
})
it('should handle directory with no files', async () => {
const mockEntry = createMockEntry({
const mockEntry = {
isFile: false,
isDirectory: true,
name: 'empty-folder',
createReader: () => ({
readEntries: (callback: EntriesCallback) => callback([]),
}),
})
}
const result = await traverseFileEntry(mockEntry)
expect(result).toEqual([])
@@ -280,20 +271,20 @@ describe('image-uploader utils', () => {
const mockFile1: MockFile = { name: 'file1.png' }
const mockFile2: MockFile = { name: 'file2.png' }
const mockFileEntry1 = createMockEntry({
const mockFileEntry1 = {
isFile: true,
isDirectory: false,
file: (callback: FileCallback) => callback(mockFile1),
})
}
const mockFileEntry2 = createMockEntry({
const mockFileEntry2 = {
isFile: true,
isDirectory: false,
file: (callback: FileCallback) => callback(mockFile2),
})
}
let readCount = 0
const mockEntry = createMockEntry({
const mockEntry = {
isFile: false,
isDirectory: true,
name: 'folder',
@@ -301,14 +292,14 @@ describe('image-uploader utils', () => {
readEntries: (callback: EntriesCallback) => {
if (readCount === 0) {
readCount++
callback([mockFileEntry1, mockFileEntry2])
callback([mockFileEntry1, mockFileEntry2] as unknown as FileSystemEntry[])
}
else {
callback([])
}
},
}),
})
}
const result = await traverseFileEntry(mockEntry)
expect(result).toHaveLength(2)


@@ -18,17 +18,17 @@ type FileWithPath = {
relativePath?: string
} & File
export const traverseFileEntry = (entry: FileSystemEntry, prefix = ''): Promise<FileWithPath[]> => {
export const traverseFileEntry = (entry: any, prefix = ''): Promise<FileWithPath[]> => {
return new Promise((resolve) => {
if (entry.isFile) {
(entry as FileSystemFileEntry).file((file: FileWithPath) => {
entry.file((file: FileWithPath) => {
file.relativePath = `${prefix}${file.name}`
resolve([file])
})
}
else if (entry.isDirectory) {
const reader = (entry as FileSystemDirectoryEntry).createReader()
const entries: FileSystemEntry[] = []
const reader = entry.createReader()
const entries: any[] = []
const read = () => {
reader.readEntries(async (results: FileSystemEntry[]) => {
if (!results.length) {

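For orientation: traverseFileEntry is the recursive half of drag-and-drop handling, and both sides of this diff keep the same contract (a promise of files tagged with relativePath). A minimal caller sketch, assuming a DOM drop event (the handleDrop name and the flat() call are illustrative, not part of this diff):

const handleDrop = async (e: DragEvent): Promise<File[]> => {
  e.preventDefault()
  const items = Array.from(e.dataTransfer?.items ?? [])
  // webkitGetAsEntry() returns null for non-file items such as dragged text
  const entries = items
    .map(item => item.webkitGetAsEntry())
    .filter((entry): entry is FileSystemEntry => entry !== null)
  // Each entry resolves to a flat list of files; directories recurse internally
  const nested = await Promise.all(entries.map(entry => traverseFileEntry(entry)))
  return nested.flat()
}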

@@ -1,218 +0,0 @@
'use client'
import { useDebounceFn } from 'ahooks'
import { useRouter } from 'next/navigation'
import { useCallback, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { ToastContext } from '@/app/components/base/toast'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import { useImportPipelineDSL, useImportPipelineDSLConfirm } from '@/service/use-pipeline'
export enum CreateFromDSLModalTab {
FROM_FILE = 'from-file',
FROM_URL = 'from-url',
}
export type UseDSLImportOptions = {
activeTab?: CreateFromDSLModalTab
dslUrl?: string
onSuccess?: () => void
onClose?: () => void
}
export type DSLVersions = {
importedVersion: string
systemVersion: string
}
export const useDSLImport = ({
activeTab = CreateFromDSLModalTab.FROM_FILE,
dslUrl = '',
onSuccess,
onClose,
}: UseDSLImportOptions) => {
const { push } = useRouter()
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
const [currentTab, setCurrentTab] = useState(activeTab)
const [dslUrlValue, setDslUrlValue] = useState(dslUrl)
const [showConfirmModal, setShowConfirmModal] = useState(false)
const [versions, setVersions] = useState<DSLVersions>()
const [importId, setImportId] = useState<string>()
const [isConfirming, setIsConfirming] = useState(false)
const { handleCheckPluginDependencies } = usePluginDependencies()
const isCreatingRef = useRef(false)
const { mutateAsync: importDSL } = useImportPipelineDSL()
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
const readFile = useCallback((file: File) => {
const reader = new FileReader()
reader.onload = (event) => {
const content = event.target?.result
setFileContent(content as string)
}
reader.readAsText(file)
}, [])
const handleFile = useCallback((file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
if (!file)
setFileContent('')
}, [readFile])
const onCreate = useCallback(async () => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE && !currentFile)
return
if (currentTab === CreateFromDSLModalTab.FROM_URL && !dslUrlValue)
return
if (isCreatingRef.current)
return
isCreatingRef.current = true
let response
if (currentTab === CreateFromDSLModalTab.FROM_FILE) {
response = await importDSL({
mode: DSLImportMode.YAML_CONTENT,
yaml_content: fileContent || '',
})
}
if (currentTab === CreateFromDSLModalTab.FROM_URL) {
response = await importDSL({
mode: DSLImportMode.YAML_URL,
yaml_url: dslUrlValue || '',
})
}
if (!response) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
isCreatingRef.current = false
return
}
const { id, status, pipeline_id, dataset_id, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
onSuccess?.()
onClose?.()
notify({
type: status === DSLImportStatus.COMPLETED ? 'success' : 'warning',
message: t(status === DSLImportStatus.COMPLETED ? 'creation.successTip' : 'creation.caution', { ns: 'datasetPipeline' }),
children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('newApp.appCreateDSLWarning', { ns: 'app' }),
})
if (pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
push(`/datasets/${dataset_id}/pipeline`)
isCreatingRef.current = false
}
else if (status === DSLImportStatus.PENDING) {
setVersions({
importedVersion: imported_dsl_version ?? '',
systemVersion: current_dsl_version ?? '',
})
onClose?.()
setTimeout(() => {
setShowConfirmModal(true)
}, 300)
setImportId(id)
isCreatingRef.current = false
}
else {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
isCreatingRef.current = false
}
}, [
currentTab,
currentFile,
dslUrlValue,
fileContent,
importDSL,
notify,
t,
onSuccess,
onClose,
handleCheckPluginDependencies,
push,
])
const { run: handleCreateApp } = useDebounceFn(onCreate, { wait: 300 })
const onDSLConfirm = useCallback(async () => {
if (!importId)
return
setIsConfirming(true)
const response = await importDSLConfirm(importId)
setIsConfirming(false)
if (!response) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
return
}
const { status, pipeline_id, dataset_id } = response
if (status === DSLImportStatus.COMPLETED) {
onSuccess?.()
setShowConfirmModal(false)
notify({
type: 'success',
message: t('creation.successTip', { ns: 'datasetPipeline' }),
})
if (pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
push(`/datasets/${dataset_id}/pipeline`)
}
else if (status === DSLImportStatus.FAILED) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
}
}, [importId, importDSLConfirm, notify, t, onSuccess, handleCheckPluginDependencies, push])
const handleCancelConfirm = useCallback(() => {
setShowConfirmModal(false)
}, [])
const buttonDisabled = useMemo(() => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE)
return !currentFile
if (currentTab === CreateFromDSLModalTab.FROM_URL)
return !dslUrlValue
return false
}, [currentTab, currentFile, dslUrlValue])
return {
// State
currentFile,
currentTab,
dslUrlValue,
showConfirmModal,
versions,
buttonDisabled,
isConfirming,
// Actions
setCurrentTab,
setDslUrlValue,
handleFile,
handleCreateApp,
onDSLConfirm,
handleCancelConfirm,
}
}
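A minimal sketch of how a component could consume this hook (the component and element wiring here are illustrative, not from this diff):

const ImportPipelineButton = () => {
  const { currentFile, buttonDisabled, handleFile, handleCreateApp } = useDSLImport({
    activeTab: CreateFromDSLModalTab.FROM_FILE,
    onSuccess: () => console.log('pipeline imported'),
  })
  return (
    <>
      <input
        type="file"
        accept=".yml,.yaml"
        onChange={e => handleFile(e.target.files?.[0])}
      />
      <button disabled={buttonDisabled} onClick={handleCreateApp}>
        {currentFile ? `Import ${currentFile.name}` : 'Import'}
      </button>
    </>
  )
}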


@@ -1,18 +1,24 @@
'use client'
- import { useKeyPress } from 'ahooks'
+ import { useDebounceFn, useKeyPress } from 'ahooks'
import { noop } from 'es-toolkit/function'
import { useRouter } from 'next/navigation'
import { useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import Button from '@/app/components/base/button'
import Input from '@/app/components/base/input'
import Modal from '@/app/components/base/modal'
import DSLConfirmModal from './dsl-confirm-modal'
import { ToastContext } from '@/app/components/base/toast'
import { usePluginDependencies } from '@/app/components/workflow/plugin-dependency/hooks'
import {
DSLImportMode,
DSLImportStatus,
} from '@/models/app'
import { useImportPipelineDSL, useImportPipelineDSLConfirm } from '@/service/use-pipeline'
import Header from './header'
import { CreateFromDSLModalTab, useDSLImport } from './hooks/use-dsl-import'
import Tab from './tab'
import Uploader from './uploader'
export { CreateFromDSLModalTab }
type CreateFromDSLModalProps = {
show: boolean
onSuccess?: () => void
@@ -21,6 +27,11 @@ type CreateFromDSLModalProps = {
dslUrl?: string
}
export enum CreateFromDSLModalTab {
FROM_FILE = 'from-file',
FROM_URL = 'from-url',
}
const CreateFromDSLModal = ({
show,
onSuccess,
@@ -28,34 +39,150 @@ const CreateFromDSLModal = ({
activeTab = CreateFromDSLModalTab.FROM_FILE,
dslUrl = '',
}: CreateFromDSLModalProps) => {
const { push } = useRouter()
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const [currentFile, setDSLFile] = useState<File>()
const [fileContent, setFileContent] = useState<string>()
const [currentTab, setCurrentTab] = useState(activeTab)
const [dslUrlValue, setDslUrlValue] = useState(dslUrl)
const [showErrorModal, setShowErrorModal] = useState(false)
const [versions, setVersions] = useState<{ importedVersion: string, systemVersion: string }>()
const [importId, setImportId] = useState<string>()
const { handleCheckPluginDependencies } = usePluginDependencies()
const {
currentFile,
currentTab,
dslUrlValue,
showConfirmModal,
versions,
buttonDisabled,
isConfirming,
setCurrentTab,
setDslUrlValue,
handleFile,
handleCreateApp,
onDSLConfirm,
handleCancelConfirm,
} = useDSLImport({
activeTab,
dslUrl,
onSuccess,
onClose,
})
const readFile = (file: File) => {
const reader = new FileReader()
reader.onload = function (event) {
const content = event.target?.result
setFileContent(content as string)
}
reader.readAsText(file)
}
const handleFile = (file?: File) => {
setDSLFile(file)
if (file)
readFile(file)
if (!file)
setFileContent('')
}
const isCreatingRef = useRef(false)
const { mutateAsync: importDSL } = useImportPipelineDSL()
const onCreate = async () => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE && !currentFile)
return
if (currentTab === CreateFromDSLModalTab.FROM_URL && !dslUrlValue)
return
if (isCreatingRef.current)
return
isCreatingRef.current = true
let response
if (currentTab === CreateFromDSLModalTab.FROM_FILE) {
response = await importDSL({
mode: DSLImportMode.YAML_CONTENT,
yaml_content: fileContent || '',
})
}
if (currentTab === CreateFromDSLModalTab.FROM_URL) {
response = await importDSL({
mode: DSLImportMode.YAML_URL,
yaml_url: dslUrlValue || '',
})
}
if (!response) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
isCreatingRef.current = false
return
}
const { id, status, pipeline_id, dataset_id, imported_dsl_version, current_dsl_version } = response
if (status === DSLImportStatus.COMPLETED || status === DSLImportStatus.COMPLETED_WITH_WARNINGS) {
if (onSuccess)
onSuccess()
if (onClose)
onClose()
notify({
type: status === DSLImportStatus.COMPLETED ? 'success' : 'warning',
message: t(status === DSLImportStatus.COMPLETED ? 'creation.successTip' : 'creation.caution', { ns: 'datasetPipeline' }),
children: status === DSLImportStatus.COMPLETED_WITH_WARNINGS && t('newApp.appCreateDSLWarning', { ns: 'app' }),
})
if (pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
push(`/datasets/${dataset_id}/pipeline`)
isCreatingRef.current = false
}
else if (status === DSLImportStatus.PENDING) {
setVersions({
importedVersion: imported_dsl_version ?? '',
systemVersion: current_dsl_version ?? '',
})
if (onClose)
onClose()
setTimeout(() => {
setShowErrorModal(true)
}, 300)
setImportId(id)
isCreatingRef.current = false
}
else {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
isCreatingRef.current = false
}
}
const { run: handleCreateApp } = useDebounceFn(onCreate, { wait: 300 })
useKeyPress('esc', () => {
- if (show && !showConfirmModal)
+ if (show && !showErrorModal)
onClose()
})
const { mutateAsync: importDSLConfirm } = useImportPipelineDSLConfirm()
const onDSLConfirm = async () => {
if (!importId)
return
const response = await importDSLConfirm(importId)
if (!response) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
return
}
const { status, pipeline_id, dataset_id } = response
if (status === DSLImportStatus.COMPLETED) {
if (onSuccess)
onSuccess()
if (onClose)
onClose()
notify({
type: 'success',
message: t('creation.successTip', { ns: 'datasetPipeline' }),
})
if (pipeline_id)
await handleCheckPluginDependencies(pipeline_id, true)
push(`/datasets/${dataset_id}/pipeline`)
}
else if (status === DSLImportStatus.FAILED) {
notify({ type: 'error', message: t('creation.errorTip', { ns: 'datasetPipeline' }) })
}
}
const buttonDisabled = useMemo(() => {
if (currentTab === CreateFromDSLModalTab.FROM_FILE)
return !currentFile
if (currentTab === CreateFromDSLModalTab.FROM_URL)
return !dslUrlValue
return false
}, [currentTab, currentFile, dslUrlValue])
return (
<>
<Modal
@@ -69,25 +196,29 @@ const CreateFromDSLModal = ({
setCurrentTab={setCurrentTab}
/>
<div className="px-6 py-4">
- {currentTab === CreateFromDSLModalTab.FROM_FILE && (
-   <Uploader
-     className="mt-0"
-     file={currentFile}
-     updateFile={handleFile}
-   />
- )}
- {currentTab === CreateFromDSLModalTab.FROM_URL && (
-   <div>
-     <div className="system-md-semibold leading6 mb-1 text-text-secondary">
-       DSL URL
-     </div>
-     <Input
-       placeholder={t('importFromDSLUrlPlaceholder', { ns: 'app' }) || ''}
-       value={dslUrlValue}
-       onChange={e => setDslUrlValue(e.target.value)}
-     />
-   </div>
- )}
+ {
+   currentTab === CreateFromDSLModalTab.FROM_FILE && (
+     <Uploader
+       className="mt-0"
+       file={currentFile}
+       updateFile={handleFile}
+     />
+   )
+ }
+ {
+   currentTab === CreateFromDSLModalTab.FROM_URL && (
+     <div>
+       <div className="system-md-semibold leading6 mb-1 text-text-secondary">
+         DSL URL
+       </div>
+       <Input
+         placeholder={t('importFromDSLUrlPlaceholder', { ns: 'app' }) || ''}
+         value={dslUrlValue}
+         onChange={e => setDslUrlValue(e.target.value)}
+       />
+     </div>
+   )
+ }
</div>
<div className="flex justify-end gap-x-2 p-6 pt-5">
<Button onClick={onClose}>
@@ -103,14 +234,32 @@ const CreateFromDSLModal = ({
</Button>
</div>
</Modal>
{showConfirmModal && (
<DSLConfirmModal
versions={versions}
onCancel={handleCancelConfirm}
onConfirm={onDSLConfirm}
confirmDisabled={isConfirming}
/>
)}
<Modal
isShow={showErrorModal}
onClose={() => setShowErrorModal(false)}
className="w-[480px]"
>
<div className="flex flex-col items-start gap-2 self-stretch pb-4">
<div className="title-2xl-semi-bold text-text-primary">{t('newApp.appCreateDSLErrorTitle', { ns: 'app' })}</div>
<div className="system-md-regular flex grow flex-col text-text-secondary">
<div>{t('newApp.appCreateDSLErrorPart1', { ns: 'app' })}</div>
<div>{t('newApp.appCreateDSLErrorPart2', { ns: 'app' })}</div>
<br />
<div>
{t('newApp.appCreateDSLErrorPart3', { ns: 'app' })}
<span className="system-md-medium">{versions?.importedVersion}</span>
</div>
<div>
{t('newApp.appCreateDSLErrorPart4', { ns: 'app' })}
<span className="system-md-medium">{versions?.systemVersion}</span>
</div>
</div>
</div>
<div className="flex items-start justify-end gap-2 self-stretch pt-6">
<Button variant="secondary" onClick={() => setShowErrorModal(false)}>{t('newApp.Cancel', { ns: 'app' })}</Button>
<Button variant="primary" destructive onClick={onDSLConfirm}>{t('newApp.Confirm', { ns: 'app' })}</Button>
</div>
</Modal>
</>
)
}
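Both versions of this modal guard submission twice: useDebounceFn collapses rapid clicks into one onCreate call per 300 ms window, and isCreatingRef rejects re-entry while a request is in flight. A compact sketch of the same pattern (the useGuardedSubmit name is illustrative; the diff's own code resets the ref in each branch instead of using finally):

import { useRef } from 'react'
import { useDebounceFn } from 'ahooks'

const useGuardedSubmit = (submit: () => Promise<void>) => {
  const busyRef = useRef(false)
  const guarded = async () => {
    if (busyRef.current)
      return // drop re-entrant calls while a request is in flight
    busyRef.current = true
    try {
      await submit()
    }
    finally {
      busyRef.current = false
    }
  }
  // collapse bursts of clicks into a single trailing call
  return useDebounceFn(guarded, { wait: 300 }).run
}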


@@ -1,334 +0,0 @@
import type { FileListItemProps } from './file-list-item'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PROGRESS_COMPLETE, PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
import FileListItem from './file-list-item'
// Mock theme hook - can be changed per test
let mockTheme = 'light'
vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: mockTheme }),
}))
// Mock theme types
vi.mock('@/types/app', () => ({
Theme: { dark: 'dark', light: 'light' },
}))
// Mock SimplePieChart with dynamic import handling
vi.mock('next/dynamic', () => ({
default: () => {
const DynamicComponent = ({ percentage, stroke, fill }: { percentage: number, stroke: string, fill: string }) => (
<div data-testid="pie-chart" data-percentage={percentage} data-stroke={stroke} data-fill={fill}>
Pie Chart:
{' '}
{percentage}
%
</div>
)
DynamicComponent.displayName = 'SimplePieChart'
return DynamicComponent
},
}))
// Mock DocumentFileIcon
vi.mock('@/app/components/datasets/common/document-file-icon', () => ({
default: ({ name, extension, size }: { name: string, extension: string, size: string }) => (
<div data-testid="document-icon" data-name={name} data-extension={extension} data-size={size}>
Document Icon
</div>
),
}))
describe('FileListItem', () => {
const createMockFile = (overrides: Partial<File> = {}): File => ({
name: 'test-document.pdf',
size: 1024 * 100, // 100KB
type: 'application/pdf',
lastModified: Date.now(),
...overrides,
} as File)
const createMockFileItem = (overrides: Partial<FileItem> = {}): FileItem => ({
fileID: 'file-123',
file: createMockFile(overrides.file as Partial<File>),
progress: PROGRESS_NOT_STARTED,
...overrides,
})
const defaultProps: FileListItemProps = {
fileItem: createMockFileItem(),
onPreview: vi.fn(),
onRemove: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
mockTheme = 'light'
})
describe('rendering', () => {
it('should render the file item container', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('flex', 'h-12', 'items-center', 'rounded-lg')
})
it('should render document icon with correct props', () => {
render(<FileListItem {...defaultProps} />)
const icon = screen.getByTestId('document-icon')
expect(icon).toBeInTheDocument()
expect(icon).toHaveAttribute('data-name', 'test-document.pdf')
expect(icon).toHaveAttribute('data-extension', 'pdf')
expect(icon).toHaveAttribute('data-size', 'xl')
})
it('should render file name', () => {
render(<FileListItem {...defaultProps} />)
expect(screen.getByText('test-document.pdf')).toBeInTheDocument()
})
it('should render file extension in uppercase via CSS class', () => {
render(<FileListItem {...defaultProps} />)
const extensionSpan = screen.getByText('pdf')
expect(extensionSpan).toBeInTheDocument()
expect(extensionSpan).toHaveClass('uppercase')
})
it('should render file size', () => {
render(<FileListItem {...defaultProps} />)
// Default mock file is 100KB (1024 * 100 bytes)
expect(screen.getByText('100.00 KB')).toBeInTheDocument()
})
it('should render delete button', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const deleteButton = container.querySelector('.cursor-pointer')
expect(deleteButton).toBeInTheDocument()
})
})
describe('progress states', () => {
it('should show progress chart when uploading (0-99)', () => {
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toBeInTheDocument()
expect(pieChart).toHaveAttribute('data-percentage', '50')
})
it('should show progress chart at 0%', () => {
const fileItem = createMockFileItem({ progress: 0 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-percentage', '0')
})
it('should not show progress chart when complete (100)', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_COMPLETE })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
it('should not show progress chart when not started (-1)', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_NOT_STARTED })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('error state', () => {
it('should show error indicator when progress is PROGRESS_ERROR', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_ERROR })
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const errorIndicator = container.querySelector('.text-text-destructive')
expect(errorIndicator).toBeInTheDocument()
})
it('should not show error indicator when not in error state', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const errorIndicator = container.querySelector('.text-text-destructive')
expect(errorIndicator).not.toBeInTheDocument()
})
})
describe('theme handling', () => {
it('should use correct chart color for light theme', () => {
mockTheme = 'light'
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-stroke', '#296dff')
expect(pieChart).toHaveAttribute('data-fill', '#296dff')
})
it('should use correct chart color for dark theme', () => {
mockTheme = 'dark'
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-stroke', '#5289ff')
expect(pieChart).toHaveAttribute('data-fill', '#5289ff')
})
})
describe('event handlers', () => {
it('should call onPreview when item is clicked with file id', () => {
const onPreview = vi.fn()
const fileItem = createMockFileItem({
file: createMockFile({ id: 'uploaded-id' } as Partial<File>),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} onPreview={onPreview} />)
const item = screen.getByText('test-document.pdf').closest('[class*="flex h-12"]')!
fireEvent.click(item)
expect(onPreview).toHaveBeenCalledTimes(1)
expect(onPreview).toHaveBeenCalledWith(fileItem.file)
})
it('should not call onPreview when file has no id', () => {
const onPreview = vi.fn()
const fileItem = createMockFileItem()
render(<FileListItem {...defaultProps} fileItem={fileItem} onPreview={onPreview} />)
const item = screen.getByText('test-document.pdf').closest('[class*="flex h-12"]')!
fireEvent.click(item)
expect(onPreview).not.toHaveBeenCalled()
})
it('should call onRemove when delete button is clicked', () => {
const onRemove = vi.fn()
const fileItem = createMockFileItem()
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} onRemove={onRemove} />)
const deleteButton = container.querySelector('.cursor-pointer')!
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledTimes(1)
expect(onRemove).toHaveBeenCalledWith('file-123')
})
it('should stop propagation when delete button is clicked', () => {
const onPreview = vi.fn()
const onRemove = vi.fn()
const fileItem = createMockFileItem({
file: createMockFile({ id: 'uploaded-id' } as Partial<File>),
})
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} onPreview={onPreview} onRemove={onRemove} />)
const deleteButton = container.querySelector('.cursor-pointer')!
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledTimes(1)
expect(onPreview).not.toHaveBeenCalled()
})
})
describe('file type handling', () => {
it('should handle files with multiple dots in name', () => {
const fileItem = createMockFileItem({
file: createMockFile({ name: 'my.document.file.docx' }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('my.document.file.docx')).toBeInTheDocument()
expect(screen.getByText('docx')).toBeInTheDocument()
})
it('should handle files without extension', () => {
const fileItem = createMockFileItem({
file: createMockFile({ name: 'README' }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
// File name appears once, and extension area shows empty string
expect(screen.getByText('README')).toBeInTheDocument()
})
it('should handle various file extensions', () => {
const extensions = ['txt', 'md', 'json', 'csv', 'xlsx']
extensions.forEach((ext) => {
const fileItem = createMockFileItem({
file: createMockFile({ name: `file.${ext}` }),
})
const { unmount } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText(ext)).toBeInTheDocument()
unmount()
})
})
})
describe('file size display', () => {
it('should display size in KB for small files', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 5 * 1024 }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('5.00 KB')).toBeInTheDocument()
})
it('should display size in MB for larger files', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 5 * 1024 * 1024 }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('5.00 MB')).toBeInTheDocument()
})
})
describe('upload progress values', () => {
it('should show chart at progress 1', () => {
const fileItem = createMockFileItem({ progress: 1 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByTestId('pie-chart')).toBeInTheDocument()
})
it('should show chart at progress 99', () => {
const fileItem = createMockFileItem({ progress: 99 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByTestId('pie-chart')).toHaveAttribute('data-percentage', '99')
})
it('should not show chart at progress 100', () => {
const fileItem = createMockFileItem({ progress: 100 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('styling', () => {
it('should have proper shadow styling', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('shadow-xs')
})
it('should have proper border styling', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('border', 'border-components-panel-border')
})
it('should truncate long file names', () => {
const longFileName = 'this-is-a-very-long-file-name-that-should-be-truncated.pdf'
const fileItem = createMockFileItem({
file: createMockFile({ name: longFileName }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const nameElement = screen.getByText(longFileName)
expect(nameElement).toHaveClass('truncate')
})
})
})
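The theme tests above work because the vi.mock factory closes over a mutable module-level variable, so each test can switch themes without re-mocking. The pattern reduced to its essentials (the hook path is kept from the file; the rest is illustrative):

let mockTheme = 'light'
vi.mock('@/hooks/use-theme', () => ({
  default: () => ({ theme: mockTheme }), // read at render time, not at mock time
}))

it('renders dark-theme colors', () => {
  mockTheme = 'dark' // takes effect on the next render
  // ...render the component and assert on theme-dependent output
})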


@@ -1,89 +0,0 @@
'use client'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { RiDeleteBinLine, RiErrorWarningFill } from '@remixicon/react'
import dynamic from 'next/dynamic'
import { useMemo } from 'react'
import DocumentFileIcon from '@/app/components/datasets/common/document-file-icon'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import { formatFileSize, getFileExtension } from '@/utils/format'
import { PROGRESS_COMPLETE, PROGRESS_ERROR } from '../constants'
const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), { ssr: false })
export type FileListItemProps = {
fileItem: FileItem
onPreview: (file: File) => void
onRemove: (fileID: string) => void
}
const FileListItem = ({
fileItem,
onPreview,
onRemove,
}: FileListItemProps) => {
const { theme } = useTheme()
const chartColor = useMemo(() => theme === Theme.dark ? '#5289ff' : '#296dff', [theme])
const isUploading = fileItem.progress >= 0 && fileItem.progress < PROGRESS_COMPLETE
const isError = fileItem.progress === PROGRESS_ERROR
const handleClick = () => {
if (fileItem.file?.id)
onPreview(fileItem.file)
}
const handleRemove = (e: React.MouseEvent) => {
e.stopPropagation()
onRemove(fileItem.fileID)
}
return (
<div
onClick={handleClick}
className="flex h-12 max-w-[640px] items-center rounded-lg border border-components-panel-border bg-components-panel-on-panel-item-bg text-xs leading-3 text-text-tertiary shadow-xs"
>
<div className="flex w-12 shrink-0 items-center justify-center">
<DocumentFileIcon
size="xl"
className="shrink-0"
name={fileItem.file.name}
extension={getFileExtension(fileItem.file.name)}
/>
</div>
<div className="flex shrink grow flex-col gap-0.5">
<div className="flex w-full">
<div className="w-0 grow truncate text-sm leading-4 text-text-secondary">
{fileItem.file.name}
</div>
</div>
<div className="w-full truncate leading-3 text-text-tertiary">
<span className="uppercase">{getFileExtension(fileItem.file.name)}</span>
<span className="px-1 text-text-quaternary">·</span>
<span>{formatFileSize(fileItem.file.size)}</span>
</div>
</div>
<div className="flex w-16 shrink-0 items-center justify-end gap-1 pr-3">
{isUploading && (
<SimplePieChart
percentage={fileItem.progress}
stroke={chartColor}
fill={chartColor}
animationDuration={0}
/>
)}
{isError && (
<RiErrorWarningFill className="size-4 text-text-destructive" />
)}
<span
className="flex h-6 w-6 cursor-pointer items-center justify-center"
onClick={handleRemove}
>
<RiDeleteBinLine className="size-4 text-text-tertiary" />
</span>
</div>
</div>
)
}
export default FileListItem
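A minimal usage sketch for the component above (pdfFile, openPreview and removeById are illustrative stand-ins, not from this diff):

<FileListItem
  fileItem={{ fileID: 'f-1', file: pdfFile, progress: 42 }} // 0-99 renders the pie chart
  onPreview={file => openPreview(file)}
  onRemove={fileID => removeById(fileID)}
/>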


@@ -1,210 +0,0 @@
import type { RefObject } from 'react'
import type { UploadDropzoneProps } from './upload-dropzone'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import UploadDropzone from './upload-dropzone'
// Helper to create mock ref objects for testing
const createMockRef = <T,>(value: T | null = null): RefObject<T | null> => ({ current: value })
// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string, options?: Record<string, unknown>) => {
const translations: Record<string, string> = {
'stepOne.uploader.button': 'Drag and drop files, or',
'stepOne.uploader.buttonSingleFile': 'Drag and drop file, or',
'stepOne.uploader.browse': 'Browse',
'stepOne.uploader.tip': 'Supports {{supportTypes}}, Max {{size}}MB each, up to {{batchCount}} files at a time, {{totalCount}} files total',
}
let result = translations[key] || key
if (options && typeof options === 'object') {
Object.entries(options).forEach(([k, v]) => {
result = result.replace(`{{${k}}}`, String(v))
})
}
return result
},
}),
}))
describe('UploadDropzone', () => {
const defaultProps: UploadDropzoneProps = {
dropRef: createMockRef<HTMLDivElement>() as RefObject<HTMLDivElement | null>,
dragRef: createMockRef<HTMLDivElement>() as RefObject<HTMLDivElement | null>,
fileUploaderRef: createMockRef<HTMLInputElement>() as RefObject<HTMLInputElement | null>,
dragging: false,
supportBatchUpload: true,
supportTypesShowNames: 'PDF, DOCX, TXT',
fileUploadConfig: {
file_size_limit: 15,
batch_count_limit: 5,
file_upload_limit: 10,
},
acceptTypes: ['.pdf', '.docx', '.txt'],
onSelectFile: vi.fn(),
onFileChange: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render the dropzone container', () => {
const { container } = render(<UploadDropzone {...defaultProps} />)
const dropzone = container.querySelector('[class*="border-dashed"]')
expect(dropzone).toBeInTheDocument()
})
it('should render hidden file input', () => {
render(<UploadDropzone {...defaultProps} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toBeInTheDocument()
expect(input).toHaveClass('hidden')
expect(input).toHaveAttribute('type', 'file')
})
it('should render upload icon', () => {
render(<UploadDropzone {...defaultProps} />)
const icon = document.querySelector('svg')
expect(icon).toBeInTheDocument()
})
it('should render browse label when extensions are allowed', () => {
render(<UploadDropzone {...defaultProps} />)
expect(screen.getByText('Browse')).toBeInTheDocument()
})
it('should not render browse label when no extensions allowed', () => {
render(<UploadDropzone {...defaultProps} acceptTypes={[]} />)
expect(screen.queryByText('Browse')).not.toBeInTheDocument()
})
it('should render file size and count limits', () => {
render(<UploadDropzone {...defaultProps} />)
const tipText = screen.getByText(/Supports.*Max.*15MB/i)
expect(tipText).toBeInTheDocument()
})
})
describe('file input configuration', () => {
it('should allow multiple files when supportBatchUpload is true', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={true} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('multiple')
})
it('should not allow multiple files when supportBatchUpload is false', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={false} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).not.toHaveAttribute('multiple')
})
it('should set accept attribute with correct types', () => {
render(<UploadDropzone {...defaultProps} acceptTypes={['.pdf', '.docx']} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('accept', '.pdf,.docx')
})
})
describe('text content', () => {
it('should show batch upload text when supportBatchUpload is true', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={true} />)
expect(screen.getByText(/Drag and drop files/i)).toBeInTheDocument()
})
it('should show single file text when supportBatchUpload is false', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={false} />)
expect(screen.getByText(/Drag and drop file/i)).toBeInTheDocument()
})
})
describe('dragging state', () => {
it('should apply dragging styles when dragging is true', () => {
const { container } = render(<UploadDropzone {...defaultProps} dragging={true} />)
const dropzone = container.querySelector('[class*="border-components-dropzone-border-accent"]')
expect(dropzone).toBeInTheDocument()
})
it('should render drag overlay when dragging', () => {
const dragRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dragging={true} dragRef={dragRef as RefObject<HTMLDivElement | null>} />)
const overlay = document.querySelector('.absolute.left-0.top-0')
expect(overlay).toBeInTheDocument()
})
it('should not render drag overlay when not dragging', () => {
render(<UploadDropzone {...defaultProps} dragging={false} />)
const overlay = document.querySelector('.absolute.left-0.top-0')
expect(overlay).not.toBeInTheDocument()
})
})
describe('event handlers', () => {
it('should call onSelectFile when browse label is clicked', () => {
const onSelectFile = vi.fn()
render(<UploadDropzone {...defaultProps} onSelectFile={onSelectFile} />)
const browseLabel = screen.getByText('Browse')
fireEvent.click(browseLabel)
expect(onSelectFile).toHaveBeenCalledTimes(1)
})
it('should call onFileChange when files are selected', () => {
const onFileChange = vi.fn()
render(<UploadDropzone {...defaultProps} onFileChange={onFileChange} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
const file = new File(['content'], 'test.pdf', { type: 'application/pdf' })
fireEvent.change(input, { target: { files: [file] } })
expect(onFileChange).toHaveBeenCalledTimes(1)
})
})
describe('refs', () => {
it('should attach dropRef to drop container', () => {
const dropRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dropRef={dropRef as RefObject<HTMLDivElement | null>} />)
expect(dropRef.current).toBeInstanceOf(HTMLDivElement)
})
it('should attach fileUploaderRef to input element', () => {
const fileUploaderRef = createMockRef<HTMLInputElement>()
render(<UploadDropzone {...defaultProps} fileUploaderRef={fileUploaderRef as RefObject<HTMLInputElement | null>} />)
expect(fileUploaderRef.current).toBeInstanceOf(HTMLInputElement)
})
it('should attach dragRef to overlay when dragging', () => {
const dragRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dragging={true} dragRef={dragRef as RefObject<HTMLDivElement | null>} />)
expect(dragRef.current).toBeInstanceOf(HTMLDivElement)
})
})
describe('styling', () => {
it('should have base dropzone styling', () => {
const { container } = render(<UploadDropzone {...defaultProps} />)
const dropzone = container.querySelector('[class*="border-dashed"]')
expect(dropzone).toBeInTheDocument()
expect(dropzone).toHaveClass('rounded-xl')
})
it('should have cursor-pointer on browse label', () => {
render(<UploadDropzone {...defaultProps} />)
const browseLabel = screen.getByText('Browse')
expect(browseLabel).toHaveClass('cursor-pointer')
})
})
describe('accessibility', () => {
it('should have an accessible file input', () => {
render(<UploadDropzone {...defaultProps} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('id', 'fileUploader')
})
})
})
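One detail of the useTranslation mock above: String.prototype.replace with a string pattern substitutes only the first occurrence of each {{token}}, which is enough here because every token appears once per template. If a template ever repeated a token, a global variant could look like this (illustrative helper, not part of the diff):

const interpolate = (template: string, options: Record<string, unknown> = {}): string =>
  Object.entries(options).reduce(
    (acc, [key, value]) => acc.replaceAll(`{{${key}}}`, String(value)),
    template,
  )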


@@ -1,84 +0,0 @@
'use client'
import type { RefObject } from 'react'
import type { FileUploadConfig } from '../hooks/use-file-upload'
import { RiUploadCloud2Line } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { cn } from '@/utils/classnames'
export type UploadDropzoneProps = {
dropRef: RefObject<HTMLDivElement | null>
dragRef: RefObject<HTMLDivElement | null>
fileUploaderRef: RefObject<HTMLInputElement | null>
dragging: boolean
supportBatchUpload: boolean
supportTypesShowNames: string
fileUploadConfig: FileUploadConfig
acceptTypes: string[]
onSelectFile: () => void
onFileChange: (e: React.ChangeEvent<HTMLInputElement>) => void
}
const UploadDropzone = ({
dropRef,
dragRef,
fileUploaderRef,
dragging,
supportBatchUpload,
supportTypesShowNames,
fileUploadConfig,
acceptTypes,
onSelectFile,
onFileChange,
}: UploadDropzoneProps) => {
const { t } = useTranslation()
return (
<>
<input
ref={fileUploaderRef}
id="fileUploader"
className="hidden"
type="file"
multiple={supportBatchUpload}
accept={acceptTypes.join(',')}
onChange={onFileChange}
/>
<div
ref={dropRef}
className={cn(
'relative mb-2 box-border flex min-h-20 max-w-[640px] flex-col items-center justify-center gap-1 rounded-xl border border-dashed border-components-dropzone-border bg-components-dropzone-bg px-4 py-3 text-xs leading-4 text-text-tertiary',
dragging && 'border-components-dropzone-border-accent bg-components-dropzone-bg-accent',
)}
>
<div className="flex min-h-5 items-center justify-center text-sm leading-4 text-text-secondary">
<RiUploadCloud2Line className="mr-2 size-5" />
<span>
{supportBatchUpload
? t('stepOne.uploader.button', { ns: 'datasetCreation' })
: t('stepOne.uploader.buttonSingleFile', { ns: 'datasetCreation' })}
{acceptTypes.length > 0 && (
<label
className="ml-1 cursor-pointer text-text-accent"
onClick={onSelectFile}
>
{t('stepOne.uploader.browse', { ns: 'datasetCreation' })}
</label>
)}
</span>
</div>
<div>
{t('stepOne.uploader.tip', {
ns: 'datasetCreation',
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
totalCount: fileUploadConfig.file_upload_limit,
})}
</div>
{dragging && <div ref={dragRef} className="absolute left-0 top-0 h-full w-full" />}
</div>
</>
)
}
export default UploadDropzone
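UploadDropzone is purely presentational; its refs and handlers are expected to come from the useFileUpload hook exercised by the tests later in this diff. A wiring sketch using the hook's return shape as those tests show it (the surrounding Uploader component and elided options are illustrative):

const Uploader = () => {
  const {
    dropRef, dragRef, fileUploaderRef, dragging,
    acceptTypes, supportTypesShowNames, fileUploadConfig,
    selectHandle, fileChangeHandle,
  } = useFileUpload({ /* fileList, callbacks, supportBatchUpload, ... */ })
  return (
    <UploadDropzone
      dropRef={dropRef}
      dragRef={dragRef}
      fileUploaderRef={fileUploaderRef}
      dragging={dragging}
      supportBatchUpload
      supportTypesShowNames={supportTypesShowNames}
      fileUploadConfig={fileUploadConfig}
      acceptTypes={acceptTypes}
      onSelectFile={selectHandle}
      onFileChange={fileChangeHandle}
    />
  )
}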


@@ -1,3 +0,0 @@
export const PROGRESS_NOT_STARTED = -1
export const PROGRESS_ERROR = -2
export const PROGRESS_COMPLETE = 100
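These three constants pack upload state into one number: negative values are sentinels, 0-99 is percent uploaded, and 100 means done, which is exactly how FileListItem branches on them. A decoding sketch (the describeProgress helper is illustrative):

const describeProgress = (progress: number): string => {
  if (progress === PROGRESS_ERROR) return 'failed'        // -2
  if (progress === PROGRESS_NOT_STARTED) return 'queued'  // -1
  if (progress >= PROGRESS_COMPLETE) return 'done'        // 100
  return `uploading ${progress}%`                         // 0-99
}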


@@ -1,921 +0,0 @@
import type { ReactNode } from 'react'
import type { CustomFile, FileItem } from '@/models/datasets'
import { act, render, renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ToastContext } from '@/app/components/base/toast'
import { PROGRESS_COMPLETE, PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
// Import after mocks
import { useFileUpload } from './use-file-upload'
// Mock notify function
const mockNotify = vi.fn()
const mockClose = vi.fn()
// Mock ToastContext
vi.mock('use-context-selector', async () => {
const actual = await vi.importActual<typeof import('use-context-selector')>('use-context-selector')
return {
...actual,
useContext: vi.fn(() => ({ notify: mockNotify, close: mockClose })),
}
})
// Mock upload service
const mockUpload = vi.fn()
vi.mock('@/service/base', () => ({
upload: (...args: unknown[]) => mockUpload(...args),
}))
// Mock file upload config
const mockFileUploadConfig = {
file_size_limit: 15,
batch_count_limit: 5,
file_upload_limit: 10,
}
const mockSupportTypes = {
allowed_extensions: ['pdf', 'docx', 'txt', 'md'],
}
vi.mock('@/service/use-common', () => ({
useFileUploadConfig: () => ({ data: mockFileUploadConfig }),
useFileSupportTypes: () => ({ data: mockSupportTypes }),
}))
// Mock i18n
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => key,
}),
}))
// Mock locale
vi.mock('@/context/i18n', () => ({
useLocale: () => 'en-US',
}))
vi.mock('@/i18n-config/language', () => ({
LanguagesSupported: ['en-US', 'zh-Hans'],
}))
// Mock config
vi.mock('@/config', () => ({
IS_CE_EDITION: false,
}))
// Mock file upload error message
vi.mock('@/app/components/base/file-uploader/utils', () => ({
getFileUploadErrorMessage: (_e: unknown, defaultMsg: string) => defaultMsg,
}))
const createWrapper = () => {
return ({ children }: { children: ReactNode }) => (
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
{children}
</ToastContext.Provider>
)
}
describe('useFileUpload', () => {
const defaultOptions = {
fileList: [] as FileItem[],
prepareFileList: vi.fn(),
onFileUpdate: vi.fn(),
onFileListUpdate: vi.fn(),
onPreview: vi.fn(),
supportBatchUpload: true,
}
beforeEach(() => {
vi.clearAllMocks()
mockUpload.mockReset()
// Default mock to return a resolved promise to avoid unhandled rejections
mockUpload.mockResolvedValue({ id: 'default-id' })
mockNotify.mockReset()
})
describe('initialization', () => {
it('should initialize with default values', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
expect(result.current.dragging).toBe(false)
expect(result.current.hideUpload).toBe(false)
expect(result.current.dropRef.current).toBeNull()
expect(result.current.dragRef.current).toBeNull()
expect(result.current.fileUploaderRef.current).toBeNull()
})
it('should set hideUpload true when not batch upload and has files', () => {
const { result } = renderHook(
() => useFileUpload({
...defaultOptions,
supportBatchUpload: false,
fileList: [{ fileID: 'file-1', file: {} as CustomFile, progress: 100 }],
}),
{ wrapper: createWrapper() },
)
expect(result.current.hideUpload).toBe(true)
})
it('should compute acceptTypes correctly', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
expect(result.current.acceptTypes).toEqual(['.pdf', '.docx', '.txt', '.md'])
})
it('should compute supportTypesShowNames correctly', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
expect(result.current.supportTypesShowNames).toContain('PDF')
expect(result.current.supportTypesShowNames).toContain('DOCX')
expect(result.current.supportTypesShowNames).toContain('TXT')
// 'md' is mapped to 'markdown' in the extensionMap
expect(result.current.supportTypesShowNames).toContain('MARKDOWN')
})
it('should set batch limit to 1 when not batch upload', () => {
const { result } = renderHook(
() => useFileUpload({
...defaultOptions,
supportBatchUpload: false,
}),
{ wrapper: createWrapper() },
)
expect(result.current.fileUploadConfig.batch_count_limit).toBe(1)
expect(result.current.fileUploadConfig.file_upload_limit).toBe(1)
})
})
describe('selectHandle', () => {
it('should trigger click on file input', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
const mockClick = vi.fn()
const mockInput = { click: mockClick } as unknown as HTMLInputElement
Object.defineProperty(result.current.fileUploaderRef, 'current', {
value: mockInput,
writable: true,
})
act(() => {
result.current.selectHandle()
})
expect(mockClick).toHaveBeenCalled()
})
it('should do nothing when file input ref is null', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
expect(() => {
act(() => {
result.current.selectHandle()
})
}).not.toThrow()
})
})
describe('handlePreview', () => {
it('should call onPreview when file has id', () => {
const onPreview = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onPreview }),
{ wrapper: createWrapper() },
)
const mockFile = { id: 'file-123', name: 'test.pdf', size: 1024 } as CustomFile
act(() => {
result.current.handlePreview(mockFile)
})
expect(onPreview).toHaveBeenCalledWith(mockFile)
})
it('should not call onPreview when file has no id', () => {
const onPreview = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onPreview }),
{ wrapper: createWrapper() },
)
const mockFile = { name: 'test.pdf', size: 1024 } as CustomFile
act(() => {
result.current.handlePreview(mockFile)
})
expect(onPreview).not.toHaveBeenCalled()
})
})
describe('removeFile', () => {
it('should call onFileListUpdate with filtered list', () => {
const onFileListUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileListUpdate }),
{ wrapper: createWrapper() },
)
act(() => {
result.current.removeFile('file-to-remove')
})
expect(onFileListUpdate).toHaveBeenCalled()
})
it('should clear file input value', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
const mockInput = { value: 'some-file' } as HTMLInputElement
Object.defineProperty(result.current.fileUploaderRef, 'current', {
value: mockInput,
writable: true,
})
act(() => {
result.current.removeFile('file-123')
})
expect(mockInput.value).toBe('')
})
})
describe('fileChangeHandle', () => {
it('should handle valid files', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, prepareFileList }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(prepareFileList).toHaveBeenCalled()
})
})
it('should limit files to batch count', () => {
const prepareFileList = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, prepareFileList }),
{ wrapper: createWrapper() },
)
const files = Array.from({ length: 10 }, (_, i) =>
new File(['content'], `file${i}.pdf`, { type: 'application/pdf' }))
const event = {
target: { files },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
// Should be called with at most batch_count_limit files
if (prepareFileList.mock.calls.length > 0) {
const calledFiles = prepareFileList.mock.calls[0][0]
expect(calledFiles.length).toBeLessThanOrEqual(mockFileUploadConfig.batch_count_limit)
}
})
it('should reject invalid file types', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.exe', { type: 'application/x-msdownload' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
it('should reject files exceeding size limit', () => {
const { result } = renderHook(
() => useFileUpload(defaultOptions),
{ wrapper: createWrapper() },
)
// Create a file larger than the limit (15MB)
const largeFile = new File([new ArrayBuffer(20 * 1024 * 1024)], 'large.pdf', { type: 'application/pdf' })
const event = {
target: { files: [largeFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
it('should handle null files', () => {
const prepareFileList = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, prepareFileList }),
{ wrapper: createWrapper() },
)
const event = {
target: { files: null },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(prepareFileList).not.toHaveBeenCalled()
})
})
describe('drag and drop handlers', () => {
const TestDropzone = ({ options }: { options: typeof defaultOptions }) => {
const {
dropRef,
dragRef,
dragging,
} = useFileUpload(options)
return (
<div>
<div ref={dropRef} data-testid="dropzone">
{dragging && <div ref={dragRef} data-testid="drag-overlay" />}
</div>
<span data-testid="dragging">{String(dragging)}</span>
</div>
)
}
it('should set dragging true on dragenter', async () => {
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={defaultOptions} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragEnterEvent)
})
expect(getByTestId('dragging').textContent).toBe('true')
})
it('should handle dragover event', async () => {
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={defaultOptions} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dragOverEvent = new Event('dragover', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragOverEvent)
})
expect(dropzone).toBeInTheDocument()
})
it('should set dragging false on dragleave from drag overlay', async () => {
const { getByTestId, queryByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={defaultOptions} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragEnterEvent)
})
expect(getByTestId('dragging').textContent).toBe('true')
const dragOverlay = queryByTestId('drag-overlay')
if (dragOverlay) {
await act(async () => {
const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
Object.defineProperty(dragLeaveEvent, 'target', { value: dragOverlay })
dropzone.dispatchEvent(dragLeaveEvent)
})
}
})
it('should handle drop with files', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => mockFile,
webkitGetAsEntry: () => null,
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
expect(prepareFileList).toHaveBeenCalled()
})
})
it('should handle drop without dataTransfer', async () => {
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', { value: null })
dropzone.dispatchEvent(dropEvent)
})
expect(prepareFileList).not.toHaveBeenCalled()
})
it('should limit to single file on drop when supportBatchUpload is false', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, supportBatchUpload: false, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
const files = [
new File(['content1'], 'test1.pdf', { type: 'application/pdf' }),
new File(['content2'], 'test2.pdf', { type: 'application/pdf' }),
]
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: files.map(f => ({
getAsFile: () => f,
webkitGetAsEntry: () => null,
})),
},
})
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
if (prepareFileList.mock.calls.length > 0) {
const calledFiles = prepareFileList.mock.calls[0][0]
expect(calledFiles.length).toBe(1)
}
})
})
it('should handle drop with FileSystemFileEntry', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => mockFile,
webkitGetAsEntry: () => ({
isFile: true,
isDirectory: false,
file: (callback: (file: File) => void) => callback(mockFile),
}),
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
expect(prepareFileList).toHaveBeenCalled()
})
})
it('should handle drop with FileSystemDirectoryEntry', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const prepareFileList = vi.fn()
const mockFile = new File(['content'], 'nested.pdf', { type: 'application/pdf' })
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
let callCount = 0
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => null,
webkitGetAsEntry: () => ({
isFile: false,
isDirectory: true,
name: 'folder',
createReader: () => ({
readEntries: (callback: (entries: Array<{ isFile: boolean, isDirectory: boolean, name?: string, file?: (cb: (f: File) => void) => void }>) => void) => {
// First call returns file entry, second call returns empty (signals end)
if (callCount === 0) {
callCount++
callback([{
isFile: true,
isDirectory: false,
name: 'nested.pdf',
file: (cb: (f: File) => void) => cb(mockFile),
}])
}
else {
callback([])
}
},
}),
}),
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
expect(prepareFileList).toHaveBeenCalled()
})
})
it('should handle drop with empty directory', async () => {
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => null,
webkitGetAsEntry: () => ({
isFile: false,
isDirectory: true,
name: 'empty-folder',
createReader: () => ({
readEntries: (callback: (entries: never[]) => void) => {
callback([])
},
}),
}),
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
// Should not prepare file list if no valid files
await new Promise(resolve => setTimeout(resolve, 100))
})
it('should handle entry that is neither file nor directory', async () => {
const prepareFileList = vi.fn()
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone options={{ ...defaultOptions, prepareFileList }} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: DataTransfer | null }
Object.defineProperty(dropEvent, 'dataTransfer', {
value: {
items: [{
getAsFile: () => null,
webkitGetAsEntry: () => ({
isFile: false,
isDirectory: false,
}),
}],
},
})
dropzone.dispatchEvent(dropEvent)
})
// Should not throw and should handle gracefully
await new Promise(resolve => setTimeout(resolve, 100))
})
})
describe('file upload', () => {
it('should call upload with correct parameters', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id', name: 'test.pdf' })
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockUpload).toHaveBeenCalled()
})
})
it('should update progress during upload', async () => {
let progressCallback: ((e: ProgressEvent) => void) | undefined
mockUpload.mockImplementation(async (options: { onprogress: (e: ProgressEvent) => void }) => {
progressCallback = options.onprogress
return { id: 'uploaded-id' }
})
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockUpload).toHaveBeenCalled()
})
if (progressCallback) {
act(() => {
progressCallback!({
lengthComputable: true,
loaded: 50,
total: 100,
} as ProgressEvent)
})
expect(onFileUpdate).toHaveBeenCalled()
}
})
it('should handle upload error', async () => {
mockUpload.mockRejectedValue(new Error('Upload failed'))
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
})
it('should update file with PROGRESS_COMPLETE on success', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id', name: 'test.pdf' })
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
const completeCalls = onFileUpdate.mock.calls.filter(
([, progress]) => progress === PROGRESS_COMPLETE,
)
expect(completeCalls.length).toBeGreaterThan(0)
})
})
it('should update file with PROGRESS_ERROR on failure', async () => {
mockUpload.mockRejectedValue(new Error('Upload failed'))
const onFileUpdate = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, onFileUpdate }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
const errorCalls = onFileUpdate.mock.calls.filter(
([, progress]) => progress === PROGRESS_ERROR,
)
expect(errorCalls.length).toBeGreaterThan(0)
})
})
})
describe('file count validation', () => {
it('should reject when total files exceed limit', () => {
const existingFiles: FileItem[] = Array.from({ length: 8 }, (_, i) => ({
fileID: `existing-${i}`,
file: { name: `existing-${i}.pdf`, size: 1024 } as CustomFile,
progress: 100,
}))
const { result } = renderHook(
() => useFileUpload({
...defaultOptions,
fileList: existingFiles,
}),
{ wrapper: createWrapper() },
)
const files = Array.from({ length: 5 }, (_, i) =>
new File(['content'], `new-${i}.pdf`, { type: 'application/pdf' }))
const event = {
target: { files },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
})
describe('progress constants', () => {
it('should use PROGRESS_NOT_STARTED for new files', async () => {
mockUpload.mockResolvedValue({ id: 'file-id' })
const prepareFileList = vi.fn()
const { result } = renderHook(
() => useFileUpload({ ...defaultOptions, prepareFileList }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
if (prepareFileList.mock.calls.length > 0) {
const files = prepareFileList.mock.calls[0][0]
expect(files[0].progress).toBe(PROGRESS_NOT_STARTED)
}
})
})
})
})

View File

@@ -1,351 +0,0 @@
'use client'
import type { RefObject } from 'react'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { getFileUploadErrorMessage } from '@/app/components/base/file-uploader/utils'
import { ToastContext } from '@/app/components/base/toast'
import { IS_CE_EDITION } from '@/config'
import { useLocale } from '@/context/i18n'
import { LanguagesSupported } from '@/i18n-config/language'
import { upload } from '@/service/base'
import { useFileSupportTypes, useFileUploadConfig } from '@/service/use-common'
import { getFileExtension } from '@/utils/format'
import { PROGRESS_COMPLETE, PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
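// Named progress sentinels; judging from the pre-refactor file below they replace the old magic numbers -1 (not started), -2 (error) and 100 (complete)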
export type FileUploadConfig = {
file_size_limit: number
batch_count_limit: number
file_upload_limit: number
}
export type UseFileUploadOptions = {
fileList: FileItem[]
prepareFileList: (files: FileItem[]) => void
onFileUpdate: (fileItem: FileItem, progress: number, list: FileItem[]) => void
onFileListUpdate?: (files: FileItem[]) => void
onPreview: (file: File) => void
supportBatchUpload?: boolean
/**
* Optional list of allowed file extensions. If not provided, the list is fetched from the API.
* Pass this when you need custom extension filtering instead of using the global config.
*/
allowedExtensions?: string[]
}
export type UseFileUploadReturn = {
// Refs
dropRef: RefObject<HTMLDivElement | null>
dragRef: RefObject<HTMLDivElement | null>
fileUploaderRef: RefObject<HTMLInputElement | null>
// State
dragging: boolean
// Config
fileUploadConfig: FileUploadConfig
acceptTypes: string[]
supportTypesShowNames: string
hideUpload: boolean
// Handlers
selectHandle: () => void
fileChangeHandle: (e: React.ChangeEvent<HTMLInputElement>) => void
removeFile: (fileID: string) => void
handlePreview: (file: File) => void
}
type FileWithPath = {
relativePath?: string
} & File
export const useFileUpload = ({
fileList,
prepareFileList,
onFileUpdate,
onFileListUpdate,
onPreview,
supportBatchUpload = false,
allowedExtensions,
}: UseFileUploadOptions): UseFileUploadReturn => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const locale = useLocale()
const [dragging, setDragging] = useState(false)
const dropRef = useRef<HTMLDivElement>(null)
const dragRef = useRef<HTMLDivElement>(null)
const fileUploaderRef = useRef<HTMLInputElement>(null)
const fileListRef = useRef<FileItem[]>([])
const hideUpload = !supportBatchUpload && fileList.length > 0
const { data: fileUploadConfigResponse } = useFileUploadConfig()
const { data: supportFileTypesResponse } = useFileSupportTypes()
// Use provided allowedExtensions or fetch from API
const supportTypes = useMemo(
() => allowedExtensions ?? supportFileTypesResponse?.allowed_extensions ?? [],
[allowedExtensions, supportFileTypesResponse?.allowed_extensions],
)
const supportTypesShowNames = useMemo(() => {
const extensionMap: { [key: string]: string } = {
md: 'markdown',
pptx: 'pptx',
htm: 'html',
xlsx: 'xlsx',
docx: 'docx',
}
return [...supportTypes]
.map(item => extensionMap[item] || item)
.map(item => item.toLowerCase())
.filter((item, index, self) => self.indexOf(item) === index)
.map(item => item.toUpperCase())
.join(locale !== LanguagesSupported[1] ? ', ' : '、 ')
}, [supportTypes, locale])
const acceptTypes = useMemo(() => supportTypes.map((ext: string) => `.${ext}`), [supportTypes])
const fileUploadConfig = useMemo(() => ({
file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15,
batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1,
file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 5) : 1,
}), [fileUploadConfigResponse, supportBatchUpload])
const isValid = useCallback((file: File) => {
const { size } = file
const ext = `.${getFileExtension(file.name)}`
const isValidType = acceptTypes.includes(ext.toLowerCase())
if (!isValidType)
notify({ type: 'error', message: t('stepOne.uploader.validation.typeError', { ns: 'datasetCreation' }) })
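// file_size_limit is expressed in MB (it defaults to 15 above), hence the * 1024 * 1024 conversion to bytes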
const isValidSize = size <= fileUploadConfig.file_size_limit * 1024 * 1024
if (!isValidSize)
notify({ type: 'error', message: t('stepOne.uploader.validation.size', { ns: 'datasetCreation', size: fileUploadConfig.file_size_limit }) })
return isValidType && isValidSize
}, [fileUploadConfig, notify, t, acceptTypes])
const fileUpload = useCallback(async (fileItem: FileItem): Promise<FileItem> => {
const formData = new FormData()
formData.append('file', fileItem.file)
const onProgress = (e: ProgressEvent) => {
if (e.lengthComputable) {
const percent = Math.floor(e.loaded / e.total * 100)
onFileUpdate(fileItem, percent, fileListRef.current)
}
}
return upload({
xhr: new XMLHttpRequest(),
data: formData,
onprogress: onProgress,
}, false, undefined, '?source=datasets')
.then((res) => {
const completeFile = {
fileID: fileItem.fileID,
file: res as unknown as File,
progress: PROGRESS_NOT_STARTED,
}
const index = fileListRef.current.findIndex(item => item.fileID === fileItem.fileID)
fileListRef.current[index] = completeFile
onFileUpdate(completeFile, PROGRESS_COMPLETE, fileListRef.current)
return Promise.resolve({ ...completeFile })
})
.catch((e) => {
const errorMessage = getFileUploadErrorMessage(e, t('stepOne.uploader.failed', { ns: 'datasetCreation' }), t)
notify({ type: 'error', message: errorMessage })
onFileUpdate(fileItem, PROGRESS_ERROR, fileListRef.current)
return Promise.resolve({ ...fileItem })
})
.finally()
}, [notify, onFileUpdate, t])
const uploadBatchFiles = useCallback((bFiles: FileItem[]) => {
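// Mark each file as started (progress 0) so the UI switches from the idle state to the progress chart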
bFiles.forEach(bf => (bf.progress = 0))
return Promise.all(bFiles.map(fileUpload))
}, [fileUpload])
const uploadMultipleFiles = useCallback(async (files: FileItem[]) => {
const batchCountLimit = fileUploadConfig.batch_count_limit
const length = files.length
let start = 0
let end = 0
while (start < length) {
if (start + batchCountLimit > length)
end = length
else
end = start + batchCountLimit
const bFiles = files.slice(start, end)
await uploadBatchFiles(bFiles)
start = end
}
}, [fileUploadConfig, uploadBatchFiles])
const initialUpload = useCallback((files: File[]) => {
const filesCountLimit = fileUploadConfig.file_upload_limit
if (!files.length)
return false
if (files.length + fileList.length > filesCountLimit && !IS_CE_EDITION) {
notify({ type: 'error', message: t('stepOne.uploader.validation.filesNumber', { ns: 'datasetCreation', filesNumber: filesCountLimit }) })
return false
}
const preparedFiles = files.map((file, index) => ({
fileID: `file${index}-${Date.now()}`,
file,
progress: PROGRESS_NOT_STARTED,
}))
const newFiles = [...fileListRef.current, ...preparedFiles]
prepareFileList(newFiles)
fileListRef.current = newFiles
uploadMultipleFiles(preparedFiles)
}, [prepareFileList, uploadMultipleFiles, notify, t, fileList, fileUploadConfig])
const traverseFileEntry = useCallback(
(entry: FileSystemEntry, prefix = ''): Promise<FileWithPath[]> => {
return new Promise((resolve) => {
if (entry.isFile) {
(entry as FileSystemFileEntry).file((file: FileWithPath) => {
file.relativePath = `${prefix}${file.name}`
resolve([file])
})
}
else if (entry.isDirectory) {
const reader = (entry as FileSystemDirectoryEntry).createReader()
const entries: FileSystemEntry[] = []
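// readEntries() returns results in chunks (Chrome caps a single call at 100 entries), so keep calling until an empty batch signals the end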
const read = () => {
reader.readEntries(async (results: FileSystemEntry[]) => {
if (!results.length) {
const files = await Promise.all(
entries.map(ent =>
traverseFileEntry(ent, `${prefix}${entry.name}/`),
),
)
resolve(files.flat())
}
else {
entries.push(...results)
read()
}
})
}
read()
}
else {
resolve([])
}
})
},
[],
)
const handleDragEnter = useCallback((e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target !== dragRef.current)
setDragging(true)
}, [])
const handleDragOver = useCallback((e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
}, [])
const handleDragLeave = useCallback((e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target === dragRef.current)
setDragging(false)
}, [])
const handleDrop = useCallback(
async (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
setDragging(false)
if (!e.dataTransfer)
return
const nested = await Promise.all(
Array.from(e.dataTransfer.items).map((it) => {
const entry = (it as DataTransferItem & { webkitGetAsEntry?: () => FileSystemEntry | null }).webkitGetAsEntry?.()
if (entry)
return traverseFileEntry(entry)
const f = it.getAsFile?.()
return f ? Promise.resolve([f as FileWithPath]) : Promise.resolve([])
}),
)
let files = nested.flat()
if (!supportBatchUpload)
files = files.slice(0, 1)
files = files.slice(0, fileUploadConfig.batch_count_limit)
const valid = files.filter(isValid)
initialUpload(valid)
},
[initialUpload, isValid, supportBatchUpload, traverseFileEntry, fileUploadConfig],
)
const selectHandle = useCallback(() => {
if (fileUploaderRef.current)
fileUploaderRef.current.click()
}, [])
const removeFile = useCallback((fileID: string) => {
if (fileUploaderRef.current)
fileUploaderRef.current.value = ''
fileListRef.current = fileListRef.current.filter(item => item.fileID !== fileID)
onFileListUpdate?.([...fileListRef.current])
}, [onFileListUpdate])
const fileChangeHandle = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
let files = Array.from(e.target.files ?? []) as File[]
files = files.slice(0, fileUploadConfig.batch_count_limit)
initialUpload(files.filter(isValid))
}, [isValid, initialUpload, fileUploadConfig])
const handlePreview = useCallback((file: File) => {
if (file?.id)
onPreview(file)
}, [onPreview])
useEffect(() => {
const dropArea = dropRef.current
dropArea?.addEventListener('dragenter', handleDragEnter)
dropArea?.addEventListener('dragover', handleDragOver)
dropArea?.addEventListener('dragleave', handleDragLeave)
dropArea?.addEventListener('drop', handleDrop)
return () => {
dropArea?.removeEventListener('dragenter', handleDragEnter)
dropArea?.removeEventListener('dragover', handleDragOver)
dropArea?.removeEventListener('dragleave', handleDragLeave)
dropArea?.removeEventListener('drop', handleDrop)
}
}, [handleDragEnter, handleDragOver, handleDragLeave, handleDrop])
return {
// Refs
dropRef,
dragRef,
fileUploaderRef,
// State
dragging,
// Config
fileUploadConfig,
acceptTypes,
supportTypesShowNames,
hideUpload,
// Handlers
selectHandle,
fileChangeHandle,
removeFile,
handlePreview,
}
}
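The upload queue above is throttled rather than fired all at once: uploadMultipleFiles slices the prepared files into windows of batch_count_limit and awaits each window before starting the next, so at most one batch is in flight at a time. A minimal sketch of that strategy in isolation (uploadInBatches and uploadBatch are illustrative names, not exports of the hook):

async function uploadInBatches<T>(
  items: T[],
  batchSize: number,
  uploadBatch: (batch: T[]) => Promise<unknown>,
): Promise<void> {
  // Await each window so the next batch starts only after the previous one settles
  for (let start = 0; start < items.length; start += batchSize)
    await uploadBatch(items.slice(start, start + batchSize))
}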

View File

@@ -1,278 +0,0 @@
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PROGRESS_NOT_STARTED } from './constants'
import FileUploader from './index'
// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => {
const translations: Record<string, string> = {
'stepOne.uploader.title': 'Upload Files',
'stepOne.uploader.button': 'Drag and drop files, or',
'stepOne.uploader.buttonSingleFile': 'Drag and drop file, or',
'stepOne.uploader.browse': 'Browse',
'stepOne.uploader.tip': 'Supports various file types',
}
return translations[key] || key
},
}),
}))
// Mock ToastContext
const mockNotify = vi.fn()
vi.mock('use-context-selector', async () => {
const actual = await vi.importActual<typeof import('use-context-selector')>('use-context-selector')
return {
...actual,
useContext: vi.fn(() => ({ notify: mockNotify })),
}
})
// Mock services
vi.mock('@/service/base', () => ({
upload: vi.fn().mockResolvedValue({ id: 'uploaded-id' }),
}))
vi.mock('@/service/use-common', () => ({
useFileUploadConfig: () => ({
data: { file_size_limit: 15, batch_count_limit: 5, file_upload_limit: 10 },
}),
useFileSupportTypes: () => ({
data: { allowed_extensions: ['pdf', 'docx', 'txt'] },
}),
}))
vi.mock('@/context/i18n', () => ({
useLocale: () => 'en-US',
}))
vi.mock('@/i18n-config/language', () => ({
LanguagesSupported: ['en-US', 'zh-Hans'],
}))
vi.mock('@/config', () => ({
IS_CE_EDITION: false,
}))
vi.mock('@/app/components/base/file-uploader/utils', () => ({
getFileUploadErrorMessage: () => 'Upload error',
}))
// Mock theme
vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: 'light' }),
}))
vi.mock('@/types/app', () => ({
Theme: { dark: 'dark', light: 'light' },
}))
// Mock DocumentFileIcon - file-list-item.tsx imports it via a relative path
vi.mock('@/app/components/datasets/common/document-file-icon', () => ({
default: ({ extension }: { extension: string }) => <div data-testid="document-icon">{extension}</div>,
}))
// Mock next/dynamic so the dynamically imported SimplePieChart renders synchronously as a stub
vi.mock('next/dynamic', () => ({
default: () => {
const Component = ({ percentage }: { percentage: number }) => (
<div data-testid="pie-chart">
{percentage}
%
</div>
)
return Component
},
}))
describe('FileUploader', () => {
const createMockFile = (overrides: Partial<File> = {}): File => ({
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
...overrides,
} as File)
const createMockFileItem = (overrides: Partial<FileItem> = {}): FileItem => ({
fileID: `file-${Date.now()}`,
file: createMockFile(overrides.file as Partial<File>),
progress: PROGRESS_NOT_STARTED,
...overrides,
})
const defaultProps = {
fileList: [] as FileItem[],
prepareFileList: vi.fn(),
onFileUpdate: vi.fn(),
onFileListUpdate: vi.fn(),
onPreview: vi.fn(),
supportBatchUpload: true,
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render the component', () => {
render(<FileUploader {...defaultProps} />)
expect(screen.getByText('Upload Files')).toBeInTheDocument()
})
it('should render dropzone when no files', () => {
render(<FileUploader {...defaultProps} />)
expect(screen.getByText(/Drag and drop files/i)).toBeInTheDocument()
})
it('should render browse button', () => {
render(<FileUploader {...defaultProps} />)
expect(screen.getByText('Browse')).toBeInTheDocument()
})
it('should apply custom title className', () => {
render(<FileUploader {...defaultProps} titleClassName="custom-class" />)
const title = screen.getByText('Upload Files')
expect(title).toHaveClass('custom-class')
})
})
describe('file list rendering', () => {
it('should render file items when fileList has items', () => {
const fileList = [
createMockFileItem({ file: createMockFile({ name: 'file1.pdf' }) }),
createMockFileItem({ file: createMockFile({ name: 'file2.pdf' }) }),
]
render(<FileUploader {...defaultProps} fileList={fileList} />)
expect(screen.getByText('file1.pdf')).toBeInTheDocument()
expect(screen.getByText('file2.pdf')).toBeInTheDocument()
})
it('should render document icons for files', () => {
const fileList = [createMockFileItem()]
render(<FileUploader {...defaultProps} fileList={fileList} />)
expect(screen.getByTestId('document-icon')).toBeInTheDocument()
})
})
describe('batch upload mode', () => {
it('should show dropzone with batch upload enabled', () => {
render(<FileUploader {...defaultProps} supportBatchUpload={true} />)
expect(screen.getByText(/Drag and drop files/i)).toBeInTheDocument()
})
it('should show single file text when batch upload disabled', () => {
render(<FileUploader {...defaultProps} supportBatchUpload={false} />)
expect(screen.getByText(/Drag and drop file/i)).toBeInTheDocument()
})
it('should hide dropzone when not batch upload and has files', () => {
const fileList = [createMockFileItem()]
render(<FileUploader {...defaultProps} supportBatchUpload={false} fileList={fileList} />)
expect(screen.queryByText(/Drag and drop/i)).not.toBeInTheDocument()
})
})
describe('event handlers', () => {
it('should handle file preview click', () => {
const onPreview = vi.fn()
const fileItem = createMockFileItem({
file: createMockFile({ id: 'file-id' } as Partial<File>),
})
const { container } = render(<FileUploader {...defaultProps} fileList={[fileItem]} onPreview={onPreview} />)
// Find the file list item container by its class pattern
const fileElement = container.querySelector('[class*="flex h-12"]')
if (fileElement)
fireEvent.click(fileElement)
expect(onPreview).toHaveBeenCalledWith(fileItem.file)
})
it('should handle file remove click', () => {
const onFileListUpdate = vi.fn()
const fileItem = createMockFileItem()
const { container } = render(
<FileUploader {...defaultProps} fileList={[fileItem]} onFileListUpdate={onFileListUpdate} />,
)
// Find the delete button (the span with cursor-pointer containing the icon)
const deleteButtons = container.querySelectorAll('[class*="cursor-pointer"]')
// Get the last one, which should be the delete button (not the browse label)
const deleteButton = deleteButtons[deleteButtons.length - 1]
if (deleteButton)
fireEvent.click(deleteButton)
expect(onFileListUpdate).toHaveBeenCalled()
})
it('should handle browse button click', () => {
render(<FileUploader {...defaultProps} />)
// The browse label should trigger file input click
const browseLabel = screen.getByText('Browse')
expect(browseLabel).toHaveClass('cursor-pointer')
})
})
describe('upload progress', () => {
it('should show progress chart for uploading files', () => {
const fileItem = createMockFileItem({ progress: 50 })
render(<FileUploader {...defaultProps} fileList={[fileItem]} />)
expect(screen.getByTestId('pie-chart')).toBeInTheDocument()
expect(screen.getByText('50%')).toBeInTheDocument()
})
it('should not show progress chart for completed files', () => {
const fileItem = createMockFileItem({ progress: 100 })
render(<FileUploader {...defaultProps} fileList={[fileItem]} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
it('should not show progress chart for not started files', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_NOT_STARTED })
render(<FileUploader {...defaultProps} fileList={[fileItem]} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('multiple files', () => {
it('should render all files in the list', () => {
const fileList = [
createMockFileItem({ fileID: 'f1', file: createMockFile({ name: 'doc1.pdf' }) }),
createMockFileItem({ fileID: 'f2', file: createMockFile({ name: 'doc2.docx' }) }),
createMockFileItem({ fileID: 'f3', file: createMockFile({ name: 'doc3.txt' }) }),
]
render(<FileUploader {...defaultProps} fileList={fileList} />)
expect(screen.getByText('doc1.pdf')).toBeInTheDocument()
expect(screen.getByText('doc2.docx')).toBeInTheDocument()
expect(screen.getByText('doc3.txt')).toBeInTheDocument()
})
})
describe('styling', () => {
it('should have correct container width', () => {
const { container } = render(<FileUploader {...defaultProps} />)
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('w-[640px]')
})
it('should have proper spacing', () => {
const { container } = render(<FileUploader {...defaultProps} />)
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('mb-5')
})
})
})

View File

@@ -1,10 +1,23 @@
'use client'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { RiDeleteBinLine, RiUploadCloud2Line } from '@remixicon/react'
import * as React from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { getFileUploadErrorMessage } from '@/app/components/base/file-uploader/utils'
import SimplePieChart from '@/app/components/base/simple-pie-chart'
import { ToastContext } from '@/app/components/base/toast'
import { IS_CE_EDITION } from '@/config'
import { useLocale } from '@/context/i18n'
import useTheme from '@/hooks/use-theme'
import { LanguagesSupported } from '@/i18n-config/language'
import { upload } from '@/service/base'
import { useFileSupportTypes, useFileUploadConfig } from '@/service/use-common'
import { Theme } from '@/types/app'
import { cn } from '@/utils/classnames'
import FileListItem from './components/file-list-item'
import UploadDropzone from './components/upload-dropzone'
import { useFileUpload } from './hooks/use-file-upload'
import DocumentFileIcon from '../../common/document-file-icon'
type IFileUploaderProps = {
fileList: FileItem[]
@@ -26,62 +39,358 @@ const FileUploader = ({
supportBatchUpload = false,
}: IFileUploaderProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const locale = useLocale()
const [dragging, setDragging] = useState(false)
const dropRef = useRef<HTMLDivElement>(null)
const dragRef = useRef<HTMLDivElement>(null)
const fileUploader = useRef<HTMLInputElement>(null)
const hideUpload = !supportBatchUpload && fileList.length > 0
const {
dropRef,
dragRef,
fileUploaderRef,
dragging,
fileUploadConfig,
acceptTypes,
supportTypesShowNames,
hideUpload,
selectHandle,
fileChangeHandle,
removeFile,
handlePreview,
} = useFileUpload({
fileList,
prepareFileList,
onFileUpdate,
onFileListUpdate,
onPreview,
supportBatchUpload,
})
const { data: fileUploadConfigResponse } = useFileUploadConfig()
const { data: supportFileTypesResponse } = useFileSupportTypes()
const supportTypes = supportFileTypesResponse?.allowed_extensions || []
const supportTypesShowNames = (() => {
const extensionMap: { [key: string]: string } = {
md: 'markdown',
pptx: 'pptx',
htm: 'html',
xlsx: 'xlsx',
docx: 'docx',
}
return [...supportTypes]
.map(item => extensionMap[item] || item) // map to standardized extension
.map(item => item.toLowerCase()) // convert to lower case
.filter((item, index, self) => self.indexOf(item) === index) // remove duplicates
.map(item => item.toUpperCase()) // convert to upper case
.join(locale !== LanguagesSupported[1] ? ', ' : '、 ')
})()
const ACCEPTS = supportTypes.map((ext: string) => `.${ext}`)
const fileUploadConfig = useMemo(() => ({
file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15,
batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1,
file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 5) : 1,
}), [fileUploadConfigResponse, supportBatchUpload])
const fileListRef = useRef<FileItem[]>([])
// utils
const getFileType = (currentFile: File) => {
if (!currentFile)
return ''
const arr = currentFile.name.split('.')
return arr[arr.length - 1]
}
const getFileSize = (size: number) => {
if (size / 1024 < 10)
return `${(size / 1024).toFixed(2)}KB`
return `${(size / 1024 / 1024).toFixed(2)}MB`
}
const isValid = useCallback((file: File) => {
const { size } = file
const ext = `.${getFileType(file)}`
const isValidType = ACCEPTS.includes(ext.toLowerCase())
if (!isValidType)
notify({ type: 'error', message: t('stepOne.uploader.validation.typeError', { ns: 'datasetCreation' }) })
const isValidSize = size <= fileUploadConfig.file_size_limit * 1024 * 1024
if (!isValidSize)
notify({ type: 'error', message: t('stepOne.uploader.validation.size', { ns: 'datasetCreation', size: fileUploadConfig.file_size_limit }) })
return isValidType && isValidSize
}, [fileUploadConfig, notify, t, ACCEPTS])
const fileUpload = useCallback(async (fileItem: FileItem): Promise<FileItem> => {
const formData = new FormData()
formData.append('file', fileItem.file)
const onProgress = (e: ProgressEvent) => {
if (e.lengthComputable) {
const percent = Math.floor(e.loaded / e.total * 100)
onFileUpdate(fileItem, percent, fileListRef.current)
}
}
return upload({
xhr: new XMLHttpRequest(),
data: formData,
onprogress: onProgress,
}, false, undefined, '?source=datasets')
.then((res) => {
const completeFile = {
fileID: fileItem.fileID,
file: res as unknown as File,
progress: -1,
}
const index = fileListRef.current.findIndex(item => item.fileID === fileItem.fileID)
fileListRef.current[index] = completeFile
onFileUpdate(completeFile, 100, fileListRef.current)
return Promise.resolve({ ...completeFile })
})
.catch((e) => {
const errorMessage = getFileUploadErrorMessage(e, t('stepOne.uploader.failed', { ns: 'datasetCreation' }), t)
notify({ type: 'error', message: errorMessage })
onFileUpdate(fileItem, -2, fileListRef.current)
return Promise.resolve({ ...fileItem })
})
.finally()
}, [fileListRef, notify, onFileUpdate, t])
const uploadBatchFiles = useCallback((bFiles: FileItem[]) => {
bFiles.forEach(bf => (bf.progress = 0))
return Promise.all(bFiles.map(fileUpload))
}, [fileUpload])
const uploadMultipleFiles = useCallback(async (files: FileItem[]) => {
const batchCountLimit = fileUploadConfig.batch_count_limit
const length = files.length
let start = 0
let end = 0
while (start < length) {
if (start + batchCountLimit > length)
end = length
else
end = start + batchCountLimit
const bFiles = files.slice(start, end)
await uploadBatchFiles(bFiles)
start = end
}
}, [fileUploadConfig, uploadBatchFiles])
const initialUpload = useCallback((files: File[]) => {
const filesCountLimit = fileUploadConfig.file_upload_limit
if (!files.length)
return false
if (files.length + fileList.length > filesCountLimit && !IS_CE_EDITION) {
notify({ type: 'error', message: t('stepOne.uploader.validation.filesNumber', { ns: 'datasetCreation', filesNumber: filesCountLimit }) })
return false
}
const preparedFiles = files.map((file, index) => ({
fileID: `file${index}-${Date.now()}`,
file,
progress: -1,
}))
const newFiles = [...fileListRef.current, ...preparedFiles]
prepareFileList(newFiles)
fileListRef.current = newFiles
uploadMultipleFiles(preparedFiles)
}, [prepareFileList, uploadMultipleFiles, notify, t, fileList, fileUploadConfig])
const handleDragEnter = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target !== dragRef.current)
setDragging(true)
}
const handleDragOver = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
}
const handleDragLeave = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target === dragRef.current)
setDragging(false)
}
type FileWithPath = {
relativePath?: string
} & File
const traverseFileEntry = useCallback(
(entry: any, prefix = ''): Promise<FileWithPath[]> => {
return new Promise((resolve) => {
if (entry.isFile) {
entry.file((file: FileWithPath) => {
file.relativePath = `${prefix}${file.name}`
resolve([file])
})
}
else if (entry.isDirectory) {
const reader = entry.createReader()
const entries: any[] = []
const read = () => {
reader.readEntries(async (results: FileSystemEntry[]) => {
if (!results.length) {
const files = await Promise.all(
entries.map(ent =>
traverseFileEntry(ent, `${prefix}${entry.name}/`),
),
)
resolve(files.flat())
}
else {
entries.push(...results)
read()
}
})
}
read()
}
else {
resolve([])
}
})
},
[],
)
const handleDrop = useCallback(
async (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
setDragging(false)
if (!e.dataTransfer)
return
const nested = await Promise.all(
Array.from(e.dataTransfer.items).map((it) => {
const entry = (it as any).webkitGetAsEntry?.()
if (entry)
return traverseFileEntry(entry)
const f = it.getAsFile?.()
return f ? Promise.resolve([f]) : Promise.resolve([])
}),
)
let files = nested.flat()
if (!supportBatchUpload)
files = files.slice(0, 1)
files = files.slice(0, fileUploadConfig.batch_count_limit)
const valid = files.filter(isValid)
initialUpload(valid)
},
[initialUpload, isValid, supportBatchUpload, traverseFileEntry, fileUploadConfig],
)
const selectHandle = () => {
if (fileUploader.current)
fileUploader.current.click()
}
const removeFile = (fileID: string) => {
if (fileUploader.current)
fileUploader.current.value = ''
fileListRef.current = fileListRef.current.filter(item => item.fileID !== fileID)
onFileListUpdate?.([...fileListRef.current])
}
const fileChangeHandle = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
let files = Array.from(e.target.files ?? []) as File[]
files = files.slice(0, fileUploadConfig.batch_count_limit)
initialUpload(files.filter(isValid))
}, [isValid, initialUpload, fileUploadConfig])
const { theme } = useTheme()
const chartColor = useMemo(() => theme === Theme.dark ? '#5289ff' : '#296dff', [theme])
useEffect(() => {
dropRef.current?.addEventListener('dragenter', handleDragEnter)
dropRef.current?.addEventListener('dragover', handleDragOver)
dropRef.current?.addEventListener('dragleave', handleDragLeave)
dropRef.current?.addEventListener('drop', handleDrop)
return () => {
dropRef.current?.removeEventListener('dragenter', handleDragEnter)
dropRef.current?.removeEventListener('dragover', handleDragOver)
dropRef.current?.removeEventListener('dragleave', handleDragLeave)
dropRef.current?.removeEventListener('drop', handleDrop)
}
}, [handleDrop])
return (
<div className="mb-5 w-[640px]">
<div className={cn('mb-1 text-sm font-semibold leading-6 text-text-secondary', titleClassName)}>
{t('stepOne.uploader.title', { ns: 'datasetCreation' })}
</div>
{!hideUpload && (
<UploadDropzone
dropRef={dropRef}
dragRef={dragRef}
fileUploaderRef={fileUploaderRef}
dragging={dragging}
supportBatchUpload={supportBatchUpload}
supportTypesShowNames={supportTypesShowNames}
fileUploadConfig={fileUploadConfig}
acceptTypes={acceptTypes}
onSelectFile={selectHandle}
onFileChange={fileChangeHandle}
<input
ref={fileUploader}
id="fileUploader"
className="hidden"
type="file"
multiple={supportBatchUpload}
accept={ACCEPTS.join(',')}
onChange={fileChangeHandle}
/>
)}
{fileList.length > 0 && (
<div className="max-w-[640px] cursor-default space-y-1">
{fileList.map(fileItem => (
<FileListItem
key={fileItem.fileID}
fileItem={fileItem}
onPreview={handlePreview}
onRemove={removeFile}
/>
))}
<div className={cn('mb-1 text-sm font-semibold leading-6 text-text-secondary', titleClassName)}>{t('stepOne.uploader.title', { ns: 'datasetCreation' })}</div>
{!hideUpload && (
<div ref={dropRef} className={cn('relative mb-2 box-border flex min-h-20 max-w-[640px] flex-col items-center justify-center gap-1 rounded-xl border border-dashed border-components-dropzone-border bg-components-dropzone-bg px-4 py-3 text-xs leading-4 text-text-tertiary', dragging && 'border-components-dropzone-border-accent bg-components-dropzone-bg-accent')}>
<div className="flex min-h-5 items-center justify-center text-sm leading-4 text-text-secondary">
<RiUploadCloud2Line className="mr-2 size-5" />
<span>
{supportBatchUpload ? t('stepOne.uploader.button', { ns: 'datasetCreation' }) : t('stepOne.uploader.buttonSingleFile', { ns: 'datasetCreation' })}
{supportTypes.length > 0 && (
<label className="ml-1 cursor-pointer text-text-accent" onClick={selectHandle}>{t('stepOne.uploader.browse', { ns: 'datasetCreation' })}</label>
)}
</span>
</div>
<div>
{t('stepOne.uploader.tip', {
ns: 'datasetCreation',
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
totalCount: fileUploadConfig.file_upload_limit,
})}
</div>
{dragging && <div ref={dragRef} className="absolute left-0 top-0 h-full w-full" />}
</div>
)}
<div className="max-w-[640px] cursor-default space-y-1">
{fileList.map((fileItem, index) => (
<div
key={`${fileItem.fileID}-${index}`}
onClick={() => fileItem.file?.id && onPreview(fileItem.file)}
className={cn(
'flex h-12 max-w-[640px] items-center rounded-lg border border-components-panel-border bg-components-panel-on-panel-item-bg text-xs leading-3 text-text-tertiary shadow-xs',
// 'border-state-destructive-border bg-state-destructive-hover',
)}
>
<div className="flex w-12 shrink-0 items-center justify-center">
<DocumentFileIcon
size="xl"
className="shrink-0"
name={fileItem.file.name}
extension={getFileType(fileItem.file)}
/>
</div>
<div className="flex shrink grow flex-col gap-0.5">
<div className="flex w-full">
<div className="w-0 grow truncate text-sm leading-4 text-text-secondary">{fileItem.file.name}</div>
</div>
<div className="w-full truncate leading-3 text-text-tertiary">
<span className="uppercase">{getFileType(fileItem.file)}</span>
<span className="px-1 text-text-quaternary">·</span>
<span>{getFileSize(fileItem.file.size)}</span>
{/* <span className='px-1 text-text-quaternary'>·</span>
<span>10k characters</span> */}
</div>
</div>
<div className="flex w-16 shrink-0 items-center justify-end gap-1 pr-3">
{/* <span className="flex justify-center items-center w-6 h-6 cursor-pointer">
<RiErrorWarningFill className='size-4 text-text-warning' />
</span> */}
{(fileItem.progress < 100 && fileItem.progress >= 0) && (
// <div className={s.percent}>{`${fileItem.progress}%`}</div>
<SimplePieChart percentage={fileItem.progress} stroke={chartColor} fill={chartColor} animationDuration={0} />
)}
<span
className="flex h-6 w-6 cursor-pointer items-center justify-center"
onClick={(e) => {
e.stopPropagation()
removeFile(fileItem.fileID)
}}
>
<RiDeleteBinLine className="size-4 text-text-tertiary" />
</span>
</div>
</div>
))}
</div>
</div>
)
}
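A minimal usage sketch of the refactored component, assuming React's useState and the FileItem type are imported as elsewhere in the diff; Demo and the handler bodies are illustrative only:

const Demo = () => {
  const [files, setFiles] = useState<FileItem[]>([])
  return (
    <FileUploader
      fileList={files}
      prepareFileList={setFiles}
      onFileUpdate={(_item, _progress, list) => setFiles([...list])}
      onFileListUpdate={setFiles}
      onPreview={file => console.log(file.name)}
      supportBatchUpload
    />
  )
}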

View File

@@ -1,262 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { render } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import { DataSourceType } from '@/models/datasets'
import { DatasourceType } from '@/models/pipeline'
import DocumentSourceIcon from './document-source-icon'
const createMockDoc = (overrides: Record<string, unknown> = {}): SimpleDocumentDetail => ({
id: 'doc-1',
position: 1,
data_source_type: DataSourceType.FILE,
data_source_info: {},
data_source_detail_dict: {},
dataset_process_rule_id: 'rule-1',
dataset_id: 'dataset-1',
batch: 'batch-1',
name: 'test-document.txt',
created_from: 'web',
created_by: 'user-1',
created_at: Date.now(),
tokens: 100,
indexing_status: 'completed',
error: null,
enabled: true,
disabled_at: null,
disabled_by: null,
archived: false,
archived_reason: null,
archived_by: null,
archived_at: null,
updated_at: Date.now(),
doc_type: null,
doc_metadata: undefined,
doc_language: 'en',
display_status: 'available',
word_count: 100,
hit_count: 10,
doc_form: 'text_model',
...overrides,
}) as unknown as SimpleDocumentDetail
describe('DocumentSourceIcon', () => {
describe('Rendering', () => {
it('should render without crashing', () => {
const doc = createMockDoc()
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
})
describe('Local File Icon', () => {
it('should render FileTypeIcon for FILE data source type', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.FILE,
data_source_info: {
upload_file: { extension: 'pdf' },
},
})
const { container } = render(<DocumentSourceIcon doc={doc} fileType="pdf" />)
const icon = container.querySelector('svg, img')
expect(icon).toBeInTheDocument()
})
it('should render FileTypeIcon for localFile data source type', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.localFile,
created_from: 'rag-pipeline',
data_source_info: {
extension: 'docx',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
const icon = container.querySelector('svg, img')
expect(icon).toBeInTheDocument()
})
it('should use extension from upload_file for legacy data source', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.FILE,
created_from: 'web',
data_source_info: {
upload_file: { extension: 'txt' },
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should use fileType prop as fallback for extension', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.FILE,
created_from: 'web',
data_source_info: {},
})
const { container } = render(<DocumentSourceIcon doc={doc} fileType="csv" />)
expect(container.firstChild).toBeInTheDocument()
})
})
describe('Notion Icon', () => {
it('should render NotionIcon for NOTION data source type', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.NOTION,
created_from: 'web',
data_source_info: {
notion_page_icon: 'https://notion.so/icon.png',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should render NotionIcon for onlineDocument data source type', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDocument,
created_from: 'rag-pipeline',
data_source_info: {
page: { page_icon: 'https://notion.so/icon.png' },
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should use page_icon for rag-pipeline created documents', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.NOTION,
created_from: 'rag-pipeline',
data_source_info: {
page: { page_icon: 'https://notion.so/custom-icon.png' },
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
})
describe('Web Crawl Icon', () => {
it('should render globe icon for WEB data source type', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.WEB,
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
const icon = container.querySelector('svg')
expect(icon).toBeInTheDocument()
expect(icon).toHaveClass('mr-1.5')
expect(icon).toHaveClass('size-4')
})
it('should render globe icon for websiteCrawl data source type', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.websiteCrawl,
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
const icon = container.querySelector('svg')
expect(icon).toBeInTheDocument()
})
})
describe('Online Drive Icon', () => {
it('should render FileTypeIcon for onlineDrive data source type', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: 'document.xlsx',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should extract extension from file name', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: 'spreadsheet.xlsx',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should handle file name without extension', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: 'noextension',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should handle empty file name', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: '',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should handle hidden files (starting with dot)', () => {
const doc = createMockDoc({
data_source_type: DatasourceType.onlineDrive,
data_source_info: {
name: '.gitignore',
},
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
})
describe('Unknown Data Source Type', () => {
it('should return null for unknown data source type', () => {
const doc = createMockDoc({
data_source_type: 'unknown',
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeNull()
})
})
describe('Edge Cases', () => {
it('should handle undefined data_source_info', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.FILE,
data_source_info: undefined,
})
const { container } = render(<DocumentSourceIcon doc={doc} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should memoize the component', () => {
const doc = createMockDoc()
const { rerender, container } = render(<DocumentSourceIcon doc={doc} />)
const firstRender = container.innerHTML
rerender(<DocumentSourceIcon doc={doc} />)
expect(container.innerHTML).toBe(firstRender)
})
})
})

View File

@@ -1,100 +0,0 @@
import type { FC } from 'react'
import type { LegacyDataSourceInfo, LocalFileInfo, OnlineDocumentInfo, OnlineDriveInfo, SimpleDocumentDetail } from '@/models/datasets'
import { RiGlobalLine } from '@remixicon/react'
import * as React from 'react'
import FileTypeIcon from '@/app/components/base/file-uploader/file-type-icon'
import NotionIcon from '@/app/components/base/notion-icon'
import { extensionToFileType } from '@/app/components/datasets/hit-testing/utils/extension-to-file-type'
import { DataSourceType } from '@/models/datasets'
import { DatasourceType } from '@/models/pipeline'
type DocumentSourceIconProps = {
doc: SimpleDocumentDetail
fileType?: string
}
const isLocalFile = (dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.localFile || dataSourceType === DataSourceType.FILE
}
const isOnlineDocument = (dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.onlineDocument || dataSourceType === DataSourceType.NOTION
}
const isWebsiteCrawl = (dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.websiteCrawl || dataSourceType === DataSourceType.WEB
}
const isOnlineDrive = (dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.onlineDrive
}
const isCreateFromRAGPipeline = (createdFrom: string) => {
return createdFrom === 'rag-pipeline'
}
const getFileExtension = (fileName: string): string => {
if (!fileName)
return ''
const parts = fileName.split('.')
if (parts.length <= 1 || (parts[0] === '' && parts.length === 2))
return ''
return parts[parts.length - 1].toLowerCase()
}
const DocumentSourceIcon: FC<DocumentSourceIconProps> = React.memo(({
doc,
fileType,
}) => {
if (isOnlineDocument(doc.data_source_type)) {
return (
<NotionIcon
className="mr-1.5"
type="page"
src={
isCreateFromRAGPipeline(doc.created_from)
? (doc.data_source_info as OnlineDocumentInfo).page.page_icon
: (doc.data_source_info as LegacyDataSourceInfo).notion_page_icon
}
/>
)
}
if (isLocalFile(doc.data_source_type)) {
return (
<FileTypeIcon
type={
extensionToFileType(
isCreateFromRAGPipeline(doc.created_from)
? (doc?.data_source_info as LocalFileInfo)?.extension
: ((doc?.data_source_info as LegacyDataSourceInfo)?.upload_file?.extension ?? fileType),
)
}
className="mr-1.5"
/>
)
}
if (isOnlineDrive(doc.data_source_type)) {
return (
<FileTypeIcon
type={
extensionToFileType(
getFileExtension((doc?.data_source_info as unknown as OnlineDriveInfo)?.name),
)
}
className="mr-1.5"
/>
)
}
if (isWebsiteCrawl(doc.data_source_type)) {
return <RiGlobalLine className="mr-1.5 size-4" />
}
return null
})
DocumentSourceIcon.displayName = 'DocumentSourceIcon'
export default DocumentSourceIcon
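For reference, the edge cases getFileExtension above is written to cover (illustrative calls; expected results shown as comments):

getFileExtension('spreadsheet.xlsx') // 'xlsx'
getFileExtension('archive.tar.gz') // 'gz' (only the last segment counts)
getFileExtension('noextension') // '' (no dot present)
getFileExtension('.gitignore') // '' (a lone leading dot is not an extension)
getFileExtension('') // ''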

View File

@@ -1,342 +0,0 @@
import type { ReactNode } from 'react'
import type { SimpleDocumentDetail } from '@/models/datasets'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DataSourceType } from '@/models/datasets'
import DocumentTableRow from './document-table-row'
const mockPush = vi.fn()
vi.mock('next/navigation', () => ({
useRouter: () => ({
push: mockPush,
}),
}))
const createTestQueryClient = () => new QueryClient({
defaultOptions: {
queries: { retry: false, gcTime: 0 },
mutations: { retry: false },
},
})
const createWrapper = () => {
const queryClient = createTestQueryClient()
return ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
<table>
<tbody>
{children}
</tbody>
</table>
</QueryClientProvider>
)
}
type LocalDoc = SimpleDocumentDetail & { percent?: number }
const createMockDoc = (overrides: Record<string, unknown> = {}): LocalDoc => ({
id: 'doc-1',
position: 1,
data_source_type: DataSourceType.FILE,
data_source_info: {},
data_source_detail_dict: {
upload_file: { name: 'test.txt', extension: 'txt' },
},
dataset_process_rule_id: 'rule-1',
dataset_id: 'dataset-1',
batch: 'batch-1',
name: 'test-document.txt',
created_from: 'web',
created_by: 'user-1',
created_at: Date.now(),
tokens: 100,
indexing_status: 'completed',
error: null,
enabled: true,
disabled_at: null,
disabled_by: null,
archived: false,
archived_reason: null,
archived_by: null,
archived_at: null,
updated_at: Date.now(),
doc_type: null,
doc_metadata: undefined,
doc_language: 'en',
display_status: 'available',
word_count: 500,
hit_count: 10,
doc_form: 'text_model',
...overrides,
}) as unknown as LocalDoc
// Helper to find the custom checkbox div (Checkbox component renders as a div, not a native checkbox)
const findCheckbox = (container: HTMLElement): HTMLElement | null => {
return container.querySelector('[class*="shadow-xs"]')
}
describe('DocumentTableRow', () => {
const defaultProps = {
doc: createMockDoc(),
index: 0,
datasetId: 'dataset-1',
isSelected: false,
isGeneralMode: true,
isQAMode: false,
embeddingAvailable: true,
selectedIds: [],
onSelectOne: vi.fn(),
onSelectedIdChange: vi.fn(),
onShowRenameModal: vi.fn(),
onUpdate: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('Rendering', () => {
it('should render without crashing', () => {
render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByText('test-document.txt')).toBeInTheDocument()
})
it('should render index number correctly', () => {
render(<DocumentTableRow {...defaultProps} index={5} />, { wrapper: createWrapper() })
expect(screen.getByText('6')).toBeInTheDocument()
})
it('should render document name with tooltip', () => {
render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByText('test-document.txt')).toBeInTheDocument()
})
it('should render checkbox element', () => {
const { container } = render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
const checkbox = findCheckbox(container)
expect(checkbox).toBeInTheDocument()
})
})
describe('Selection', () => {
it('should show check icon when isSelected is true', () => {
const { container } = render(<DocumentTableRow {...defaultProps} isSelected />, { wrapper: createWrapper() })
// When selected, the checkbox should have a check icon (RiCheckLine svg)
const checkbox = findCheckbox(container)
expect(checkbox).toBeInTheDocument()
const checkIcon = checkbox?.querySelector('svg')
expect(checkIcon).toBeInTheDocument()
})
it('should not show check icon when isSelected is false', () => {
const { container } = render(<DocumentTableRow {...defaultProps} isSelected={false} />, { wrapper: createWrapper() })
const checkbox = findCheckbox(container)
expect(checkbox).toBeInTheDocument()
// When not selected, there should be no check icon inside the checkbox
const checkIcon = checkbox?.querySelector('svg')
expect(checkIcon).not.toBeInTheDocument()
})
it('should call onSelectOne when checkbox is clicked', () => {
const onSelectOne = vi.fn()
const { container } = render(<DocumentTableRow {...defaultProps} onSelectOne={onSelectOne} />, { wrapper: createWrapper() })
const checkbox = findCheckbox(container)
if (checkbox) {
fireEvent.click(checkbox)
expect(onSelectOne).toHaveBeenCalledWith('doc-1')
}
})
it('should stop propagation when checkbox container is clicked', () => {
const { container } = render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
// Click the div containing the checkbox (which has stopPropagation)
const checkboxContainer = container.querySelector('td')?.querySelector('div')
if (checkboxContainer) {
fireEvent.click(checkboxContainer)
expect(mockPush).not.toHaveBeenCalled()
}
})
})
describe('Row Navigation', () => {
it('should navigate to document detail on row click', () => {
render(<DocumentTableRow {...defaultProps} />, { wrapper: createWrapper() })
const row = screen.getByRole('row')
fireEvent.click(row)
expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-1/documents/doc-1')
})
it('should navigate with correct datasetId and documentId', () => {
render(
<DocumentTableRow
{...defaultProps}
datasetId="custom-dataset"
doc={createMockDoc({ id: 'custom-doc' })}
/>,
{ wrapper: createWrapper() },
)
const row = screen.getByRole('row')
fireEvent.click(row)
expect(mockPush).toHaveBeenCalledWith('/datasets/custom-dataset/documents/custom-doc')
})
})
describe('Word Count Display', () => {
it('should display word count less than 1000 as is', () => {
const doc = createMockDoc({ word_count: 500 })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('500')).toBeInTheDocument()
})
it('should display word count 1000 or more in k format', () => {
const doc = createMockDoc({ word_count: 1500 })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('1.5k')).toBeInTheDocument()
})
it('should display 0 with empty style when word_count is 0', () => {
const doc = createMockDoc({ word_count: 0 })
const { container } = render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
const zeroCells = container.querySelectorAll('.text-text-tertiary')
expect(zeroCells.length).toBeGreaterThan(0)
})
it('should handle undefined word_count', () => {
const doc = createMockDoc({ word_count: undefined as unknown as number })
const { container } = render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(container).toBeInTheDocument()
})
})
describe('Hit Count Display', () => {
it('should display hit count less than 1000 as is', () => {
const doc = createMockDoc({ hit_count: 100 })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('100')).toBeInTheDocument()
})
it('should display hit count 1000 or more in k format', () => {
const doc = createMockDoc({ hit_count: 2500 })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('2.5k')).toBeInTheDocument()
})
it('should display 0 with empty style when hit_count is 0', () => {
const doc = createMockDoc({ hit_count: 0 })
const { container } = render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
const zeroCells = container.querySelectorAll('.text-text-tertiary')
expect(zeroCells.length).toBeGreaterThan(0)
})
})
describe('Chunking Mode', () => {
it('should render ChunkingModeLabel with general mode', () => {
render(<DocumentTableRow {...defaultProps} isGeneralMode isQAMode={false} />, { wrapper: createWrapper() })
// ChunkingModeLabel should be rendered
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should render ChunkingModeLabel with QA mode', () => {
render(<DocumentTableRow {...defaultProps} isGeneralMode={false} isQAMode />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
describe('Summary Status', () => {
it('should render SummaryStatus when summary_index_status is present', () => {
const doc = createMockDoc({ summary_index_status: 'completed' })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should not render SummaryStatus when summary_index_status is absent', () => {
const doc = createMockDoc({ summary_index_status: undefined })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
describe('Rename Action', () => {
it('should call onShowRenameModal when rename button is clicked', () => {
const onShowRenameModal = vi.fn()
const { container } = render(
<DocumentTableRow {...defaultProps} onShowRenameModal={onShowRenameModal} />,
{ wrapper: createWrapper() },
)
// Find the rename button by finding the RiEditLine icon's parent
const renameButtons = container.querySelectorAll('.cursor-pointer.rounded-md')
if (renameButtons.length > 0) {
fireEvent.click(renameButtons[0])
expect(onShowRenameModal).toHaveBeenCalledWith(defaultProps.doc)
expect(mockPush).not.toHaveBeenCalled()
}
})
})
describe('Operations', () => {
it('should pass selectedIds to Operations component', () => {
render(<DocumentTableRow {...defaultProps} selectedIds={['doc-1', 'doc-2']} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should pass onSelectedIdChange to Operations component', () => {
const onSelectedIdChange = vi.fn()
render(<DocumentTableRow {...defaultProps} onSelectedIdChange={onSelectedIdChange} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
describe('Document Source Icon', () => {
it('should render with FILE data source type', () => {
const doc = createMockDoc({ data_source_type: DataSourceType.FILE })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should render with NOTION data source type', () => {
const doc = createMockDoc({
data_source_type: DataSourceType.NOTION,
data_source_info: { notion_page_icon: 'icon.png' },
})
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should render with WEB data source type', () => {
const doc = createMockDoc({ data_source_type: DataSourceType.WEB })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
describe('Edge Cases', () => {
it('should handle document with very long name', () => {
const doc = createMockDoc({ name: `${'a'.repeat(500)}.txt` })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByRole('row')).toBeInTheDocument()
})
it('should handle document with special characters in name', () => {
const doc = createMockDoc({ name: '<script>test</script>.txt' })
render(<DocumentTableRow {...defaultProps} doc={doc} />, { wrapper: createWrapper() })
expect(screen.getByText('<script>test</script>.txt')).toBeInTheDocument()
})
it('should memoize the component', () => {
const wrapper = createWrapper()
const { rerender } = render(<DocumentTableRow {...defaultProps} />, { wrapper })
rerender(<DocumentTableRow {...defaultProps} />)
expect(screen.getByRole('row')).toBeInTheDocument()
})
})
})

View File

@@ -1,152 +0,0 @@
import type { FC } from 'react'
import type { SimpleDocumentDetail } from '@/models/datasets'
import { RiEditLine } from '@remixicon/react'
import { pick } from 'es-toolkit/object'
import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import Checkbox from '@/app/components/base/checkbox'
import Tooltip from '@/app/components/base/tooltip'
import ChunkingModeLabel from '@/app/components/datasets/common/chunking-mode-label'
import Operations from '@/app/components/datasets/documents/components/operations'
import SummaryStatus from '@/app/components/datasets/documents/detail/completed/common/summary-status'
import StatusItem from '@/app/components/datasets/documents/status-item'
import useTimestamp from '@/hooks/use-timestamp'
import { DataSourceType } from '@/models/datasets'
import { formatNumber } from '@/utils/format'
import DocumentSourceIcon from './document-source-icon'
import { renderTdValue } from './utils'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
type DocumentTableRowProps = {
doc: LocalDoc
index: number
datasetId: string
isSelected: boolean
isGeneralMode: boolean
isQAMode: boolean
embeddingAvailable: boolean
selectedIds: string[]
onSelectOne: (docId: string) => void
onSelectedIdChange: (ids: string[]) => void
onShowRenameModal: (doc: LocalDoc) => void
onUpdate: () => void
}
const renderCount = (count: number | undefined) => {
if (!count)
return renderTdValue(0, true)
if (count < 1000)
return count
return `${formatNumber((count / 1000).toFixed(1))}k`
}
const DocumentTableRow: FC<DocumentTableRowProps> = React.memo(({
doc,
index,
datasetId,
isSelected,
isGeneralMode,
isQAMode,
embeddingAvailable,
selectedIds,
onSelectOne,
onSelectedIdChange,
onShowRenameModal,
onUpdate,
}) => {
const { t } = useTranslation()
const { formatTime } = useTimestamp()
const router = useRouter()
const isFile = doc.data_source_type === DataSourceType.FILE
const fileType = isFile ? doc.data_source_detail_dict?.upload_file?.extension : ''
const handleRowClick = useCallback(() => {
router.push(`/datasets/${datasetId}/documents/${doc.id}`)
}, [router, datasetId, doc.id])
const handleCheckboxClick = useCallback((e: React.MouseEvent) => {
e.stopPropagation()
}, [])
const handleRenameClick = useCallback((e: React.MouseEvent) => {
e.stopPropagation()
onShowRenameModal(doc)
}, [doc, onShowRenameModal])
return (
<tr
className="h-8 cursor-pointer border-b border-divider-subtle hover:bg-background-default-hover"
onClick={handleRowClick}
>
<td className="text-left align-middle text-xs text-text-tertiary">
<div className="flex items-center" onClick={handleCheckboxClick}>
<Checkbox
className="mr-2 shrink-0"
checked={isSelected}
onCheck={() => onSelectOne(doc.id)}
/>
{index + 1}
</div>
</td>
<td>
<div className="group mr-6 flex max-w-[460px] items-center hover:mr-0">
<div className="flex shrink-0 items-center">
<DocumentSourceIcon doc={doc} fileType={fileType} />
</div>
<Tooltip popupContent={doc.name}>
<span className="grow-1 truncate text-sm">{doc.name}</span>
</Tooltip>
{doc.summary_index_status && (
<div className="ml-1 hidden shrink-0 group-hover:flex">
<SummaryStatus status={doc.summary_index_status} />
</div>
)}
<div className="hidden shrink-0 group-hover:ml-auto group-hover:flex">
<Tooltip popupContent={t('list.table.rename', { ns: 'datasetDocuments' })}>
<div
className="cursor-pointer rounded-md p-1 hover:bg-state-base-hover"
onClick={handleRenameClick}
>
<RiEditLine className="h-4 w-4 text-text-tertiary" />
</div>
</Tooltip>
</div>
</div>
</td>
<td>
<ChunkingModeLabel
isGeneralMode={isGeneralMode}
isQAMode={isQAMode}
/>
</td>
<td>{renderCount(doc.word_count)}</td>
<td>{renderCount(doc.hit_count)}</td>
<td className="text-[13px] text-text-secondary">
{formatTime(doc.created_at, t('dateTimeFormat', { ns: 'datasetHitTesting' }) as string)}
</td>
<td>
<StatusItem status={doc.display_status} />
</td>
<td>
<Operations
selectedIds={selectedIds}
onSelectedIdChange={onSelectedIdChange}
embeddingAvailable={embeddingAvailable}
datasetId={datasetId}
detail={pick(doc, ['name', 'enabled', 'archived', 'id', 'data_source_type', 'doc_form', 'display_status'])}
onUpdate={onUpdate}
/>
</td>
</tr>
)
})
DocumentTableRow.displayName = 'DocumentTableRow'
export default DocumentTableRow
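
For reference, the count columns above funnel through renderCount, which renders a muted zero for missing values, the raw number below 1,000, and a compact "k" form at or above it. A minimal standalone sketch of that rule follows; the inlined formatNumber is a hypothetical pass-through stub, not the real @/utils/format export:

// Sketch of the threshold logic in renderCount above. formatNumber is
// stubbed as a pass-through here; the real helper adds digit grouping.
const formatNumber = (value: string | number): string => String(value)
const formatCount = (count?: number): string => {
  if (!count)
    return '0' // the component renders this via renderTdValue(0, true), muted
  if (count < 1000)
    return String(count)
  return `${formatNumber((count / 1000).toFixed(1))}k`
}
console.log(formatCount(2500)) // '2.5k', the value asserted in the row tests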

View File

@@ -1,4 +0,0 @@
export { default as DocumentSourceIcon } from './document-source-icon'
export { default as DocumentTableRow } from './document-table-row'
export { default as SortHeader } from './sort-header'
export { renderTdValue } from './utils'

View File

@@ -1,124 +0,0 @@
import { fireEvent, render, screen } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import SortHeader from './sort-header'
describe('SortHeader', () => {
const defaultProps = {
field: 'name' as const,
label: 'File Name',
currentSortField: null,
sortOrder: 'desc' as const,
onSort: vi.fn(),
}
describe('rendering', () => {
it('should render the label', () => {
render(<SortHeader {...defaultProps} />)
expect(screen.getByText('File Name')).toBeInTheDocument()
})
it('should render the sort icon', () => {
const { container } = render(<SortHeader {...defaultProps} />)
const icon = container.querySelector('svg')
expect(icon).toBeInTheDocument()
})
})
describe('inactive state', () => {
it('should have disabled text color when not active', () => {
const { container } = render(<SortHeader {...defaultProps} />)
const icon = container.querySelector('svg')
expect(icon).toHaveClass('text-text-disabled')
})
it('should not be rotated when not active', () => {
const { container } = render(<SortHeader {...defaultProps} />)
const icon = container.querySelector('svg')
expect(icon).not.toHaveClass('rotate-180')
})
})
describe('active state', () => {
it('should have tertiary text color when active', () => {
const { container } = render(
<SortHeader {...defaultProps} currentSortField="name" />,
)
const icon = container.querySelector('svg')
expect(icon).toHaveClass('text-text-tertiary')
})
it('should not be rotated when active and desc', () => {
const { container } = render(
<SortHeader {...defaultProps} currentSortField="name" sortOrder="desc" />,
)
const icon = container.querySelector('svg')
expect(icon).not.toHaveClass('rotate-180')
})
it('should be rotated when active and asc', () => {
const { container } = render(
<SortHeader {...defaultProps} currentSortField="name" sortOrder="asc" />,
)
const icon = container.querySelector('svg')
expect(icon).toHaveClass('rotate-180')
})
})
describe('interaction', () => {
it('should call onSort when clicked', () => {
const onSort = vi.fn()
render(<SortHeader {...defaultProps} onSort={onSort} />)
fireEvent.click(screen.getByText('File Name'))
expect(onSort).toHaveBeenCalledWith('name')
})
it('should call onSort with correct field', () => {
const onSort = vi.fn()
render(<SortHeader {...defaultProps} field="word_count" onSort={onSort} />)
fireEvent.click(screen.getByText('File Name'))
expect(onSort).toHaveBeenCalledWith('word_count')
})
})
describe('different fields', () => {
it('should work with word_count field', () => {
render(
<SortHeader
{...defaultProps}
field="word_count"
label="Words"
currentSortField="word_count"
/>,
)
expect(screen.getByText('Words')).toBeInTheDocument()
})
it('should work with hit_count field', () => {
render(
<SortHeader
{...defaultProps}
field="hit_count"
label="Hit Count"
currentSortField="hit_count"
/>,
)
expect(screen.getByText('Hit Count')).toBeInTheDocument()
})
it('should work with created_at field', () => {
render(
<SortHeader
{...defaultProps}
field="created_at"
label="Upload Time"
currentSortField="created_at"
/>,
)
expect(screen.getByText('Upload Time')).toBeInTheDocument()
})
})
})

View File

@@ -1,44 +0,0 @@
import type { FC } from 'react'
import type { SortField, SortOrder } from '../hooks'
import { RiArrowDownLine } from '@remixicon/react'
import * as React from 'react'
import { cn } from '@/utils/classnames'
type SortHeaderProps = {
field: Exclude<SortField, null>
label: string
currentSortField: SortField
sortOrder: SortOrder
onSort: (field: SortField) => void
}
const SortHeader: FC<SortHeaderProps> = React.memo(({
field,
label,
currentSortField,
sortOrder,
onSort,
}) => {
const isActive = currentSortField === field
const isDesc = isActive && sortOrder === 'desc'
return (
<div
className="flex cursor-pointer items-center hover:text-text-secondary"
onClick={() => onSort(field)}
>
{label}
<RiArrowDownLine
className={cn(
'ml-0.5 h-3 w-3 transition-all',
isActive ? 'text-text-tertiary' : 'text-text-disabled',
isActive && !isDesc ? 'rotate-180' : '',
)}
/>
</div>
)
})
SortHeader.displayName = 'SortHeader'
export default SortHeader
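
As a usage sketch, here is one way the SortHeader above could be wired to the useDocumentSort hook exported from ../hooks; the NameHeaderCell wrapper is hypothetical, while the hook and prop names come from the files in this diff:

import type { FC } from 'react'
import type { SimpleDocumentDetail } from '@/models/datasets'
import * as React from 'react'
import { useDocumentSort } from '../hooks'
import SortHeader from './sort-header'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
// Hypothetical wrapper: one sortable column header driven by the sort hook.
const NameHeaderCell: FC<{ documents: LocalDoc[] }> = ({ documents }) => {
  const { sortField, sortOrder, handleSort } = useDocumentSort({
    documents,
    statusFilterValue: '',
    remoteSortValue: '',
  })
  return (
    <SortHeader
      field="name"
      label="File Name"
      currentSortField={sortField}
      sortOrder={sortOrder}
      onSort={handleSort}
    />
  )
}
export default NameHeaderCell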

View File

@@ -1,90 +0,0 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import { renderTdValue } from './utils'
describe('renderTdValue', () => {
describe('Rendering', () => {
it('should render string value correctly', () => {
const { container } = render(<>{renderTdValue('test value')}</>)
expect(screen.getByText('test value')).toBeInTheDocument()
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
it('should render number value correctly', () => {
const { container } = render(<>{renderTdValue(42)}</>)
expect(screen.getByText('42')).toBeInTheDocument()
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
it('should render zero correctly', () => {
const { container } = render(<>{renderTdValue(0)}</>)
expect(screen.getByText('0')).toBeInTheDocument()
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
})
describe('Null and undefined handling', () => {
it('should render dash for null value', () => {
render(<>{renderTdValue(null)}</>)
expect(screen.getByText('-')).toBeInTheDocument()
})
it('should render dash for null value with empty style', () => {
const { container } = render(<>{renderTdValue(null, true)}</>)
expect(screen.getByText('-')).toBeInTheDocument()
expect(container.querySelector('div')).toHaveClass('text-text-tertiary')
})
})
describe('Empty style', () => {
it('should apply text-text-tertiary class when isEmptyStyle is true', () => {
const { container } = render(<>{renderTdValue('value', true)}</>)
expect(container.querySelector('div')).toHaveClass('text-text-tertiary')
})
it('should apply text-text-secondary class when isEmptyStyle is false', () => {
const { container } = render(<>{renderTdValue('value', false)}</>)
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
it('should apply text-text-secondary class when isEmptyStyle is not provided', () => {
const { container } = render(<>{renderTdValue('value')}</>)
expect(container.querySelector('div')).toHaveClass('text-text-secondary')
})
})
describe('Edge Cases', () => {
it('should handle empty string', () => {
render(<>{renderTdValue('')}</>)
// Empty string should still render but with no visible text
const div = document.querySelector('div')
expect(div).toBeInTheDocument()
})
it('should handle large numbers', () => {
render(<>{renderTdValue(1234567890)}</>)
expect(screen.getByText('1234567890')).toBeInTheDocument()
})
it('should handle negative numbers', () => {
render(<>{renderTdValue(-42)}</>)
expect(screen.getByText('-42')).toBeInTheDocument()
})
it('should handle special characters in string', () => {
render(<>{renderTdValue('<script>alert("xss")</script>')}</>)
expect(screen.getByText('<script>alert("xss")</script>')).toBeInTheDocument()
})
it('should handle unicode characters', () => {
render(<>{renderTdValue('Test Unicode: \u4E2D\u6587')}</>)
expect(screen.getByText('Test Unicode: \u4E2D\u6587')).toBeInTheDocument()
})
it('should handle very long strings', () => {
const longString = 'a'.repeat(1000)
render(<>{renderTdValue(longString)}</>)
expect(screen.getByText(longString)).toBeInTheDocument()
})
})
})

View File

@@ -1,16 +0,0 @@
import type { ReactNode } from 'react'
import { cn } from '@/utils/classnames'
import s from '../../../style.module.css'
export const renderTdValue = (value: string | number | null, isEmptyStyle = false): ReactNode => {
const className = cn(
isEmptyStyle ? 'text-text-tertiary' : 'text-text-secondary',
s.tdValue,
)
return (
<div className={className}>
{value ?? '-'}
</div>
)
}
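
As a quick recap of the fallback behavior the tests above pin down, a short sketch reusing this file's export (testing-library imports as used elsewhere in this diff):

import { render, screen } from '@testing-library/react'
import { renderTdValue } from './utils'
// `value ?? '-'` supplies the dash for null, and isEmptyStyle swaps the
// wrapping div's color class from text-text-secondary to text-text-tertiary.
render(<>{renderTdValue(null, true)}</>)
console.log(screen.getByText('-').className) // includes 'text-text-tertiary'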

View File

@@ -1,4 +0,0 @@
export { useDocumentActions } from './use-document-actions'
export { useDocumentSelection } from './use-document-selection'
export { useDocumentSort } from './use-document-sort'
export type { SortField, SortOrder } from './use-document-sort'

View File

@@ -1,438 +0,0 @@
import type { ReactNode } from 'react'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { act, renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DocumentActionType } from '@/models/datasets'
import * as useDocument from '@/service/knowledge/use-document'
import { useDocumentActions } from './use-document-actions'
vi.mock('@/service/knowledge/use-document')
const mockUseDocumentArchive = vi.mocked(useDocument.useDocumentArchive)
const mockUseDocumentSummary = vi.mocked(useDocument.useDocumentSummary)
const mockUseDocumentEnable = vi.mocked(useDocument.useDocumentEnable)
const mockUseDocumentDisable = vi.mocked(useDocument.useDocumentDisable)
const mockUseDocumentDelete = vi.mocked(useDocument.useDocumentDelete)
const mockUseDocumentBatchRetryIndex = vi.mocked(useDocument.useDocumentBatchRetryIndex)
const mockUseDocumentDownloadZip = vi.mocked(useDocument.useDocumentDownloadZip)
const createTestQueryClient = () => new QueryClient({
defaultOptions: {
queries: { retry: false },
mutations: { retry: false },
},
})
const createWrapper = () => {
const queryClient = createTestQueryClient()
return ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
{children}
</QueryClientProvider>
)
}
describe('useDocumentActions', () => {
const mockMutateAsync = vi.fn()
beforeEach(() => {
vi.clearAllMocks()
// Setup all mocks with default values
const createMockMutation = () => ({
mutateAsync: mockMutateAsync,
isPending: false,
isError: false,
isSuccess: false,
isIdle: true,
data: undefined,
error: null,
mutate: vi.fn(),
reset: vi.fn(),
status: 'idle' as const,
variables: undefined,
context: undefined,
failureCount: 0,
failureReason: null,
submittedAt: 0,
})
mockUseDocumentArchive.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentArchive>)
mockUseDocumentSummary.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentSummary>)
mockUseDocumentEnable.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentEnable>)
mockUseDocumentDisable.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentDisable>)
mockUseDocumentDelete.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentDelete>)
mockUseDocumentBatchRetryIndex.mockReturnValue(createMockMutation() as unknown as ReturnType<typeof useDocument.useDocumentBatchRetryIndex>)
mockUseDocumentDownloadZip.mockReturnValue({
...createMockMutation(),
isPending: false,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
})
describe('handleAction', () => {
it('should call archive mutation when archive action is triggered', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.archive)()
})
expect(mockMutateAsync).toHaveBeenCalledWith({
datasetId: 'ds1',
documentIds: ['doc1'],
})
})
it('should call onUpdate on successful action', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.enable)()
})
await waitFor(() => {
expect(onUpdate).toHaveBeenCalled()
})
})
it('should call onClearSelection on delete action', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.delete)()
})
await waitFor(() => {
expect(onClearSelection).toHaveBeenCalled()
})
})
})
describe('handleBatchReIndex', () => {
it('should call retry index mutation', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1', 'doc2'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchReIndex()
})
expect(mockMutateAsync).toHaveBeenCalledWith({
datasetId: 'ds1',
documentIds: ['doc1', 'doc2'],
})
})
it('should call onClearSelection on success', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchReIndex()
})
await waitFor(() => {
expect(onClearSelection).toHaveBeenCalled()
expect(onUpdate).toHaveBeenCalled()
})
})
})
describe('handleBatchDownload', () => {
it('should not proceed when already downloading', async () => {
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: true,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: ['doc1'],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchDownload()
})
expect(mockMutateAsync).not.toHaveBeenCalled()
})
it('should call download mutation with downloadable ids', async () => {
const mockBlob = new Blob(['test'])
mockMutateAsync.mockResolvedValue(mockBlob)
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: false,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1', 'doc2'],
downloadableSelectedIds: ['doc1'],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchDownload()
})
expect(mockMutateAsync).toHaveBeenCalledWith({
datasetId: 'ds1',
documentIds: ['doc1'],
})
})
})
describe('isDownloadingZip', () => {
it('should reflect isPending state from mutation', () => {
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: true,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: [],
downloadableSelectedIds: [],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
expect(result.current.isDownloadingZip).toBe(true)
})
})
describe('error handling', () => {
it('should show error toast when handleAction fails', async () => {
mockMutateAsync.mockRejectedValue(new Error('Action failed'))
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.archive)()
})
// onUpdate should not be called on error
expect(onUpdate).not.toHaveBeenCalled()
})
it('should show error toast when handleBatchReIndex fails', async () => {
mockMutateAsync.mockRejectedValue(new Error('Re-index failed'))
const onUpdate = vi.fn()
const onClearSelection = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection,
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchReIndex()
})
// onUpdate and onClearSelection should not be called on error
expect(onUpdate).not.toHaveBeenCalled()
expect(onClearSelection).not.toHaveBeenCalled()
})
it('should show error toast when handleBatchDownload fails', async () => {
mockMutateAsync.mockRejectedValue(new Error('Download failed'))
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: false,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: ['doc1'],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchDownload()
})
// Mutation was called but failed
expect(mockMutateAsync).toHaveBeenCalled()
})
it('should show error toast when handleBatchDownload returns null blob', async () => {
mockMutateAsync.mockResolvedValue(null)
mockUseDocumentDownloadZip.mockReturnValue({
mutateAsync: mockMutateAsync,
isPending: false,
} as unknown as ReturnType<typeof useDocument.useDocumentDownloadZip>)
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: ['doc1'],
onUpdate: vi.fn(),
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleBatchDownload()
})
// Mutation was called but returned null
expect(mockMutateAsync).toHaveBeenCalled()
})
})
describe('all action types', () => {
it('should handle summary action', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.summary)()
})
expect(mockMutateAsync).toHaveBeenCalled()
await waitFor(() => {
expect(onUpdate).toHaveBeenCalled()
})
})
it('should handle disable action', async () => {
mockMutateAsync.mockResolvedValue({ result: 'success' })
const onUpdate = vi.fn()
const { result } = renderHook(
() => useDocumentActions({
datasetId: 'ds1',
selectedIds: ['doc1'],
downloadableSelectedIds: [],
onUpdate,
onClearSelection: vi.fn(),
}),
{ wrapper: createWrapper() },
)
await act(async () => {
await result.current.handleAction(DocumentActionType.disable)()
})
expect(mockMutateAsync).toHaveBeenCalled()
await waitFor(() => {
expect(onUpdate).toHaveBeenCalled()
})
})
})
})

View File

@@ -1,126 +0,0 @@
import type { CommonResponse } from '@/models/common'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import { DocumentActionType } from '@/models/datasets'
import {
useDocumentArchive,
useDocumentBatchRetryIndex,
useDocumentDelete,
useDocumentDisable,
useDocumentDownloadZip,
useDocumentEnable,
useDocumentSummary,
} from '@/service/knowledge/use-document'
import { asyncRunSafe } from '@/utils'
import { downloadBlob } from '@/utils/download'
type UseDocumentActionsOptions = {
datasetId: string
selectedIds: string[]
downloadableSelectedIds: string[]
onUpdate: () => void
onClearSelection: () => void
}
/**
* Generate a random ZIP filename for bulk document downloads.
* We intentionally avoid leaking dataset info in the exported archive name.
*/
const generateDocsZipFileName = (): string => {
const randomPart = (typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function')
? crypto.randomUUID()
: `${Date.now().toString(36)}${Math.random().toString(36).slice(2, 10)}`
return `${randomPart}-docs.zip`
}
export const useDocumentActions = ({
datasetId,
selectedIds,
downloadableSelectedIds,
onUpdate,
onClearSelection,
}: UseDocumentActionsOptions) => {
const { t } = useTranslation()
const { mutateAsync: archiveDocument } = useDocumentArchive()
const { mutateAsync: generateSummary } = useDocumentSummary()
const { mutateAsync: enableDocument } = useDocumentEnable()
const { mutateAsync: disableDocument } = useDocumentDisable()
const { mutateAsync: deleteDocument } = useDocumentDelete()
const { mutateAsync: retryIndexDocument } = useDocumentBatchRetryIndex()
const { mutateAsync: requestDocumentsZip, isPending: isDownloadingZip } = useDocumentDownloadZip()
type SupportedActionType
= | typeof DocumentActionType.archive
| typeof DocumentActionType.summary
| typeof DocumentActionType.enable
| typeof DocumentActionType.disable
| typeof DocumentActionType.delete
const actionMutationMap = useMemo(() => ({
[DocumentActionType.archive]: archiveDocument,
[DocumentActionType.summary]: generateSummary,
[DocumentActionType.enable]: enableDocument,
[DocumentActionType.disable]: disableDocument,
[DocumentActionType.delete]: deleteDocument,
} as const), [archiveDocument, generateSummary, enableDocument, disableDocument, deleteDocument])
const handleAction = useCallback((actionName: SupportedActionType) => {
return async () => {
const opApi = actionMutationMap[actionName]
if (!opApi)
return
const [e] = await asyncRunSafe<CommonResponse>(
opApi({ datasetId, documentIds: selectedIds }),
)
if (!e) {
if (actionName === DocumentActionType.delete)
onClearSelection()
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onUpdate()
}
else {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
}
}
}, [actionMutationMap, datasetId, selectedIds, onClearSelection, onUpdate, t])
const handleBatchReIndex = useCallback(async () => {
const [e] = await asyncRunSafe<CommonResponse>(
retryIndexDocument({ datasetId, documentIds: selectedIds }),
)
if (!e) {
onClearSelection()
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onUpdate()
}
else {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
}
}, [retryIndexDocument, datasetId, selectedIds, onClearSelection, onUpdate, t])
const handleBatchDownload = useCallback(async () => {
if (isDownloadingZip)
return
const [e, blob] = await asyncRunSafe(
requestDocumentsZip({ datasetId, documentIds: downloadableSelectedIds }),
)
if (e || !blob) {
Toast.notify({ type: 'error', message: t('actionMsg.downloadUnsuccessfully', { ns: 'common' }) })
return
}
downloadBlob({ data: blob, fileName: generateDocsZipFileName() })
}, [datasetId, downloadableSelectedIds, isDownloadingZip, requestDocumentsZip, t])
return {
handleAction,
handleBatchReIndex,
handleBatchDownload,
isDownloadingZip,
}
}
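
Note the currying in handleAction: it takes the action type and returns the async handler, which is why the tests invoke it as handleAction(DocumentActionType.archive)(). A minimal sketch of wiring the hook into a toolbar; the BatchToolbar component and its buttons are hypothetical:

import type { FC } from 'react'
import * as React from 'react'
import { DocumentActionType } from '@/models/datasets'
import { useDocumentActions } from './use-document-actions'
type BatchToolbarProps = {
  datasetId: string
  selectedIds: string[]
  downloadableSelectedIds: string[]
  onUpdate: () => void
  onClearSelection: () => void
}
// Hypothetical toolbar: each button receives the curried handler directly.
const BatchToolbar: FC<BatchToolbarProps> = (props) => {
  const { handleAction, handleBatchDownload, isDownloadingZip } = useDocumentActions(props)
  return (
    <div>
      <button onClick={handleAction(DocumentActionType.archive)}>Archive</button>
      <button onClick={handleBatchDownload} disabled={isDownloadingZip}>Download ZIP</button>
    </div>
  )
}
export default BatchToolbar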

View File

@@ -1,317 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { act, renderHook } from '@testing-library/react'
import { describe, expect, it, vi } from 'vitest'
import { DataSourceType } from '@/models/datasets'
import { useDocumentSelection } from './use-document-selection'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
const createMockDocument = (overrides: Partial<LocalDoc> = {}): LocalDoc => ({
id: 'doc1',
name: 'Test Document',
data_source_type: DataSourceType.FILE,
data_source_info: {},
data_source_detail_dict: {},
word_count: 100,
hit_count: 10,
created_at: 1000000,
position: 1,
doc_form: 'text_model',
enabled: true,
archived: false,
display_status: 'available',
created_from: 'api',
...overrides,
} as LocalDoc)
describe('useDocumentSelection', () => {
describe('isAllSelected', () => {
it('should return false when documents is empty', () => {
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: [],
selectedIds: [],
onSelectedIdChange,
}),
)
expect(result.current.isAllSelected).toBe(false)
})
it('should return true when all documents are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
expect(result.current.isAllSelected).toBe(true)
})
it('should return false when not all documents are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1'],
onSelectedIdChange,
}),
)
expect(result.current.isAllSelected).toBe(false)
})
})
describe('isSomeSelected', () => {
it('should return false when no documents are selected', () => {
const docs = [createMockDocument({ id: 'doc1' })]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: [],
onSelectedIdChange,
}),
)
expect(result.current.isSomeSelected).toBe(false)
})
it('should return true when some documents are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1'],
onSelectedIdChange,
}),
)
expect(result.current.isSomeSelected).toBe(true)
})
})
describe('onSelectAll', () => {
it('should select all documents when none are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: [],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectAll()
})
expect(onSelectedIdChange).toHaveBeenCalledWith(['doc1', 'doc2'])
})
it('should deselect all when all are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectAll()
})
expect(onSelectedIdChange).toHaveBeenCalledWith([])
})
it('should add to existing selection when some are selected', () => {
const docs = [
createMockDocument({ id: 'doc1' }),
createMockDocument({ id: 'doc2' }),
createMockDocument({ id: 'doc3' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1'],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectAll()
})
expect(onSelectedIdChange).toHaveBeenCalledWith(['doc1', 'doc2', 'doc3'])
})
})
describe('onSelectOne', () => {
it('should add document to selection when not selected', () => {
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: [],
selectedIds: [],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectOne('doc1')
})
expect(onSelectedIdChange).toHaveBeenCalledWith(['doc1'])
})
it('should remove document from selection when already selected', () => {
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: [],
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
act(() => {
result.current.onSelectOne('doc1')
})
expect(onSelectedIdChange).toHaveBeenCalledWith(['doc2'])
})
})
describe('hasErrorDocumentsSelected', () => {
it('should return false when no error documents are selected', () => {
const docs = [
createMockDocument({ id: 'doc1', display_status: 'available' }),
createMockDocument({ id: 'doc2', display_status: 'error' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1'],
onSelectedIdChange,
}),
)
expect(result.current.hasErrorDocumentsSelected).toBe(false)
})
it('should return true when an error document is selected', () => {
const docs = [
createMockDocument({ id: 'doc1', display_status: 'available' }),
createMockDocument({ id: 'doc2', display_status: 'error' }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc2'],
onSelectedIdChange,
}),
)
expect(result.current.hasErrorDocumentsSelected).toBe(true)
})
})
describe('downloadableSelectedIds', () => {
it('should return only FILE type documents from selection', () => {
const docs = [
createMockDocument({ id: 'doc1', data_source_type: DataSourceType.FILE }),
createMockDocument({ id: 'doc2', data_source_type: DataSourceType.NOTION }),
createMockDocument({ id: 'doc3', data_source_type: DataSourceType.FILE }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1', 'doc2', 'doc3'],
onSelectedIdChange,
}),
)
expect(result.current.downloadableSelectedIds).toEqual(['doc1', 'doc3'])
})
it('should return empty array when no FILE documents selected', () => {
const docs = [
createMockDocument({ id: 'doc1', data_source_type: DataSourceType.NOTION }),
createMockDocument({ id: 'doc2', data_source_type: DataSourceType.WEB }),
]
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: docs,
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
expect(result.current.downloadableSelectedIds).toEqual([])
})
})
describe('clearSelection', () => {
it('should call onSelectedIdChange with empty array', () => {
const onSelectedIdChange = vi.fn()
const { result } = renderHook(() =>
useDocumentSelection({
documents: [],
selectedIds: ['doc1', 'doc2'],
onSelectedIdChange,
}),
)
act(() => {
result.current.clearSelection()
})
expect(onSelectedIdChange).toHaveBeenCalledWith([])
})
})
})

View File

@@ -1,66 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { uniq } from 'es-toolkit/array'
import { useCallback, useMemo } from 'react'
import { DataSourceType } from '@/models/datasets'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
type UseDocumentSelectionOptions = {
documents: LocalDoc[]
selectedIds: string[]
onSelectedIdChange: (selectedIds: string[]) => void
}
export const useDocumentSelection = ({
documents,
selectedIds,
onSelectedIdChange,
}: UseDocumentSelectionOptions) => {
const isAllSelected = useMemo(() => {
return documents.length > 0 && documents.every(doc => selectedIds.includes(doc.id))
}, [documents, selectedIds])
const isSomeSelected = useMemo(() => {
return documents.some(doc => selectedIds.includes(doc.id))
}, [documents, selectedIds])
const onSelectAll = useCallback(() => {
if (isAllSelected)
onSelectedIdChange([])
else
onSelectedIdChange(uniq([...selectedIds, ...documents.map(doc => doc.id)]))
}, [isAllSelected, documents, onSelectedIdChange, selectedIds])
const onSelectOne = useCallback((docId: string) => {
onSelectedIdChange(
selectedIds.includes(docId)
? selectedIds.filter(id => id !== docId)
: [...selectedIds, docId],
)
}, [selectedIds, onSelectedIdChange])
const hasErrorDocumentsSelected = useMemo(() => {
return documents.some(doc => selectedIds.includes(doc.id) && doc.display_status === 'error')
}, [documents, selectedIds])
const downloadableSelectedIds = useMemo(() => {
const selectedSet = new Set(selectedIds)
return documents
.filter(doc => selectedSet.has(doc.id) && doc.data_source_type === DataSourceType.FILE)
.map(doc => doc.id)
}, [documents, selectedIds])
const clearSelection = useCallback(() => {
onSelectedIdChange([])
}, [onSelectedIdChange])
return {
isAllSelected,
isSomeSelected,
onSelectAll,
onSelectOne,
hasErrorDocumentsSelected,
downloadableSelectedIds,
clearSelection,
}
}
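
The selection hook is fully controlled: it derives everything from the selectedIds passed in and reports changes only through onSelectedIdChange. A minimal sketch of a caller that owns that state; the useControlledSelection wrapper is hypothetical:

import type { SimpleDocumentDetail } from '@/models/datasets'
import { useState } from 'react'
import { useDocumentSelection } from './use-document-selection'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
// Hypothetical caller: owns selectedIds and lets the hook derive the rest
// (isAllSelected, isSomeSelected, downloadableSelectedIds, and so on).
export const useControlledSelection = (documents: LocalDoc[]) => {
  const [selectedIds, setSelectedIds] = useState<string[]>([])
  const selection = useDocumentSelection({
    documents,
    selectedIds,
    onSelectedIdChange: setSelectedIds,
  })
  return { selectedIds, ...selection }
}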

View File

@@ -1,340 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { act, renderHook } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import { useDocumentSort } from './use-document-sort'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
const createMockDocument = (overrides: Partial<LocalDoc> = {}): LocalDoc => ({
id: 'doc1',
name: 'Test Document',
data_source_type: 'upload_file',
data_source_info: {},
data_source_detail_dict: {},
word_count: 100,
hit_count: 10,
created_at: 1000000,
position: 1,
doc_form: 'text_model',
enabled: true,
archived: false,
display_status: 'available',
created_from: 'api',
...overrides,
} as LocalDoc)
describe('useDocumentSort', () => {
describe('initial state', () => {
it('should return null sortField initially', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
expect(result.current.sortField).toBeNull()
expect(result.current.sortOrder).toBe('desc')
})
it('should return documents unchanged when no sort is applied', () => {
const docs = [
createMockDocument({ id: 'doc1', name: 'B' }),
createMockDocument({ id: 'doc2', name: 'A' }),
]
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
expect(result.current.sortedDocuments).toEqual(docs)
})
})
describe('handleSort', () => {
it('should set sort field when called', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortField).toBe('name')
expect(result.current.sortOrder).toBe('desc')
})
it('should toggle sort order when same field is clicked twice', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortOrder).toBe('desc')
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortOrder).toBe('asc')
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortOrder).toBe('desc')
})
it('should reset to desc when different field is selected', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortOrder).toBe('asc')
act(() => {
result.current.handleSort('word_count')
})
expect(result.current.sortField).toBe('word_count')
expect(result.current.sortOrder).toBe('desc')
})
it('should not change state when null is passed', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort(null)
})
expect(result.current.sortField).toBeNull()
})
})
describe('sorting documents', () => {
const docs = [
createMockDocument({ id: 'doc1', name: 'Banana', word_count: 200, hit_count: 5, created_at: 3000 }),
createMockDocument({ id: 'doc2', name: 'Apple', word_count: 100, hit_count: 10, created_at: 1000 }),
createMockDocument({ id: 'doc3', name: 'Cherry', word_count: 300, hit_count: 1, created_at: 2000 }),
]
it('should sort by name descending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
const names = result.current.sortedDocuments.map(d => d.name)
expect(names).toEqual(['Cherry', 'Banana', 'Apple'])
})
it('should sort by name ascending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
act(() => {
result.current.handleSort('name')
})
const names = result.current.sortedDocuments.map(d => d.name)
expect(names).toEqual(['Apple', 'Banana', 'Cherry'])
})
it('should sort by word_count descending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('word_count')
})
const counts = result.current.sortedDocuments.map(d => d.word_count)
expect(counts).toEqual([300, 200, 100])
})
it('should sort by hit_count ascending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('hit_count')
})
act(() => {
result.current.handleSort('hit_count')
})
const counts = result.current.sortedDocuments.map(d => d.hit_count)
expect(counts).toEqual([1, 5, 10])
})
it('should sort by created_at descending', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('created_at')
})
const times = result.current.sortedDocuments.map(d => d.created_at)
expect(times).toEqual([3000, 2000, 1000])
})
})
describe('status filtering', () => {
const docs = [
createMockDocument({ id: 'doc1', display_status: 'available' }),
createMockDocument({ id: 'doc2', display_status: 'error' }),
createMockDocument({ id: 'doc3', display_status: 'available' }),
]
it('should not filter when statusFilterValue is empty', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
expect(result.current.sortedDocuments.length).toBe(3)
})
it('should not filter when statusFilterValue is all', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: 'all',
remoteSortValue: '',
}),
)
expect(result.current.sortedDocuments.length).toBe(3)
})
})
describe('remoteSortValue reset', () => {
it('should reset sort state when remoteSortValue changes', () => {
const { result, rerender } = renderHook(
({ remoteSortValue }) =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue,
}),
{ initialProps: { remoteSortValue: 'initial' } },
)
act(() => {
result.current.handleSort('name')
})
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortField).toBe('name')
expect(result.current.sortOrder).toBe('asc')
rerender({ remoteSortValue: 'changed' })
expect(result.current.sortField).toBeNull()
expect(result.current.sortOrder).toBe('desc')
})
})
describe('edge cases', () => {
it('should handle documents with missing values', () => {
const docs = [
createMockDocument({ id: 'doc1', name: undefined as unknown as string, word_count: undefined }),
createMockDocument({ id: 'doc2', name: 'Test', word_count: 100 }),
]
const { result } = renderHook(() =>
useDocumentSort({
documents: docs,
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortedDocuments.length).toBe(2)
})
it('should handle empty documents array', () => {
const { result } = renderHook(() =>
useDocumentSort({
documents: [],
statusFilterValue: '',
remoteSortValue: '',
}),
)
act(() => {
result.current.handleSort('name')
})
expect(result.current.sortedDocuments).toEqual([])
})
})
})

View File

@@ -1,102 +0,0 @@
import type { SimpleDocumentDetail } from '@/models/datasets'
import { useCallback, useMemo, useRef, useState } from 'react'
import { normalizeStatusForQuery } from '@/app/components/datasets/documents/status-filter'
export type SortField = 'name' | 'word_count' | 'hit_count' | 'created_at' | null
export type SortOrder = 'asc' | 'desc'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
type UseDocumentSortOptions = {
documents: LocalDoc[]
statusFilterValue: string
remoteSortValue: string
}
export const useDocumentSort = ({
documents,
statusFilterValue,
remoteSortValue,
}: UseDocumentSortOptions) => {
const [sortField, setSortField] = useState<SortField>(null)
const [sortOrder, setSortOrder] = useState<SortOrder>('desc')
const prevRemoteSortValueRef = useRef(remoteSortValue)
// Reset sort when remote sort changes
if (prevRemoteSortValueRef.current !== remoteSortValue) {
prevRemoteSortValueRef.current = remoteSortValue
setSortField(null)
setSortOrder('desc')
}
const handleSort = useCallback((field: SortField) => {
if (field === null)
return
if (sortField === field) {
setSortOrder(prev => prev === 'asc' ? 'desc' : 'asc')
}
else {
setSortField(field)
setSortOrder('desc')
}
}, [sortField])
const sortedDocuments = useMemo(() => {
let filteredDocs = documents
if (statusFilterValue && statusFilterValue !== 'all') {
filteredDocs = filteredDocs.filter(doc =>
typeof doc.display_status === 'string'
&& normalizeStatusForQuery(doc.display_status) === statusFilterValue,
)
}
if (!sortField)
return filteredDocs
const sortedDocs = [...filteredDocs].sort((a, b) => {
let aValue: string | number
let bValue: string | number
switch (sortField) {
case 'name':
aValue = a.name?.toLowerCase() || ''
bValue = b.name?.toLowerCase() || ''
break
case 'word_count':
aValue = a.word_count || 0
bValue = b.word_count || 0
break
case 'hit_count':
aValue = a.hit_count || 0
bValue = b.hit_count || 0
break
case 'created_at':
aValue = a.created_at
bValue = b.created_at
break
default:
return 0
}
if (sortField === 'name') {
const result = (aValue as string).localeCompare(bValue as string)
return sortOrder === 'asc' ? result : -result
}
else {
const result = (aValue as number) - (bValue as number)
return sortOrder === 'asc' ? result : -result
}
})
return sortedDocs
}, [documents, sortField, sortOrder, statusFilterValue])
return {
sortField,
sortOrder,
handleSort,
sortedDocuments,
}
}
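
One detail worth calling out: the hook resets its sort state during render when remoteSortValue changes (the prevRemoteSortValueRef compare-and-set) rather than in an effect, so the reset is already visible in the same render pass. A compact, generic sketch of that pattern; useResettableState and its names are illustrative, not from this codebase:

import { useRef, useState } from 'react'
// Generic form of the render-time reset used above: when resetKey changes,
// state snaps back to initialValue before this render's output is committed.
export function useResettableState<T>(initialValue: T, resetKey: string) {
  const [value, setValue] = useState<T>(initialValue)
  const prevKeyRef = useRef(resetKey)
  if (prevKeyRef.current !== resetKey) {
    prevKeyRef.current = resetKey
    setValue(initialValue)
  }
  return [value, setValue] as const
}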

View File

@@ -1,487 +0,0 @@
import type { ReactNode } from 'react'
import type { Props as PaginationProps } from '@/app/components/base/pagination'
import type { SimpleDocumentDetail } from '@/models/datasets'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ChunkingMode, DataSourceType } from '@/models/datasets'
import DocumentList from '../list'
const mockPush = vi.fn()
vi.mock('next/navigation', () => ({
useRouter: () => ({
push: mockPush,
}),
}))
vi.mock('@/context/dataset-detail', () => ({
useDatasetDetailContextWithSelector: (selector: (state: { dataset: { doc_form: string } }) => unknown) =>
selector({ dataset: { doc_form: ChunkingMode.text } }),
}))
const createTestQueryClient = () => new QueryClient({
defaultOptions: {
queries: { retry: false, gcTime: 0 },
mutations: { retry: false },
},
})
const createWrapper = () => {
const queryClient = createTestQueryClient()
return ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
{children}
</QueryClientProvider>
)
}
const createMockDoc = (overrides: Partial<SimpleDocumentDetail> = {}): SimpleDocumentDetail => ({
id: `doc-${Math.random().toString(36).slice(2, 11)}`,
position: 1,
data_source_type: DataSourceType.FILE,
data_source_info: {},
data_source_detail_dict: {
upload_file: { name: 'test.txt', extension: 'txt' },
},
dataset_process_rule_id: 'rule-1',
batch: 'batch-1',
name: 'test-document.txt',
created_from: 'web',
created_by: 'user-1',
created_at: Date.now(),
tokens: 100,
indexing_status: 'completed',
error: null,
enabled: true,
disabled_at: null,
disabled_by: null,
archived: false,
archived_reason: null,
archived_by: null,
archived_at: null,
updated_at: Date.now(),
doc_type: null,
doc_metadata: undefined,
display_status: 'available',
word_count: 500,
hit_count: 10,
doc_form: 'text_model',
...overrides,
} as SimpleDocumentDetail)
const defaultPagination: PaginationProps = {
current: 1,
onChange: vi.fn(),
total: 100,
}
describe('DocumentList', () => {
const defaultProps = {
embeddingAvailable: true,
documents: [
createMockDoc({ id: 'doc-1', name: 'Document 1.txt', word_count: 100, hit_count: 5 }),
createMockDoc({ id: 'doc-2', name: 'Document 2.txt', word_count: 200, hit_count: 10 }),
createMockDoc({ id: 'doc-3', name: 'Document 3.txt', word_count: 300, hit_count: 15 }),
],
selectedIds: [] as string[],
onSelectedIdChange: vi.fn(),
datasetId: 'dataset-1',
pagination: defaultPagination,
onUpdate: vi.fn(),
onManageMetadata: vi.fn(),
statusFilterValue: '',
remoteSortValue: '',
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('Rendering', () => {
it('should render without crashing', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render all documents', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByText('Document 1.txt')).toBeInTheDocument()
expect(screen.getByText('Document 2.txt')).toBeInTheDocument()
expect(screen.getByText('Document 3.txt')).toBeInTheDocument()
})
it('should render table headers', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByText('#')).toBeInTheDocument()
})
it('should render pagination when total is provided', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// Pagination component should be present
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should not render pagination when total is 0', () => {
const props = {
...defaultProps,
pagination: { ...defaultPagination, total: 0 },
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render empty table when no documents', () => {
const props = { ...defaultProps, documents: [] }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Selection', () => {
// Helper to find checkboxes (custom div components, not native checkboxes)
const findCheckboxes = (container: HTMLElement): NodeListOf<Element> => {
return container.querySelectorAll('[class*="shadow-xs"]')
}
it('should render header checkbox when embeddingAvailable', () => {
const { container } = render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
const checkboxes = findCheckboxes(container)
expect(checkboxes.length).toBeGreaterThan(0)
})
it('should not render header checkbox when embedding not available', () => {
const props = { ...defaultProps, embeddingAvailable: false }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// Row checkboxes should still be there, but header checkbox should be hidden
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should call onSelectedIdChange when select all is clicked', () => {
const onSelectedIdChange = vi.fn()
const props = { ...defaultProps, onSelectedIdChange }
const { container } = render(<DocumentList {...props} />, { wrapper: createWrapper() })
const checkboxes = findCheckboxes(container)
if (checkboxes.length > 0) {
fireEvent.click(checkboxes[0])
expect(onSelectedIdChange).toHaveBeenCalled()
}
})
it('should show all checkboxes as checked when all are selected', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1', 'doc-2', 'doc-3'],
}
const { container } = render(<DocumentList {...props} />, { wrapper: createWrapper() })
const checkboxes = findCheckboxes(container)
// When checked, checkbox should have a check icon (svg) inside
checkboxes.forEach((checkbox) => {
const checkIcon = checkbox.querySelector('svg')
expect(checkIcon).toBeInTheDocument()
})
})
it('should show indeterminate state when some are selected', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
const { container } = render(<DocumentList {...props} />, { wrapper: createWrapper() })
// First checkbox is the header checkbox which should be indeterminate
const checkboxes = findCheckboxes(container)
expect(checkboxes.length).toBeGreaterThan(0)
// Header checkbox should show indeterminate icon, not check icon
// Just verify it's rendered
expect(checkboxes[0]).toBeInTheDocument()
})
it('should call onSelectedIdChange with single document when row checkbox is clicked', () => {
const onSelectedIdChange = vi.fn()
const props = { ...defaultProps, onSelectedIdChange }
const { container } = render(<DocumentList {...props} />, { wrapper: createWrapper() })
// Click the second checkbox (first row checkbox)
const checkboxes = findCheckboxes(container)
if (checkboxes.length > 1) {
fireEvent.click(checkboxes[1])
expect(onSelectedIdChange).toHaveBeenCalled()
}
})
})
describe('Sorting', () => {
it('should render sort headers for sortable columns', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// Find svg icons which indicate sortable columns
const sortIcons = document.querySelectorAll('svg')
expect(sortIcons.length).toBeGreaterThan(0)
})
it('should update sort order when sort header is clicked', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// Find and click a sort header by its parent div containing the label text
const sortableHeaders = document.querySelectorAll('[class*="cursor-pointer"]')
if (sortableHeaders.length > 0) {
fireEvent.click(sortableHeaders[0])
}
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Batch Actions', () => {
it('should show batch action bar when documents are selected', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1', 'doc-2'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// BatchAction component should be visible
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should not show batch action bar when no documents selected', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// BatchAction should not be present
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render batch action bar with archive option', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// BatchAction component should be visible when documents are selected
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render batch action bar with enable option', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render batch action bar with disable option', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render batch action bar with delete option', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should clear selection when cancel is clicked', () => {
const onSelectedIdChange = vi.fn()
const props = {
...defaultProps,
selectedIds: ['doc-1'],
onSelectedIdChange,
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
const cancelButton = screen.queryByRole('button', { name: /cancel/i })
if (cancelButton) {
fireEvent.click(cancelButton)
expect(onSelectedIdChange).toHaveBeenCalledWith([])
}
})
it('should show download option for downloadable documents', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
documents: [
createMockDoc({ id: 'doc-1', data_source_type: DataSourceType.FILE }),
],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// BatchAction should be visible
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should show re-index option for error documents', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
documents: [
createMockDoc({ id: 'doc-1', display_status: 'error' }),
],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// Smoke check: the list still renders when an error document is selected (re-index availability is not asserted here)
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Row Click Navigation', () => {
it('should navigate to document detail when row is clicked', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
const rows = screen.getAllByRole('row')
// First row is header, second row is first document
if (rows.length > 1) {
fireEvent.click(rows[1])
expect(mockPush).toHaveBeenCalledWith('/datasets/dataset-1/documents/doc-1')
}
})
})
describe('Rename Modal', () => {
it('should not show rename modal initially', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// RenameModal should not be visible initially
const modal = screen.queryByRole('dialog')
expect(modal).not.toBeInTheDocument()
})
it('should show rename modal when rename button is clicked', () => {
const { container } = render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
// Find and click the rename button in the first row
const renameButtons = container.querySelectorAll('.cursor-pointer.rounded-md')
if (renameButtons.length > 0) {
fireEvent.click(renameButtons[0])
}
// Clicking rename may open the modal; at minimum the table must keep rendering
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should call onUpdate when document is renamed', () => {
const onUpdate = vi.fn()
const props = { ...defaultProps, onUpdate }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// The handleRenamed callback wraps onUpdate
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Edit Metadata Modal', () => {
it('should handle edit metadata action', () => {
const props = {
...defaultProps,
selectedIds: ['doc-1'],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
const editButton = screen.queryByRole('button', { name: /metadata/i })
if (editButton) {
fireEvent.click(editButton)
}
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should call onManageMetadata when manage metadata is triggered', () => {
const onManageMetadata = vi.fn()
const props = {
...defaultProps,
selectedIds: ['doc-1'],
onManageMetadata,
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
// The onShowManage callback in EditMetadataBatchModal should call hideEditModal then onManageMetadata
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Chunking Mode', () => {
it('should render with general mode', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render with QA mode', () => {
// This test uses the default mock which returns ChunkingMode.text
// The component will compute isQAMode based on doc_form
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should render with parent-child mode', () => {
render(<DocumentList {...defaultProps} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
})
describe('Edge Cases', () => {
it('should handle empty documents array', () => {
const props = { ...defaultProps, documents: [] }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should handle documents with missing optional fields', () => {
const docWithMissingFields = createMockDoc({
word_count: undefined as unknown as number,
hit_count: undefined as unknown as number,
})
const props = {
...defaultProps,
documents: [docWithMissingFields],
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should handle status filter value', () => {
const props = {
...defaultProps,
statusFilterValue: 'completed',
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should handle remote sort value', () => {
const props = {
...defaultProps,
remoteSortValue: 'created_at',
}
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
})
it('should handle large number of documents', () => {
const manyDocs = Array.from({ length: 20 }, (_, i) =>
createMockDoc({ id: `doc-${i}`, name: `Document ${i}.txt` }))
const props = { ...defaultProps, documents: manyDocs }
render(<DocumentList {...props} />, { wrapper: createWrapper() })
expect(screen.getByRole('table')).toBeInTheDocument()
}, 10000)
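// (The 10000 above is the per-test timeout in milliseconds, raised for the 20-row render.)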
})
})

View File

@@ -1,3 +0,0 @@
// Re-export from parent for backwards compatibility
export { default } from '../list'
export { renderTdValue } from './components'

View File

@@ -1,26 +1,67 @@
'use client'
import type { FC } from 'react'
import type { Props as PaginationProps } from '@/app/components/base/pagination'
import type { SimpleDocumentDetail } from '@/models/datasets'
import type { CommonResponse } from '@/models/common'
import type { LegacyDataSourceInfo, LocalFileInfo, OnlineDocumentInfo, OnlineDriveInfo, SimpleDocumentDetail } from '@/models/datasets'
import {
RiArrowDownLine,
RiEditLine,
RiGlobalLine,
} from '@remixicon/react'
import { useBoolean } from 'ahooks'
import { uniq } from 'es-toolkit/array'
import { pick } from 'es-toolkit/object'
import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useCallback, useState } from 'react'
import { useCallback, useEffect, useMemo, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Checkbox from '@/app/components/base/checkbox'
import FileTypeIcon from '@/app/components/base/file-uploader/file-type-icon'
import NotionIcon from '@/app/components/base/notion-icon'
import Pagination from '@/app/components/base/pagination'
import Toast from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import ChunkingModeLabel from '@/app/components/datasets/common/chunking-mode-label'
import { normalizeStatusForQuery } from '@/app/components/datasets/documents/status-filter'
import { extensionToFileType } from '@/app/components/datasets/hit-testing/utils/extension-to-file-type'
import EditMetadataBatchModal from '@/app/components/datasets/metadata/edit-metadata-batch/modal'
import useBatchEditDocumentMetadata from '@/app/components/datasets/metadata/hooks/use-batch-edit-document-metadata'
import { useDatasetDetailContextWithSelector as useDatasetDetailContext } from '@/context/dataset-detail'
import { ChunkingMode, DocumentActionType } from '@/models/datasets'
import useTimestamp from '@/hooks/use-timestamp'
import { ChunkingMode, DataSourceType, DocumentActionType } from '@/models/datasets'
import { DatasourceType } from '@/models/pipeline'
import { useDocumentArchive, useDocumentBatchRetryIndex, useDocumentDelete, useDocumentDisable, useDocumentDownloadZip, useDocumentEnable, useDocumentSummary } from '@/service/knowledge/use-document'
import { asyncRunSafe } from '@/utils'
import { cn } from '@/utils/classnames'
import { downloadBlob } from '@/utils/download'
import { formatNumber } from '@/utils/format'
import BatchAction from '../detail/completed/common/batch-action'
import SummaryStatus from '../detail/completed/common/summary-status'
import StatusItem from '../status-item'
import s from '../style.module.css'
import { DocumentTableRow, renderTdValue, SortHeader } from './document-list/components'
import { useDocumentActions, useDocumentSelection, useDocumentSort } from './document-list/hooks'
import Operations from './operations'
import RenameModal from './rename-modal'
type LocalDoc = SimpleDocumentDetail & { percent?: number }
export const renderTdValue = (value: string | number | null, isEmptyStyle = false) => {
return (
<div className={cn(isEmptyStyle ? 'text-text-tertiary' : 'text-text-secondary', s.tdValue)}>
{value ?? '-'}
</div>
)
}
type DocumentListProps = {
const renderCount = (count: number | undefined) => {
if (!count)
return renderTdValue(0, true)
if (count < 1000)
return count
return `${formatNumber((count / 1000).toFixed(1))}k`
}
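// Derived from the logic above: renderCount(undefined) and renderCount(0) render
// a muted "0"; renderCount(999) returns the number as-is; renderCount(1234) → "1.2k".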
type LocalDoc = SimpleDocumentDetail & { percent?: number }
type IDocumentListProps = {
embeddingAvailable: boolean
documents: LocalDoc[]
selectedIds: string[]
@@ -36,7 +77,7 @@ type DocumentListProps = {
/**
* Document list component including basic information
*/
const DocumentList: FC<DocumentListProps> = ({
const DocumentList: FC<IDocumentListProps> = ({
embeddingAvailable,
documents = [],
selectedIds,
@@ -49,43 +90,20 @@ const DocumentList: FC<DocumentListProps> = ({
remoteSortValue,
}) => {
const { t } = useTranslation()
const { formatTime } = useTimestamp()
const router = useRouter()
const datasetConfig = useDatasetDetailContext(s => s.dataset)
const chunkingMode = datasetConfig?.doc_form
const isGeneralMode = chunkingMode !== ChunkingMode.parentChild
const isQAMode = chunkingMode === ChunkingMode.qa
const [sortField, setSortField] = useState<'name' | 'word_count' | 'hit_count' | 'created_at' | null>(null)
const [sortOrder, setSortOrder] = useState<'asc' | 'desc'>('desc')
// Sorting
const { sortField, sortOrder, handleSort, sortedDocuments } = useDocumentSort({
documents,
statusFilterValue,
remoteSortValue,
})
useEffect(() => {
setSortField(null)
setSortOrder('desc')
}, [remoteSortValue])
// Selection
const {
isAllSelected,
isSomeSelected,
onSelectAll,
onSelectOne,
hasErrorDocumentsSelected,
downloadableSelectedIds,
clearSelection,
} = useDocumentSelection({
documents: sortedDocuments,
selectedIds,
onSelectedIdChange,
})
// Actions
const { handleAction, handleBatchReIndex, handleBatchDownload } = useDocumentActions({
datasetId,
selectedIds,
downloadableSelectedIds,
onUpdate,
onClearSelection: clearSelection,
})
// Batch edit metadata
const {
isShowEditModal,
showEditModal,
@@ -95,26 +113,233 @@ const DocumentList: FC<DocumentListProps> = ({
} = useBatchEditDocumentMetadata({
datasetId,
docList: documents.filter(doc => selectedIds.includes(doc.id)),
selectedDocumentIds: selectedIds,
selectedDocumentIds: selectedIds, // Pass all selected IDs separately
onUpdate,
})
// Rename modal
const localDocs = useMemo(() => {
let filteredDocs = documents
if (statusFilterValue && statusFilterValue !== 'all') {
filteredDocs = filteredDocs.filter(doc =>
typeof doc.display_status === 'string'
&& normalizeStatusForQuery(doc.display_status) === statusFilterValue,
)
}
if (!sortField)
return filteredDocs
const sortedDocs = [...filteredDocs].sort((a, b) => {
let aValue: any
let bValue: any
switch (sortField) {
case 'name':
aValue = a.name?.toLowerCase() || ''
bValue = b.name?.toLowerCase() || ''
break
case 'word_count':
aValue = a.word_count || 0
bValue = b.word_count || 0
break
case 'hit_count':
aValue = a.hit_count || 0
bValue = b.hit_count || 0
break
case 'created_at':
aValue = a.created_at
bValue = b.created_at
break
default:
return 0
}
if (sortField === 'name') {
const result = aValue.localeCompare(bValue)
return sortOrder === 'asc' ? result : -result
}
else {
const result = aValue - bValue
return sortOrder === 'asc' ? result : -result
}
})
return sortedDocs
}, [documents, sortField, sortOrder, statusFilterValue])
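// Comparator semantics: 'name' compares lowercased strings via localeCompare;
// word_count, hit_count and created_at compare numerically; 'desc' negates the result.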
const handleSort = (field: 'name' | 'word_count' | 'hit_count' | 'created_at') => {
if (sortField === field) {
setSortOrder(sortOrder === 'asc' ? 'desc' : 'asc')
}
else {
setSortField(field)
setSortOrder('desc')
}
}
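// Clicking the active column toggles asc/desc; clicking a different column
// activates it and resets the order to the 'desc' default.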
const renderSortHeader = (field: 'name' | 'word_count' | 'hit_count' | 'created_at', label: string) => {
const isActive = sortField === field
const isDesc = isActive && sortOrder === 'desc'
return (
<div className="flex cursor-pointer items-center hover:text-text-secondary" onClick={() => handleSort(field)}>
{label}
<RiArrowDownLine
className={cn('ml-0.5 h-3 w-3 transition-all', isActive ? 'text-text-tertiary' : 'text-text-disabled', isActive && !isDesc ? 'rotate-180' : '')}
/>
</div>
)
}
const [currDocument, setCurrDocument] = useState<LocalDoc | null>(null)
const [isShowRenameModal, {
setTrue: setShowRenameModalTrue,
setFalse: setShowRenameModalFalse,
}] = useBoolean(false)
const handleShowRenameModal = useCallback((doc: LocalDoc) => {
setCurrDocument(doc)
setShowRenameModalTrue()
}, [setShowRenameModalTrue])
const handleRenamed = useCallback(() => {
onUpdate()
}, [onUpdate])
const isAllSelected = useMemo(() => {
return localDocs.length > 0 && localDocs.every(doc => selectedIds.includes(doc.id))
}, [localDocs, selectedIds])
const isSomeSelected = useMemo(() => {
return localDocs.some(doc => selectedIds.includes(doc.id))
}, [localDocs, selectedIds])
const onSelectedAll = useCallback(() => {
if (isAllSelected)
onSelectedIdChange([])
else
onSelectedIdChange(uniq([...selectedIds, ...localDocs.map(doc => doc.id)]))
}, [isAllSelected, localDocs, onSelectedIdChange, selectedIds])
const { mutateAsync: archiveDocument } = useDocumentArchive()
const { mutateAsync: generateSummary } = useDocumentSummary()
const { mutateAsync: enableDocument } = useDocumentEnable()
const { mutateAsync: disableDocument } = useDocumentDisable()
const { mutateAsync: deleteDocument } = useDocumentDelete()
const { mutateAsync: retryIndexDocument } = useDocumentBatchRetryIndex()
const { mutateAsync: requestDocumentsZip, isPending: isDownloadingZip } = useDocumentDownloadZip()
const handleAction = (actionName: DocumentActionType) => {
return async () => {
let opApi
switch (actionName) {
case DocumentActionType.archive:
opApi = archiveDocument
break
case DocumentActionType.summary:
opApi = generateSummary
break
case DocumentActionType.enable:
opApi = enableDocument
break
case DocumentActionType.disable:
opApi = disableDocument
break
default:
opApi = deleteDocument
break
}
const [e] = await asyncRunSafe<CommonResponse>(opApi({ datasetId, documentIds: selectedIds }) as Promise<CommonResponse>)
if (!e) {
if (actionName === DocumentActionType.delete)
onSelectedIdChange([])
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onUpdate()
}
else { Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) }) }
}
}
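// handleAction returns an async closure, so it can be passed straight to the batch
// bar below, e.g. onBatchDelete={handleAction(DocumentActionType.delete)}.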
const handleBatchReIndex = async () => {
const [e] = await asyncRunSafe<CommonResponse>(retryIndexDocument({ datasetId, documentIds: selectedIds }))
if (!e) {
onSelectedIdChange([])
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onUpdate()
}
else {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
}
}
const hasErrorDocumentsSelected = useMemo(() => {
return localDocs.some(doc => selectedIds.includes(doc.id) && doc.display_status === 'error')
}, [localDocs, selectedIds])
const getFileExtension = useCallback((fileName: string): string => {
if (!fileName)
return ''
const parts = fileName.split('.')
if (parts.length <= 1 || (parts[0] === '' && parts.length === 2))
return ''
return parts[parts.length - 1].toLowerCase()
}, [])
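// e.g. getFileExtension('report.PDF') → 'pdf', 'archive.tar.gz' → 'gz';
// '.gitignore' and 'README' both → '' (no usable extension).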
const isCreateFromRAGPipeline = useCallback((createdFrom: string) => {
return createdFrom === 'rag-pipeline'
}, [])
/**
* Calculate the data source type
* DataSourceType: FILE, NOTION, WEB (legacy)
* DatasourceType: localFile, onlineDocument, websiteCrawl, onlineDrive (new)
*/
const isLocalFile = useCallback((dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.localFile || dataSourceType === DataSourceType.FILE
}, [])
const isOnlineDocument = useCallback((dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.onlineDocument || dataSourceType === DataSourceType.NOTION
}, [])
const isWebsiteCrawl = useCallback((dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.websiteCrawl || dataSourceType === DataSourceType.WEB
}, [])
const isOnlineDrive = useCallback((dataSourceType: DataSourceType | DatasourceType) => {
return dataSourceType === DatasourceType.onlineDrive
}, [])
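// Mapping covered by the predicates above: FILE ↔ localFile, NOTION ↔ onlineDocument,
// WEB ↔ websiteCrawl; onlineDrive is new-only and has no legacy counterpart.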
const downloadableSelectedIds = useMemo(() => {
const selectedSet = new Set(selectedIds)
return localDocs
.filter(doc => selectedSet.has(doc.id) && doc.data_source_type === DataSourceType.FILE)
.map(doc => doc.id)
}, [localDocs, selectedIds])
/**
* Generate a random ZIP filename for bulk document downloads.
* We intentionally avoid leaking dataset info in the exported archive name.
*/
const generateDocsZipFileName = useCallback((): string => {
// Prefer UUID for uniqueness; fall back to time+random when unavailable.
const randomPart = (typeof crypto !== 'undefined' && typeof crypto.randomUUID === 'function')
? crypto.randomUUID()
: `${Date.now().toString(36)}${Math.random().toString(36).slice(2, 10)}`
return `${randomPart}-docs.zip`
}, [])
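// Illustrative results: '<uuid>-docs.zip' when crypto.randomUUID is available,
// otherwise '<time-base36><random-base36>-docs.zip'.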
const handleBatchDownload = useCallback(async () => {
if (isDownloadingZip)
return
// Download as a single ZIP to avoid browser caps on multiple automatic downloads.
const [e, blob] = await asyncRunSafe(requestDocumentsZip({ datasetId, documentIds: downloadableSelectedIds }))
if (e || !blob) {
Toast.notify({ type: 'error', message: t('actionMsg.downloadUnsuccessfully', { ns: 'common' }) })
return
}
downloadBlob({ data: blob, fileName: generateDocsZipFileName() })
}, [datasetId, downloadableSelectedIds, generateDocsZipFileName, isDownloadingZip, requestDocumentsZip, t])
return (
<div className="relative mt-3 flex h-full w-full flex-col">
<div className="relative h-0 grow overflow-x-auto">
@@ -128,76 +353,157 @@ const DocumentList: FC<DocumentListProps> = ({
className="mr-2 shrink-0"
checked={isAllSelected}
indeterminate={!isAllSelected && isSomeSelected}
onCheck={onSelectAll}
onCheck={onSelectedAll}
/>
)}
#
</div>
</td>
<td>
<SortHeader
field="name"
label={t('list.table.header.fileName', { ns: 'datasetDocuments' })}
currentSortField={sortField}
sortOrder={sortOrder}
onSort={handleSort}
/>
{renderSortHeader('name', t('list.table.header.fileName', { ns: 'datasetDocuments' }))}
</td>
<td className="w-[130px]">{t('list.table.header.chunkingMode', { ns: 'datasetDocuments' })}</td>
<td className="w-24">
<SortHeader
field="word_count"
label={t('list.table.header.words', { ns: 'datasetDocuments' })}
currentSortField={sortField}
sortOrder={sortOrder}
onSort={handleSort}
/>
{renderSortHeader('word_count', t('list.table.header.words', { ns: 'datasetDocuments' }))}
</td>
<td className="w-44">
<SortHeader
field="hit_count"
label={t('list.table.header.hitCount', { ns: 'datasetDocuments' })}
currentSortField={sortField}
sortOrder={sortOrder}
onSort={handleSort}
/>
{renderSortHeader('hit_count', t('list.table.header.hitCount', { ns: 'datasetDocuments' }))}
</td>
<td className="w-44">
<SortHeader
field="created_at"
label={t('list.table.header.uploadTime', { ns: 'datasetDocuments' })}
currentSortField={sortField}
sortOrder={sortOrder}
onSort={handleSort}
/>
{renderSortHeader('created_at', t('list.table.header.uploadTime', { ns: 'datasetDocuments' }))}
</td>
<td className="w-40">{t('list.table.header.status', { ns: 'datasetDocuments' })}</td>
<td className="w-20">{t('list.table.header.action', { ns: 'datasetDocuments' })}</td>
</tr>
</thead>
<tbody className="text-text-secondary">
{sortedDocuments.map((doc, index) => (
<DocumentTableRow
key={doc.id}
doc={doc}
index={index}
datasetId={datasetId}
isSelected={selectedIds.includes(doc.id)}
isGeneralMode={isGeneralMode}
isQAMode={isQAMode}
embeddingAvailable={embeddingAvailable}
selectedIds={selectedIds}
onSelectOne={onSelectOne}
onSelectedIdChange={onSelectedIdChange}
onShowRenameModal={handleShowRenameModal}
onUpdate={onUpdate}
/>
))}
{localDocs.map((doc, index) => {
const isFile = isLocalFile(doc.data_source_type)
const fileType = isFile ? doc.data_source_detail_dict?.upload_file?.extension : ''
return (
<tr
key={doc.id}
className="h-8 cursor-pointer border-b border-divider-subtle hover:bg-background-default-hover"
onClick={() => {
router.push(`/datasets/${datasetId}/documents/${doc.id}`)
}}
>
<td className="text-left align-middle text-xs text-text-tertiary">
<div className="flex items-center" onClick={e => e.stopPropagation()}>
<Checkbox
className="mr-2 shrink-0"
checked={selectedIds.includes(doc.id)}
onCheck={() => {
onSelectedIdChange(
selectedIds.includes(doc.id)
? selectedIds.filter(id => id !== doc.id)
: [...selectedIds, doc.id],
)
}}
/>
{index + 1}
</div>
</td>
<td>
<div className="group mr-6 flex max-w-[460px] items-center hover:mr-0">
<div className="flex shrink-0 items-center">
{isOnlineDocument(doc.data_source_type) && (
<NotionIcon
className="mr-1.5"
type="page"
src={
isCreateFromRAGPipeline(doc.created_from)
? (doc.data_source_info as OnlineDocumentInfo).page.page_icon
: (doc.data_source_info as LegacyDataSourceInfo).notion_page_icon
}
/>
)}
{isLocalFile(doc.data_source_type) && (
<FileTypeIcon
type={
extensionToFileType(
isCreateFromRAGPipeline(doc.created_from)
? (doc?.data_source_info as LocalFileInfo)?.extension
: ((doc?.data_source_info as LegacyDataSourceInfo)?.upload_file?.extension ?? fileType),
)
}
className="mr-1.5"
/>
)}
{isOnlineDrive(doc.data_source_type) && (
<FileTypeIcon
type={
extensionToFileType(
getFileExtension((doc?.data_source_info as unknown as OnlineDriveInfo)?.name),
)
}
className="mr-1.5"
/>
)}
{isWebsiteCrawl(doc.data_source_type) && (
<RiGlobalLine className="mr-1.5 size-4" />
)}
</div>
<Tooltip
popupContent={doc.name}
>
<span className="grow-1 truncate text-sm">{doc.name}</span>
</Tooltip>
{
doc.summary_index_status && (
<div className="ml-1 hidden shrink-0 group-hover:flex">
<SummaryStatus status={doc.summary_index_status} />
</div>
)
}
<div className="hidden shrink-0 group-hover:ml-auto group-hover:flex">
<Tooltip
popupContent={t('list.table.rename', { ns: 'datasetDocuments' })}
>
<div
className="cursor-pointer rounded-md p-1 hover:bg-state-base-hover"
onClick={(e) => {
e.stopPropagation()
handleShowRenameModal(doc)
}}
>
<RiEditLine className="h-4 w-4 text-text-tertiary" />
</div>
</Tooltip>
</div>
</div>
</td>
<td>
<ChunkingModeLabel
isGeneralMode={isGeneralMode}
isQAMode={isQAMode}
/>
</td>
<td>{renderCount(doc.word_count)}</td>
<td>{renderCount(doc.hit_count)}</td>
<td className="text-[13px] text-text-secondary">
{formatTime(doc.created_at, t('dateTimeFormat', { ns: 'datasetHitTesting' }) as string)}
</td>
<td>
<StatusItem status={doc.display_status} />
</td>
<td>
<Operations
selectedIds={selectedIds}
onSelectedIdChange={onSelectedIdChange}
embeddingAvailable={embeddingAvailable}
datasetId={datasetId}
detail={pick(doc, ['name', 'enabled', 'archived', 'id', 'data_source_type', 'doc_form', 'display_status'])}
onUpdate={onUpdate}
/>
</td>
</tr>
)
})}
</tbody>
</table>
</div>
{selectedIds.length > 0 && (
{(selectedIds.length > 0) && (
<BatchAction
className="absolute bottom-16 left-0 z-20"
selectedIds={selectedIds}
@@ -209,10 +515,12 @@ const DocumentList: FC<DocumentListProps> = ({
onBatchDelete={handleAction(DocumentActionType.delete)}
onEditMetadata={showEditModal}
onBatchReIndex={hasErrorDocumentsSelected ? handleBatchReIndex : undefined}
onCancel={clearSelection}
onCancel={() => {
onSelectedIdChange([])
}}
/>
)}
{/* Show Pagination only when there are results (pagination.total > 0) */}
{!!pagination.total && (
<Pagination
{...pagination}
@@ -248,5 +556,3 @@ const DocumentList: FC<DocumentListProps> = ({
}
export default DocumentList
export { renderTdValue }

View File

@@ -1,351 +0,0 @@
import type { FileListItemProps } from './file-list-item'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
import FileListItem from './file-list-item'
// Mock theme hook - can be changed per test
let mockTheme = 'light'
vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: mockTheme }),
}))
// Mock theme types
vi.mock('@/types/app', () => ({
Theme: { dark: 'dark', light: 'light' },
}))
// Mock SimplePieChart with dynamic import handling
vi.mock('next/dynamic', () => ({
default: () => {
const DynamicComponent = ({ percentage, stroke, fill }: { percentage: number, stroke: string, fill: string }) => (
<div data-testid="pie-chart" data-percentage={percentage} data-stroke={stroke} data-fill={fill}>
Pie Chart:
{' '}
{percentage}
%
</div>
)
DynamicComponent.displayName = 'SimplePieChart'
return DynamicComponent
},
}))
// Mock DocumentFileIcon
vi.mock('@/app/components/datasets/common/document-file-icon', () => ({
default: ({ name, extension, size }: { name: string, extension: string, size: string }) => (
<div data-testid="document-icon" data-name={name} data-extension={extension} data-size={size}>
Document Icon
</div>
),
}))
describe('FileListItem', () => {
const createMockFile = (overrides: Partial<File> = {}): File => ({
name: 'test-document.pdf',
size: 1024 * 100, // 100KB
type: 'application/pdf',
lastModified: Date.now(),
...overrides,
} as File)
const createMockFileItem = (overrides: Partial<FileItem> = {}): FileItem => ({
fileID: 'file-123',
file: createMockFile(overrides.file as Partial<File>),
progress: PROGRESS_NOT_STARTED,
...overrides,
})
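// For reference, createMockFileItem({ progress: 50 }) yields
// { fileID: 'file-123', file: <100 KB test-document.pdf>, progress: 50 }.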
const defaultProps: FileListItemProps = {
fileItem: createMockFileItem(),
onPreview: vi.fn(),
onRemove: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render the file item container', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('flex', 'h-12', 'items-center', 'rounded-lg')
})
it('should render document icon with correct props', () => {
render(<FileListItem {...defaultProps} />)
const icon = screen.getByTestId('document-icon')
expect(icon).toBeInTheDocument()
expect(icon).toHaveAttribute('data-name', 'test-document.pdf')
expect(icon).toHaveAttribute('data-extension', 'pdf')
expect(icon).toHaveAttribute('data-size', 'lg')
})
it('should render file name', () => {
render(<FileListItem {...defaultProps} />)
expect(screen.getByText('test-document.pdf')).toBeInTheDocument()
})
it('should render file extension in uppercase via CSS class', () => {
render(<FileListItem {...defaultProps} />)
// Extension is rendered in lowercase but styled with uppercase CSS
const extensionSpan = screen.getByText('pdf')
expect(extensionSpan).toBeInTheDocument()
expect(extensionSpan).toHaveClass('uppercase')
})
it('should render file size', () => {
render(<FileListItem {...defaultProps} />)
// 100KB (102400 bytes) formatted with formatFileSize
expect(screen.getByText('100.00 KB')).toBeInTheDocument()
})
it('should render delete button', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const deleteButton = container.querySelector('.cursor-pointer')
expect(deleteButton).toBeInTheDocument()
})
})
describe('progress states', () => {
it('should show progress chart when uploading (0-99)', () => {
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toBeInTheDocument()
expect(pieChart).toHaveAttribute('data-percentage', '50')
})
it('should show progress chart at 0%', () => {
const fileItem = createMockFileItem({ progress: 0 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-percentage', '0')
})
it('should not show progress chart when complete (100)', () => {
const fileItem = createMockFileItem({ progress: 100 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
it('should not show progress chart when not started (-1)', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_NOT_STARTED })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('error state', () => {
it('should show error icon when progress is PROGRESS_ERROR', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_ERROR })
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const errorIcon = container.querySelector('.text-text-destructive')
expect(errorIcon).toBeInTheDocument()
})
it('should apply error styling to container', () => {
const fileItem = createMockFileItem({ progress: PROGRESS_ERROR })
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('border-state-destructive-border', 'bg-state-destructive-hover')
})
it('should not show error styling when not in error state', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).not.toHaveClass('border-state-destructive-border')
})
})
describe('theme handling', () => {
it('should use correct chart color for light theme', () => {
mockTheme = 'light'
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-stroke', '#296dff')
expect(pieChart).toHaveAttribute('data-fill', '#296dff')
})
it('should use correct chart color for dark theme', () => {
mockTheme = 'dark'
const fileItem = createMockFileItem({ progress: 50 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const pieChart = screen.getByTestId('pie-chart')
expect(pieChart).toHaveAttribute('data-stroke', '#5289ff')
expect(pieChart).toHaveAttribute('data-fill', '#5289ff')
})
})
describe('event handlers', () => {
it('should call onPreview when item is clicked', () => {
const onPreview = vi.fn()
const fileItem = createMockFileItem()
render(<FileListItem {...defaultProps} fileItem={fileItem} onPreview={onPreview} />)
const item = screen.getByText('test-document.pdf').closest('[class*="flex h-12"]')!
fireEvent.click(item)
expect(onPreview).toHaveBeenCalledTimes(1)
expect(onPreview).toHaveBeenCalledWith(fileItem.file)
})
it('should call onRemove when delete button is clicked', () => {
const onRemove = vi.fn()
const fileItem = createMockFileItem()
const { container } = render(<FileListItem {...defaultProps} fileItem={fileItem} onRemove={onRemove} />)
const deleteButton = container.querySelector('.cursor-pointer')!
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledTimes(1)
expect(onRemove).toHaveBeenCalledWith('file-123')
})
it('should stop propagation when delete button is clicked', () => {
const onPreview = vi.fn()
const onRemove = vi.fn()
const { container } = render(<FileListItem {...defaultProps} onPreview={onPreview} onRemove={onRemove} />)
const deleteButton = container.querySelector('.cursor-pointer')!
fireEvent.click(deleteButton)
expect(onRemove).toHaveBeenCalledTimes(1)
expect(onPreview).not.toHaveBeenCalled()
})
})
describe('file type handling', () => {
it('should handle files with multiple dots in name', () => {
const fileItem = createMockFileItem({
file: createMockFile({ name: 'my.document.file.docx' }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('my.document.file.docx')).toBeInTheDocument()
// Extension is lowercase with uppercase CSS class
expect(screen.getByText('docx')).toBeInTheDocument()
})
it('should handle files without extension', () => {
const fileItem = createMockFileItem({
file: createMockFile({ name: 'README' }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
// getFileType returns 'README' when there's no extension (last part after split)
expect(screen.getAllByText('README')).toHaveLength(2) // filename and extension
})
it('should handle various file extensions', () => {
const extensions = ['txt', 'md', 'json', 'csv', 'xlsx']
extensions.forEach((ext) => {
const fileItem = createMockFileItem({
file: createMockFile({ name: `file.${ext}` }),
})
const { unmount } = render(<FileListItem {...defaultProps} fileItem={fileItem} />)
// Extension is rendered in lowercase with uppercase CSS class
expect(screen.getByText(ext)).toBeInTheDocument()
unmount()
})
})
})
describe('file size display', () => {
it('should display size in KB for small files', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 5 * 1024 }), // 5KB
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('5.00 KB')).toBeInTheDocument()
})
it('should display size in MB for larger files', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 5 * 1024 * 1024 }), // 5MB
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('5.00 MB')).toBeInTheDocument()
})
it('should display size at threshold (10KB)', () => {
const fileItem = createMockFileItem({
file: createMockFile({ size: 10 * 1024 }), // 10KB
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByText('10.00 KB')).toBeInTheDocument()
})
})
describe('upload progress values', () => {
it('should show chart at progress 1', () => {
const fileItem = createMockFileItem({ progress: 1 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByTestId('pie-chart')).toBeInTheDocument()
})
it('should show chart at progress 99', () => {
const fileItem = createMockFileItem({ progress: 99 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.getByTestId('pie-chart')).toHaveAttribute('data-percentage', '99')
})
it('should not show chart at progress 100', () => {
const fileItem = createMockFileItem({ progress: 100 })
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
expect(screen.queryByTestId('pie-chart')).not.toBeInTheDocument()
})
})
describe('styling', () => {
it('should have proper shadow styling', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('shadow-xs')
})
it('should have proper border styling', () => {
const { container } = render(<FileListItem {...defaultProps} />)
const item = container.firstChild as HTMLElement
expect(item).toHaveClass('border', 'border-components-panel-border')
})
it('should truncate long file names', () => {
const longFileName = 'this-is-a-very-long-file-name-that-should-be-truncated.pdf'
const fileItem = createMockFileItem({
file: createMockFile({ name: longFileName }),
})
render(<FileListItem {...defaultProps} fileItem={fileItem} />)
const nameElement = screen.getByText(longFileName)
expect(nameElement).toHaveClass('truncate')
})
})
})

View File

@@ -1,85 +0,0 @@
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { RiDeleteBinLine, RiErrorWarningFill } from '@remixicon/react'
import dynamic from 'next/dynamic'
import { useMemo } from 'react'
import DocumentFileIcon from '@/app/components/datasets/common/document-file-icon'
import { getFileType } from '@/app/components/datasets/common/image-uploader/utils'
import useTheme from '@/hooks/use-theme'
import { Theme } from '@/types/app'
import { cn } from '@/utils/classnames'
import { formatFileSize } from '@/utils/format'
import { PROGRESS_ERROR } from '../constants'
const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), { ssr: false })
export type FileListItemProps = {
fileItem: FileItem
onPreview: (file: File) => void
onRemove: (fileID: string) => void
}
const FileListItem = ({
fileItem,
onPreview,
onRemove,
}: FileListItemProps) => {
const { theme } = useTheme()
const chartColor = useMemo(() => theme === Theme.dark ? '#5289ff' : '#296dff', [theme])
const isUploading = fileItem.progress >= 0 && fileItem.progress < 100
const isError = fileItem.progress === PROGRESS_ERROR
const handleClick = () => {
onPreview(fileItem.file)
}
const handleRemove = (e: React.MouseEvent) => {
e.stopPropagation()
onRemove(fileItem.fileID)
}
return (
<div
onClick={handleClick}
className={cn(
'flex h-12 items-center rounded-lg border border-components-panel-border bg-components-panel-on-panel-item-bg shadow-xs shadow-shadow-shadow-4',
isError && 'border-state-destructive-border bg-state-destructive-hover',
)}
>
<div className="flex w-12 shrink-0 items-center justify-center">
<DocumentFileIcon
size="lg"
className="shrink-0"
name={fileItem.file.name}
extension={getFileType(fileItem.file)}
/>
</div>
<div className="flex shrink grow flex-col gap-0.5">
<div className="flex w-full">
<div className="w-0 grow truncate text-xs text-text-secondary">{fileItem.file.name}</div>
</div>
<div className="w-full truncate text-2xs leading-3 text-text-tertiary">
<span className="uppercase">{getFileType(fileItem.file)}</span>
<span className="px-1 text-text-quaternary">·</span>
<span>{formatFileSize(fileItem.file.size)}</span>
</div>
</div>
<div className="flex w-16 shrink-0 items-center justify-end gap-1 pr-3">
{isUploading && (
<SimplePieChart percentage={fileItem.progress} stroke={chartColor} fill={chartColor} animationDuration={0} />
)}
{isError && (
<RiErrorWarningFill className="size-4 text-text-destructive" />
)}
<span
className="flex h-6 w-6 cursor-pointer items-center justify-center"
onClick={handleRemove}
>
<RiDeleteBinLine className="size-4 text-text-tertiary" />
</span>
</div>
</div>
)
}
export default FileListItem

View File

@@ -1,231 +0,0 @@
import type { RefObject } from 'react'
import type { UploadDropzoneProps } from './upload-dropzone'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import UploadDropzone from './upload-dropzone'
// Helper to create mock ref objects for testing
const createMockRef = <T,>(value: T | null = null): RefObject<T | null> => ({ current: value })
// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string, options?: { ns?: string }) => {
const translations: Record<string, string> = {
'stepOne.uploader.button': 'Drag and drop files, or',
'stepOne.uploader.buttonSingleFile': 'Drag and drop file, or',
'stepOne.uploader.browse': 'Browse',
'stepOne.uploader.tip': 'Supports {{supportTypes}}, Max {{size}}MB each, up to {{batchCount}} files at a time, {{totalCount}} files total',
}
let result = translations[key] || key
if (options && typeof options === 'object') {
Object.entries(options).forEach(([k, v]) => {
result = result.replace(`{{${k}}}`, String(v))
})
}
return result
},
}),
}))
describe('UploadDropzone', () => {
const defaultProps: UploadDropzoneProps = {
dropRef: createMockRef<HTMLDivElement>() as RefObject<HTMLDivElement | null>,
dragRef: createMockRef<HTMLDivElement>() as RefObject<HTMLDivElement | null>,
fileUploaderRef: createMockRef<HTMLInputElement>() as RefObject<HTMLInputElement | null>,
dragging: false,
supportBatchUpload: true,
supportTypesShowNames: 'PDF, DOCX, TXT',
fileUploadConfig: {
file_size_limit: 15,
batch_count_limit: 5,
file_upload_limit: 10,
},
acceptTypes: ['.pdf', '.docx', '.txt'],
onSelectFile: vi.fn(),
onFileChange: vi.fn(),
allowedExtensions: ['pdf', 'docx', 'txt'],
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('rendering', () => {
it('should render the dropzone container', () => {
const { container } = render(<UploadDropzone {...defaultProps} />)
const dropzone = container.querySelector('[class*="border-dashed"]')
expect(dropzone).toBeInTheDocument()
})
it('should render hidden file input', () => {
render(<UploadDropzone {...defaultProps} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toBeInTheDocument()
expect(input).toHaveClass('hidden')
expect(input).toHaveAttribute('type', 'file')
})
it('should render upload icon', () => {
render(<UploadDropzone {...defaultProps} />)
const icon = document.querySelector('svg')
expect(icon).toBeInTheDocument()
})
it('should render browse label when extensions are allowed', () => {
render(<UploadDropzone {...defaultProps} />)
expect(screen.getByText('Browse')).toBeInTheDocument()
})
it('should not render browse label when no extensions allowed', () => {
render(<UploadDropzone {...defaultProps} allowedExtensions={[]} />)
expect(screen.queryByText('Browse')).not.toBeInTheDocument()
})
it('should render file size and count limits', () => {
render(<UploadDropzone {...defaultProps} />)
const tipText = screen.getByText(/Supports.*Max.*15MB/i)
expect(tipText).toBeInTheDocument()
})
})
describe('file input configuration', () => {
it('should allow multiple files when supportBatchUpload is true', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={true} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('multiple')
})
it('should not allow multiple files when supportBatchUpload is false', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={false} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).not.toHaveAttribute('multiple')
})
it('should set accept attribute with correct types', () => {
render(<UploadDropzone {...defaultProps} acceptTypes={['.pdf', '.docx']} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('accept', '.pdf,.docx')
})
})
describe('text content', () => {
it('should show batch upload text when supportBatchUpload is true', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={true} />)
expect(screen.getByText(/Drag and drop files/i)).toBeInTheDocument()
})
it('should show single file text when supportBatchUpload is false', () => {
render(<UploadDropzone {...defaultProps} supportBatchUpload={false} />)
expect(screen.getByText(/Drag and drop file/i)).toBeInTheDocument()
})
})
describe('dragging state', () => {
it('should apply dragging styles when dragging is true', () => {
const { container } = render(<UploadDropzone {...defaultProps} dragging={true} />)
const dropzone = container.querySelector('[class*="border-components-dropzone-border-accent"]')
expect(dropzone).toBeInTheDocument()
})
it('should render drag overlay when dragging', () => {
const dragRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dragging={true} dragRef={dragRef as RefObject<HTMLDivElement | null>} />)
const overlay = document.querySelector('.absolute.left-0.top-0')
expect(overlay).toBeInTheDocument()
})
it('should not render drag overlay when not dragging', () => {
render(<UploadDropzone {...defaultProps} dragging={false} />)
const overlay = document.querySelector('.absolute.left-0.top-0')
expect(overlay).not.toBeInTheDocument()
})
})
describe('event handlers', () => {
it('should call onSelectFile when browse label is clicked', () => {
const onSelectFile = vi.fn()
render(<UploadDropzone {...defaultProps} onSelectFile={onSelectFile} />)
const browseLabel = screen.getByText('Browse')
fireEvent.click(browseLabel)
expect(onSelectFile).toHaveBeenCalledTimes(1)
})
it('should call onFileChange when files are selected', () => {
const onFileChange = vi.fn()
render(<UploadDropzone {...defaultProps} onFileChange={onFileChange} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
const file = new File(['content'], 'test.pdf', { type: 'application/pdf' })
fireEvent.change(input, { target: { files: [file] } })
expect(onFileChange).toHaveBeenCalledTimes(1)
})
})
describe('refs', () => {
it('should attach dropRef to drop container', () => {
const dropRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dropRef={dropRef as RefObject<HTMLDivElement | null>} />)
expect(dropRef.current).toBeInstanceOf(HTMLDivElement)
})
it('should attach fileUploaderRef to input element', () => {
const fileUploaderRef = createMockRef<HTMLInputElement>()
render(<UploadDropzone {...defaultProps} fileUploaderRef={fileUploaderRef as RefObject<HTMLInputElement | null>} />)
expect(fileUploaderRef.current).toBeInstanceOf(HTMLInputElement)
})
it('should attach dragRef to overlay when dragging', () => {
const dragRef = createMockRef<HTMLDivElement>()
render(<UploadDropzone {...defaultProps} dragging={true} dragRef={dragRef as RefObject<HTMLDivElement | null>} />)
expect(dragRef.current).toBeInstanceOf(HTMLDivElement)
})
})
describe('styling', () => {
it('should have base dropzone styling', () => {
const { container } = render(<UploadDropzone {...defaultProps} />)
const dropzone = container.querySelector('[class*="border-dashed"]')
expect(dropzone).toBeInTheDocument()
expect(dropzone).toHaveClass('rounded-xl')
})
it('should have cursor-pointer on browse label', () => {
render(<UploadDropzone {...defaultProps} />)
const browseLabel = screen.getByText('Browse')
expect(browseLabel).toHaveClass('cursor-pointer')
})
})
describe('accessibility', () => {
it('should have an accessible file input', () => {
render(<UploadDropzone {...defaultProps} />)
const input = document.getElementById('fileUploader') as HTMLInputElement
expect(input).toHaveAttribute('id', 'fileUploader')
})
})
})

View File

@@ -1,83 +0,0 @@
import type { ChangeEvent, RefObject } from 'react'
import { RiUploadCloud2Line } from '@remixicon/react'
import { useTranslation } from 'react-i18next'
import { cn } from '@/utils/classnames'
type FileUploadConfig = {
file_size_limit: number
batch_count_limit: number
file_upload_limit: number
}
export type UploadDropzoneProps = {
dropRef: RefObject<HTMLDivElement | null>
dragRef: RefObject<HTMLDivElement | null>
fileUploaderRef: RefObject<HTMLInputElement | null>
dragging: boolean
supportBatchUpload: boolean
supportTypesShowNames: string
fileUploadConfig: FileUploadConfig
acceptTypes: string[]
onSelectFile: () => void
onFileChange: (e: ChangeEvent<HTMLInputElement>) => void
allowedExtensions: string[]
}
const UploadDropzone = ({
dropRef,
dragRef,
fileUploaderRef,
dragging,
supportBatchUpload,
supportTypesShowNames,
fileUploadConfig,
acceptTypes,
onSelectFile,
onFileChange,
allowedExtensions,
}: UploadDropzoneProps) => {
const { t } = useTranslation()
return (
<>
<input
ref={fileUploaderRef}
id="fileUploader"
className="hidden"
type="file"
multiple={supportBatchUpload}
accept={acceptTypes.join(',')}
onChange={onFileChange}
/>
<div
ref={dropRef}
className={cn(
'relative box-border flex min-h-20 flex-col items-center justify-center gap-1 rounded-xl border border-dashed border-components-dropzone-border bg-components-dropzone-bg px-4 py-3 text-xs leading-4 text-text-tertiary',
dragging && 'border-components-dropzone-border-accent bg-components-dropzone-bg-accent',
)}
>
<div className="flex min-h-5 items-center justify-center text-sm leading-4 text-text-secondary">
<RiUploadCloud2Line className="mr-2 size-5" />
<span>
{supportBatchUpload ? t('stepOne.uploader.button', { ns: 'datasetCreation' }) : t('stepOne.uploader.buttonSingleFile', { ns: 'datasetCreation' })}
{allowedExtensions.length > 0 && (
<label className="ml-1 cursor-pointer text-text-accent" onClick={onSelectFile}>{t('stepOne.uploader.browse', { ns: 'datasetCreation' })}</label>
)}
</span>
</div>
<div>
{t('stepOne.uploader.tip', {
ns: 'datasetCreation',
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
totalCount: fileUploadConfig.file_upload_limit,
})}
</div>
{dragging && <div ref={dragRef} className="absolute left-0 top-0 h-full w-full" />}
</div>
</>
)
}
export default UploadDropzone
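// A minimal wiring sketch (hypothetical consumer; the real parent is not part of
// this diff). Every prop maps one-to-one onto a value returned by the
// useLocalFileUpload hook exercised in the tests below:
//
//   const {
//     dropRef, dragRef, fileUploaderRef, dragging, acceptTypes,
//     supportTypesShowNames, fileUploadConfig, selectHandle, fileChangeHandle,
//   } = useLocalFileUpload({ allowedExtensions: ['pdf', 'docx', 'txt'] })
//   return (
//     <UploadDropzone
//       dropRef={dropRef}
//       dragRef={dragRef}
//       fileUploaderRef={fileUploaderRef}
//       dragging={dragging}
//       supportBatchUpload
//       supportTypesShowNames={supportTypesShowNames}
//       fileUploadConfig={fileUploadConfig}
//       acceptTypes={acceptTypes}
//       onSelectFile={selectHandle}
//       onFileChange={fileChangeHandle}
//       allowedExtensions={['pdf', 'docx', 'txt']}
//     />
//   )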

View File

@@ -1,3 +0,0 @@
export const PROGRESS_NOT_STARTED = -1
export const PROGRESS_ERROR = -2
export const PROGRESS_COMPLETE = 100
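// Sentinel scheme used by the uploader UI: PROGRESS_NOT_STARTED (-1) and
// PROGRESS_ERROR (-2) are non-progress states; 0–99 means uploading (pie chart
// shown); PROGRESS_COMPLETE (100) hides the chart.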

View File

@@ -1,911 +0,0 @@
import type { ReactNode } from 'react'
import type { CustomFile, FileItem } from '@/models/datasets'
import { act, render, renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { PROGRESS_ERROR, PROGRESS_NOT_STARTED } from '../constants'
// Mock notify function - defined before mocks
const mockNotify = vi.fn()
const mockClose = vi.fn()
// Mock ToastContext with factory function
vi.mock('@/app/components/base/toast', async () => {
const { createContext, useContext } = await import('use-context-selector')
const context = createContext({ notify: mockNotify, close: mockClose })
return {
ToastContext: context,
useToastContext: () => useContext(context),
}
})
// Mock file uploader utils
vi.mock('@/app/components/base/file-uploader/utils', () => ({
getFileUploadErrorMessage: (e: Error, defaultMsg: string) => e.message || defaultMsg,
}))
// Mock format utils used by the shared hook
vi.mock('@/utils/format', () => ({
getFileExtension: (filename: string) => {
const parts = filename.split('.')
return parts[parts.length - 1] || ''
},
}))
// Mock react-i18next
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => key,
}),
}))
// Mock locale context
vi.mock('@/context/i18n', () => ({
useLocale: () => 'en-US',
}))
// Mock i18n config
vi.mock('@/i18n-config/language', () => ({
LanguagesSupported: ['en-US', 'zh-Hans'],
}))
// Mock config
vi.mock('@/config', () => ({
IS_CE_EDITION: false,
}))
// Mock store functions
const mockSetLocalFileList = vi.fn()
const mockSetCurrentLocalFile = vi.fn()
const mockGetState = vi.fn(() => ({
setLocalFileList: mockSetLocalFileList,
setCurrentLocalFile: mockSetCurrentLocalFile,
}))
const mockStore = { getState: mockGetState }
vi.mock('../../store', () => ({
useDataSourceStoreWithSelector: vi.fn((selector: (state: { localFileList: FileItem[] }) => FileItem[]) =>
selector({ localFileList: [] }),
),
useDataSourceStore: vi.fn(() => mockStore),
}))
// Mock file upload config
vi.mock('@/service/use-common', () => ({
useFileUploadConfig: vi.fn(() => ({
data: {
file_size_limit: 15,
batch_count_limit: 5,
file_upload_limit: 10,
},
})),
// Required by the shared useFileUpload hook
useFileSupportTypes: vi.fn(() => ({
data: {
allowed_extensions: ['pdf', 'docx', 'txt'],
},
})),
}))
// Mock upload service
const mockUpload = vi.fn()
vi.mock('@/service/base', () => ({
upload: (...args: unknown[]) => mockUpload(...args),
}))
// Import after all mocks are set up
const { useLocalFileUpload } = await import('./use-local-file-upload')
const { ToastContext } = await import('@/app/components/base/toast')
const createWrapper = () => {
return ({ children }: { children: ReactNode }) => (
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
{children}
</ToastContext.Provider>
)
}
describe('useLocalFileUpload', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUpload.mockReset()
})
describe('initialization', () => {
it('should initialize with default values', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf', 'docx'] }),
{ wrapper: createWrapper() },
)
expect(result.current.dragging).toBe(false)
expect(result.current.localFileList).toEqual([])
expect(result.current.hideUpload).toBe(false)
})
it('should create refs for dropzone, drag area, and file uploader', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
expect(result.current.dropRef).toBeDefined()
expect(result.current.dragRef).toBeDefined()
expect(result.current.fileUploaderRef).toBeDefined()
})
it('should compute acceptTypes from allowedExtensions', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf', 'docx', 'txt'] }),
{ wrapper: createWrapper() },
)
expect(result.current.acceptTypes).toEqual(['.pdf', '.docx', '.txt'])
})
it('should compute supportTypesShowNames correctly', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf', 'docx', 'md'] }),
{ wrapper: createWrapper() },
)
expect(result.current.supportTypesShowNames).toContain('PDF')
expect(result.current.supportTypesShowNames).toContain('DOCX')
expect(result.current.supportTypesShowNames).toContain('MARKDOWN')
})
it('should provide file upload config with defaults', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
expect(result.current.fileUploadConfig.file_size_limit).toBe(15)
expect(result.current.fileUploadConfig.batch_count_limit).toBe(5)
expect(result.current.fileUploadConfig.file_upload_limit).toBe(10)
})
})
describe('supportBatchUpload option', () => {
it('should use batch limits when supportBatchUpload is true', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'], supportBatchUpload: true }),
{ wrapper: createWrapper() },
)
expect(result.current.fileUploadConfig.batch_count_limit).toBe(5)
expect(result.current.fileUploadConfig.file_upload_limit).toBe(10)
})
it('should use single file limits when supportBatchUpload is false', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'], supportBatchUpload: false }),
{ wrapper: createWrapper() },
)
expect(result.current.fileUploadConfig.batch_count_limit).toBe(1)
expect(result.current.fileUploadConfig.file_upload_limit).toBe(1)
})
})
describe('selectHandle', () => {
it('should trigger file input click', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockClick = vi.fn()
const mockInput = { click: mockClick } as unknown as HTMLInputElement
Object.defineProperty(result.current.fileUploaderRef, 'current', {
value: mockInput,
writable: true,
})
act(() => {
result.current.selectHandle()
})
expect(mockClick).toHaveBeenCalled()
})
it('should handle null fileUploaderRef gracefully', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
expect(() => {
act(() => {
result.current.selectHandle()
})
}).not.toThrow()
})
})
describe('removeFile', () => {
it('should remove file from list', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
act(() => {
result.current.removeFile('file-id-123')
})
expect(mockSetLocalFileList).toHaveBeenCalled()
})
it('should clear file input value when removing', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockInput = { value: 'some-file.pdf' } as HTMLInputElement
Object.defineProperty(result.current.fileUploaderRef, 'current', {
value: mockInput,
writable: true,
})
act(() => {
result.current.removeFile('file-id')
})
expect(mockInput.value).toBe('')
})
})
describe('handlePreview', () => {
it('should set current local file when file has id', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = { id: 'file-123', name: 'test.pdf', size: 1024 }
act(() => {
result.current.handlePreview(mockFile as unknown as CustomFile)
})
expect(mockSetCurrentLocalFile).toHaveBeenCalledWith(mockFile)
})
it('should not set current file when file has no id', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = { name: 'test.pdf', size: 1024 }
act(() => {
result.current.handlePreview(mockFile as unknown as CustomFile)
})
expect(mockSetCurrentLocalFile).not.toHaveBeenCalled()
})
})
describe('fileChangeHandle', () => {
it('should handle valid files', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: {
files: [mockFile],
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockSetLocalFileList).toHaveBeenCalled()
})
})
it('should handle empty file list', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const event = {
target: {
files: null,
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockSetLocalFileList).not.toHaveBeenCalled()
})
it('should reject files with invalid type', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.exe', { type: 'application/exe' })
const event = {
target: {
files: [mockFile],
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
it('should reject files exceeding size limit', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
// Create a mock file larger than 15MB
const largeSize = 20 * 1024 * 1024
const mockFile = new File([''], 'large.pdf', { type: 'application/pdf' })
Object.defineProperty(mockFile, 'size', { value: largeSize })
const event = {
target: {
files: [mockFile],
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
it('should limit files to batch count limit', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
// Create 10 files but batch limit is 5
const files = Array.from({ length: 10 }, (_, i) =>
new File(['content'], `file${i}.pdf`, { type: 'application/pdf' }))
const event = {
target: {
files,
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockSetLocalFileList).toHaveBeenCalled()
})
// Should only process first 5 files (batch_count_limit)
const firstCall = mockSetLocalFileList.mock.calls[0]
expect(firstCall[0].length).toBeLessThanOrEqual(5)
})
})
describe('upload handling', () => {
it('should handle successful upload', async () => {
const uploadedResponse = { id: 'server-file-id' }
mockUpload.mockResolvedValue(uploadedResponse)
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: {
files: [mockFile],
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockUpload).toHaveBeenCalled()
})
})
it('should handle upload error', async () => {
mockUpload.mockRejectedValue(new Error('Upload failed'))
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: {
files: [mockFile],
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
})
})
it('should call upload with correct parameters', async () => {
mockUpload.mockResolvedValue({ id: 'file-id' })
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: {
files: [mockFile],
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockUpload).toHaveBeenCalledWith(
expect.objectContaining({
xhr: expect.any(XMLHttpRequest),
data: expect.any(FormData),
}),
false,
undefined,
'?source=datasets',
)
})
})
})
describe('extension mapping', () => {
it('should map md to markdown', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['md'] }),
{ wrapper: createWrapper() },
)
expect(result.current.supportTypesShowNames).toContain('MARKDOWN')
})
it('should map htm to html', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['htm'] }),
{ wrapper: createWrapper() },
)
expect(result.current.supportTypesShowNames).toContain('HTML')
})
it('should preserve unmapped extensions', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf', 'txt'] }),
{ wrapper: createWrapper() },
)
expect(result.current.supportTypesShowNames).toContain('PDF')
expect(result.current.supportTypesShowNames).toContain('TXT')
})
it('should remove duplicate extensions', () => {
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf', 'pdf', 'PDF'] }),
{ wrapper: createWrapper() },
)
const count = (result.current.supportTypesShowNames.match(/PDF/g) || []).length
expect(count).toBe(1)
})
})
describe('drag and drop handlers', () => {
// Helper component that renders with the hook and connects refs
const TestDropzone = ({ allowedExtensions, supportBatchUpload = true }: {
allowedExtensions: string[]
supportBatchUpload?: boolean
}) => {
const {
dropRef,
dragRef,
dragging,
} = useLocalFileUpload({ allowedExtensions, supportBatchUpload })
return (
<div>
<div ref={dropRef} data-testid="dropzone">
{dragging && <div ref={dragRef} data-testid="drag-overlay" />}
</div>
<span data-testid="dragging">{String(dragging)}</span>
</div>
)
}
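// Note: the overlay mirrors the production markup: dragRef is only mounted
// while dragging, which is exactly what the dragleave handler keys on.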
it('should set dragging true on dragenter', async () => {
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone allowedExtensions={['pdf']} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragEnterEvent)
})
expect(getByTestId('dragging').textContent).toBe('true')
})
it('should handle dragover event', async () => {
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone allowedExtensions={['pdf']} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
await act(async () => {
const dragOverEvent = new Event('dragover', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragOverEvent)
})
// dragover should not throw
expect(dropzone).toBeInTheDocument()
})
it('should set dragging false on dragleave from drag overlay', async () => {
const { getByTestId, queryByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone allowedExtensions={['pdf']} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
// First trigger dragenter to set dragging true
await act(async () => {
const dragEnterEvent = new Event('dragenter', { bubbles: true, cancelable: true })
dropzone.dispatchEvent(dragEnterEvent)
})
expect(getByTestId('dragging').textContent).toBe('true')
// Now the drag overlay should be rendered
const dragOverlay = queryByTestId('drag-overlay')
if (dragOverlay) {
await act(async () => {
const dragLeaveEvent = new Event('dragleave', { bubbles: true, cancelable: true })
Object.defineProperty(dragLeaveEvent, 'target', { value: dragOverlay })
dropzone.dispatchEvent(dragLeaveEvent)
})
}
})
it('should handle drop with files', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone allowedExtensions={['pdf']} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & {
dataTransfer: { items: DataTransferItem[], files: File[] } | null
}
// Mock dataTransfer with items array (used by the shared hook for directory traversal)
dropEvent.dataTransfer = {
items: [{
kind: 'file',
getAsFile: () => mockFile,
}] as unknown as DataTransferItem[],
files: [mockFile],
}
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
expect(mockSetLocalFileList).toHaveBeenCalled()
})
})
it('should handle drop without dataTransfer', async () => {
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone allowedExtensions={['pdf']} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
mockSetLocalFileList.mockClear()
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & { dataTransfer: { files: File[] } | null }
dropEvent.dataTransfer = null
dropzone.dispatchEvent(dropEvent)
})
// Should not upload when no dataTransfer
expect(mockSetLocalFileList).not.toHaveBeenCalled()
})
it('should limit to single file on drop when supportBatchUpload is false', async () => {
mockUpload.mockResolvedValue({ id: 'uploaded-id' })
const { getByTestId } = await act(async () =>
render(
<ToastContext.Provider value={{ notify: mockNotify, close: mockClose }}>
<TestDropzone allowedExtensions={['pdf']} supportBatchUpload={false} />
</ToastContext.Provider>,
),
)
const dropzone = getByTestId('dropzone')
const files = [
new File(['content1'], 'test1.pdf', { type: 'application/pdf' }),
new File(['content2'], 'test2.pdf', { type: 'application/pdf' }),
]
await act(async () => {
const dropEvent = new Event('drop', { bubbles: true, cancelable: true }) as Event & {
dataTransfer: { items: DataTransferItem[], files: File[] } | null
}
// Mock dataTransfer with items array (used by the shared hook for directory traversal)
dropEvent.dataTransfer = {
items: files.map(f => ({
kind: 'file',
getAsFile: () => f,
})) as unknown as DataTransferItem[],
files,
}
dropzone.dispatchEvent(dropEvent)
})
await waitFor(() => {
expect(mockSetLocalFileList).toHaveBeenCalled()
// Should only have 1 file (limited by supportBatchUpload: false)
const callArgs = mockSetLocalFileList.mock.calls[0][0]
expect(callArgs.length).toBe(1)
})
})
})
describe('file upload limit', () => {
it('should reject files exceeding total file upload limit', async () => {
// Mock store to return existing files
const { useDataSourceStoreWithSelector } = vi.mocked(await import('../../store'))
const existingFiles: FileItem[] = Array.from({ length: 8 }, (_, i) => ({
fileID: `existing-${i}`,
file: { name: `existing-${i}.pdf`, size: 1024 } as CustomFile,
progress: 100,
}))
vi.mocked(useDataSourceStoreWithSelector).mockImplementation(selector =>
selector({ localFileList: existingFiles } as Parameters<typeof selector>[0]),
)
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
// Try to add 5 more files when limit is 10 and we already have 8
const files = Array.from({ length: 5 }, (_, i) =>
new File(['content'], `new-${i}.pdf`, { type: 'application/pdf' }))
const event = {
target: { files },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
// Should show an error about the file-count limit
expect(mockNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
// Reset mock for other tests
vi.mocked(useDataSourceStoreWithSelector).mockImplementation(selector =>
selector({ localFileList: [] as FileItem[] } as Parameters<typeof selector>[0]),
)
})
})
describe('upload progress tracking', () => {
it('should track upload progress', async () => {
let progressCallback: ((e: ProgressEvent) => void) | undefined
mockUpload.mockImplementation(async (options: { onprogress: (e: ProgressEvent) => void }) => {
progressCallback = options.onprogress
return { id: 'uploaded-id' }
})
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockUpload).toHaveBeenCalled()
})
// Simulate progress event
if (progressCallback) {
act(() => {
progressCallback!({
lengthComputable: true,
loaded: 50,
total: 100,
} as ProgressEvent)
})
expect(mockSetLocalFileList).toHaveBeenCalled()
}
})
it('should not update progress when not lengthComputable', async () => {
let progressCallback: ((e: ProgressEvent) => void) | undefined
const uploadCallCount = { value: 0 }
mockUpload.mockImplementation(async (options: { onprogress: (e: ProgressEvent) => void }) => {
progressCallback = options.onprogress
uploadCallCount.value++
return { id: 'uploaded-id' }
})
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: { files: [mockFile] },
} as unknown as React.ChangeEvent<HTMLInputElement>
mockSetLocalFileList.mockClear()
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
expect(mockUpload).toHaveBeenCalled()
})
const callsBeforeProgress = mockSetLocalFileList.mock.calls.length
// Simulate progress event without lengthComputable
if (progressCallback) {
act(() => {
progressCallback!({
lengthComputable: false,
loaded: 50,
total: 100,
} as ProgressEvent)
})
// Should not have additional calls
expect(mockSetLocalFileList.mock.calls.length).toBe(callsBeforeProgress)
}
})
})
describe('file progress constants', () => {
it('should use PROGRESS_NOT_STARTED for new files', async () => {
mockUpload.mockResolvedValue({ id: 'file-id' })
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: {
files: [mockFile],
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
const callArgs = mockSetLocalFileList.mock.calls[0][0]
expect(callArgs[0].progress).toBe(PROGRESS_NOT_STARTED)
})
})
it('should set PROGRESS_ERROR on upload failure', async () => {
mockUpload.mockRejectedValue(new Error('Upload failed'))
const { result } = renderHook(
() => useLocalFileUpload({ allowedExtensions: ['pdf'] }),
{ wrapper: createWrapper() },
)
const mockFile = new File(['content'], 'test.pdf', { type: 'application/pdf' })
const event = {
target: {
files: [mockFile],
},
} as unknown as React.ChangeEvent<HTMLInputElement>
act(() => {
result.current.fileChangeHandle(event)
})
await waitFor(() => {
const calls = mockSetLocalFileList.mock.calls
const lastCall = calls[calls.length - 1][0]
expect(lastCall.some((f: FileItem) => f.progress === PROGRESS_ERROR)).toBe(true)
})
})
})
})

View File

@@ -1,105 +0,0 @@
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { produce } from 'immer'
import { useCallback, useRef } from 'react'
import { useFileUpload } from '@/app/components/datasets/create/file-uploader/hooks/use-file-upload'
import { useDataSourceStore, useDataSourceStoreWithSelector } from '../../store'
export type UseLocalFileUploadOptions = {
allowedExtensions: string[]
supportBatchUpload?: boolean
}
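// A minimal usage sketch (hypothetical component; assumes a DataSourceStore
// provider is mounted above this tree):
//
//   const { dropRef, fileUploaderRef, selectHandle, fileChangeHandle } =
//     useLocalFileUpload({ allowedExtensions: ['pdf'] })
//   return (
//     <div ref={dropRef}>
//       <input ref={fileUploaderRef} type="file" className="hidden" onChange={fileChangeHandle} />
//       <button onClick={selectHandle}>Browse</button>
//     </div>
//   )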
/**
* Hook for handling local file uploads in the create-from-pipeline flow.
* This is a thin wrapper around the generic useFileUpload hook that provides
* Zustand store integration for state management.
*/
export const useLocalFileUpload = ({
allowedExtensions,
supportBatchUpload = true,
}: UseLocalFileUploadOptions) => {
const localFileList = useDataSourceStoreWithSelector(state => state.localFileList)
const dataSourceStore = useDataSourceStore()
const fileListRef = useRef<FileItem[]>([])
// Sync fileListRef with localFileList for internal tracking
fileListRef.current = localFileList
const prepareFileList = useCallback((files: FileItem[]) => {
const { setLocalFileList } = dataSourceStore.getState()
setLocalFileList(files)
fileListRef.current = files
}, [dataSourceStore])
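// Immutably merge a progress update into the matching entry via immer's
// produce; unknown fileIDs are ignored, so a late progress event for a
// removed file is a no-op.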
const onFileUpdate = useCallback((fileItem: FileItem, progress: number, list: FileItem[]) => {
const { setLocalFileList } = dataSourceStore.getState()
const newList = produce(list, (draft) => {
const targetIndex = draft.findIndex(file => file.fileID === fileItem.fileID)
if (targetIndex !== -1) {
draft[targetIndex] = {
...draft[targetIndex],
...fileItem,
progress,
}
}
})
setLocalFileList(newList)
}, [dataSourceStore])
const onFileListUpdate = useCallback((files: FileItem[]) => {
const { setLocalFileList } = dataSourceStore.getState()
setLocalFileList(files)
fileListRef.current = files
}, [dataSourceStore])
const onPreview = useCallback((file: File) => {
const { setCurrentLocalFile } = dataSourceStore.getState()
setCurrentLocalFile(file)
}, [dataSourceStore])
const {
dropRef,
dragRef,
fileUploaderRef,
dragging,
fileUploadConfig,
acceptTypes,
supportTypesShowNames,
hideUpload,
selectHandle,
fileChangeHandle,
removeFile,
handlePreview,
} = useFileUpload({
fileList: localFileList,
prepareFileList,
onFileUpdate,
onFileListUpdate,
onPreview,
supportBatchUpload,
allowedExtensions,
})
return {
// Refs
dropRef,
dragRef,
fileUploaderRef,
// State
dragging,
localFileList,
// Config
fileUploadConfig,
acceptTypes,
supportTypesShowNames,
hideUpload,
// Handlers
selectHandle,
fileChangeHandle,
removeFile,
handlePreview,
}
}

View File

@@ -1,398 +0,0 @@
import type { FileItem } from '@/models/datasets'
import { render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import LocalFile from './index'
// Mock the hook
const mockUseLocalFileUpload = vi.fn()
vi.mock('./hooks/use-local-file-upload', () => ({
useLocalFileUpload: (...args: unknown[]) => mockUseLocalFileUpload(...args),
}))
// Mock react-i18next for sub-components
vi.mock('react-i18next', () => ({
useTranslation: () => ({
t: (key: string) => key,
}),
}))
// Mock theme hook for sub-components
vi.mock('@/hooks/use-theme', () => ({
default: () => ({ theme: 'light' }),
}))
// Mock theme types
vi.mock('@/types/app', () => ({
Theme: { dark: 'dark', light: 'light' },
}))
// Mock DocumentFileIcon
vi.mock('@/app/components/datasets/common/document-file-icon', () => ({
default: ({ name }: { name: string }) => <div data-testid="document-icon">{name}</div>,
}))
// Mock SimplePieChart
vi.mock('next/dynamic', () => ({
default: () => {
const Component = ({ percentage }: { percentage: number }) => (
<div data-testid="pie-chart">
{percentage}
%
</div>
)
return Component
},
}))
describe('LocalFile', () => {
const mockDropRef = { current: null }
const mockDragRef = { current: null }
const mockFileUploaderRef = { current: null }
const defaultHookReturn = {
dropRef: mockDropRef,
dragRef: mockDragRef,
fileUploaderRef: mockFileUploaderRef,
dragging: false,
localFileList: [] as FileItem[],
fileUploadConfig: {
file_size_limit: 15,
batch_count_limit: 5,
file_upload_limit: 10,
},
acceptTypes: ['.pdf', '.docx'],
supportTypesShowNames: 'PDF, DOCX',
hideUpload: false,
selectHandle: vi.fn(),
fileChangeHandle: vi.fn(),
removeFile: vi.fn(),
handlePreview: vi.fn(),
}
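// Baseline hook return shared by every test below; individual cases override
// fields via mockReturnValue({ ...defaultHookReturn, ... }).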
beforeEach(() => {
vi.clearAllMocks()
mockUseLocalFileUpload.mockReturnValue(defaultHookReturn)
})
describe('rendering', () => {
it('should render the component container', () => {
const { container } = render(
<LocalFile allowedExtensions={['pdf', 'docx']} />,
)
expect(container.firstChild).toHaveClass('flex', 'flex-col')
})
it('should render UploadDropzone when hideUpload is false', () => {
render(<LocalFile allowedExtensions={['pdf']} />)
const fileInput = document.getElementById('fileUploader')
expect(fileInput).toBeInTheDocument()
})
it('should not render UploadDropzone when hideUpload is true', () => {
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
hideUpload: true,
})
render(<LocalFile allowedExtensions={['pdf']} />)
const fileInput = document.getElementById('fileUploader')
expect(fileInput).not.toBeInTheDocument()
})
})
describe('file list rendering', () => {
it('should not render file list when empty', () => {
render(<LocalFile allowedExtensions={['pdf']} />)
expect(screen.queryByTestId('document-icon')).not.toBeInTheDocument()
})
it('should render file list when files exist', () => {
const mockFile = {
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
lastModified: Date.now(),
} as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
localFileList: [
{
fileID: 'file-1',
file: mockFile,
progress: -1,
},
],
})
render(<LocalFile allowedExtensions={['pdf']} />)
expect(screen.getByTestId('document-icon')).toBeInTheDocument()
})
it('should render multiple file items', () => {
const createMockFile = (name: string) => ({
name,
size: 1024,
type: 'application/pdf',
lastModified: Date.now(),
}) as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
localFileList: [
{ fileID: 'file-1', file: createMockFile('doc1.pdf'), progress: -1 },
{ fileID: 'file-2', file: createMockFile('doc2.pdf'), progress: -1 },
{ fileID: 'file-3', file: createMockFile('doc3.pdf'), progress: -1 },
],
})
render(<LocalFile allowedExtensions={['pdf']} />)
const icons = screen.getAllByTestId('document-icon')
expect(icons).toHaveLength(3)
})
it('should use correct key for file items', () => {
const mockFile = {
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
lastModified: Date.now(),
} as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
localFileList: [
{ fileID: 'unique-id-123', file: mockFile, progress: -1 },
],
})
render(<LocalFile allowedExtensions={['pdf']} />)
// The component should render without errors (key is used internally)
expect(screen.getByTestId('document-icon')).toBeInTheDocument()
})
})
describe('hook integration', () => {
it('should pass allowedExtensions to hook', () => {
render(<LocalFile allowedExtensions={['pdf', 'docx', 'txt']} />)
expect(mockUseLocalFileUpload).toHaveBeenCalledWith({
allowedExtensions: ['pdf', 'docx', 'txt'],
supportBatchUpload: true,
})
})
it('should pass supportBatchUpload true by default', () => {
render(<LocalFile allowedExtensions={['pdf']} />)
expect(mockUseLocalFileUpload).toHaveBeenCalledWith(
expect.objectContaining({ supportBatchUpload: true }),
)
})
it('should pass supportBatchUpload false when specified', () => {
render(<LocalFile allowedExtensions={['pdf']} supportBatchUpload={false} />)
expect(mockUseLocalFileUpload).toHaveBeenCalledWith(
expect.objectContaining({ supportBatchUpload: false }),
)
})
})
describe('props passed to UploadDropzone', () => {
it('should pass all required props to UploadDropzone', () => {
const selectHandle = vi.fn()
const fileChangeHandle = vi.fn()
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
selectHandle,
fileChangeHandle,
supportTypesShowNames: 'PDF, DOCX',
acceptTypes: ['.pdf', '.docx'],
fileUploadConfig: {
file_size_limit: 20,
batch_count_limit: 10,
file_upload_limit: 50,
},
})
render(<LocalFile allowedExtensions={['pdf', 'docx']} supportBatchUpload={true} />)
// Verify the dropzone is rendered with correct configuration
const fileInput = document.getElementById('fileUploader')
expect(fileInput).toBeInTheDocument()
expect(fileInput).toHaveAttribute('accept', '.pdf,.docx')
expect(fileInput).toHaveAttribute('multiple')
})
})
describe('props passed to FileListItem', () => {
it('should pass correct props to file items', () => {
const handlePreview = vi.fn()
const removeFile = vi.fn()
const mockFile = {
name: 'document.pdf',
size: 2048,
type: 'application/pdf',
lastModified: Date.now(),
} as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
handlePreview,
removeFile,
localFileList: [
{ fileID: 'test-id', file: mockFile, progress: 50 },
],
})
render(<LocalFile allowedExtensions={['pdf']} />)
expect(screen.getByTestId('document-icon')).toHaveTextContent('document.pdf')
})
})
describe('conditional rendering', () => {
it('should show both dropzone and file list when files exist and hideUpload is false', () => {
const mockFile = {
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
lastModified: Date.now(),
} as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
hideUpload: false,
localFileList: [
{ fileID: 'file-1', file: mockFile, progress: -1 },
],
})
render(<LocalFile allowedExtensions={['pdf']} />)
expect(document.getElementById('fileUploader')).toBeInTheDocument()
expect(screen.getByTestId('document-icon')).toBeInTheDocument()
})
it('should show only file list when hideUpload is true', () => {
const mockFile = {
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
lastModified: Date.now(),
} as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
hideUpload: true,
localFileList: [
{ fileID: 'file-1', file: mockFile, progress: -1 },
],
})
render(<LocalFile allowedExtensions={['pdf']} />)
expect(document.getElementById('fileUploader')).not.toBeInTheDocument()
expect(screen.getByTestId('document-icon')).toBeInTheDocument()
})
})
describe('file list container styling', () => {
it('should apply correct container classes for file list', () => {
const mockFile = {
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
lastModified: Date.now(),
} as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
localFileList: [
{ fileID: 'file-1', file: mockFile, progress: -1 },
],
})
const { container } = render(<LocalFile allowedExtensions={['pdf']} />)
const fileListContainer = container.querySelector('.mt-1.flex.flex-col.gap-y-1')
expect(fileListContainer).toBeInTheDocument()
})
})
describe('edge cases', () => {
it('should handle empty allowedExtensions', () => {
render(<LocalFile allowedExtensions={[]} />)
expect(mockUseLocalFileUpload).toHaveBeenCalledWith({
allowedExtensions: [],
supportBatchUpload: true,
})
})
it('should handle files with same fileID but different index', () => {
const mockFile = {
name: 'test.pdf',
size: 1024,
type: 'application/pdf',
lastModified: Date.now(),
} as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
localFileList: [
{ fileID: 'same-id', file: { ...mockFile, name: 'doc1.pdf' } as File, progress: -1 },
{ fileID: 'same-id', file: { ...mockFile, name: 'doc2.pdf' } as File, progress: -1 },
],
})
// Should render without key collision errors due to index in key
render(<LocalFile allowedExtensions={['pdf']} />)
const icons = screen.getAllByTestId('document-icon')
expect(icons).toHaveLength(2)
})
})
describe('component integration', () => {
it('should render complete component tree', () => {
const mockFile = {
name: 'complete-test.pdf',
size: 5 * 1024,
type: 'application/pdf',
lastModified: Date.now(),
} as File
mockUseLocalFileUpload.mockReturnValue({
...defaultHookReturn,
hideUpload: false,
localFileList: [
{ fileID: 'file-1', file: mockFile, progress: 50 },
],
dragging: false,
})
const { container } = render(
<LocalFile allowedExtensions={['pdf', 'docx']} supportBatchUpload={true} />,
)
// Main container
expect(container.firstChild).toHaveClass('flex', 'flex-col')
// Dropzone exists
expect(document.getElementById('fileUploader')).toBeInTheDocument()
// File list exists
expect(screen.getByTestId('document-icon')).toBeInTheDocument()
})
})
})

View File

@@ -1,7 +1,26 @@
'use client'
import FileListItem from './components/file-list-item'
import UploadDropzone from './components/upload-dropzone'
import { useLocalFileUpload } from './hooks/use-local-file-upload'
import type { CustomFile as File, FileItem } from '@/models/datasets'
import { RiDeleteBinLine, RiErrorWarningFill, RiUploadCloud2Line } from '@remixicon/react'
import { produce } from 'immer'
import dynamic from 'next/dynamic'
import * as React from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import { getFileUploadErrorMessage } from '@/app/components/base/file-uploader/utils'
import { ToastContext } from '@/app/components/base/toast'
import DocumentFileIcon from '@/app/components/datasets/common/document-file-icon'
import { IS_CE_EDITION } from '@/config'
import { useLocale } from '@/context/i18n'
import useTheme from '@/hooks/use-theme'
import { LanguagesSupported } from '@/i18n-config/language'
import { upload } from '@/service/base'
import { useFileUploadConfig } from '@/service/use-common'
import { Theme } from '@/types/app'
import { cn } from '@/utils/classnames'
import { useDataSourceStore, useDataSourceStoreWithSelector } from '../store'
const SimplePieChart = dynamic(() => import('@/app/components/base/simple-pie-chart'), { ssr: false })
export type LocalFileProps = {
allowedExtensions: string[]
@@ -12,49 +31,345 @@ const LocalFile = ({
allowedExtensions,
supportBatchUpload = true,
}: LocalFileProps) => {
const {
dropRef,
dragRef,
fileUploaderRef,
dragging,
localFileList,
fileUploadConfig,
acceptTypes,
supportTypesShowNames,
hideUpload,
selectHandle,
fileChangeHandle,
removeFile,
handlePreview,
} = useLocalFileUpload({ allowedExtensions, supportBatchUpload })
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const locale = useLocale()
const localFileList = useDataSourceStoreWithSelector(state => state.localFileList)
const dataSourceStore = useDataSourceStore()
const [dragging, setDragging] = useState(false)
const dropRef = useRef<HTMLDivElement>(null)
const dragRef = useRef<HTMLDivElement>(null)
const fileUploader = useRef<HTMLInputElement>(null)
const fileListRef = useRef<FileItem[]>([])
const hideUpload = !supportBatchUpload && localFileList.length > 0
const { data: fileUploadConfigResponse } = useFileUploadConfig()
const supportTypesShowNames = useMemo(() => {
const extensionMap: { [key: string]: string } = {
md: 'markdown',
pptx: 'pptx',
htm: 'html',
xlsx: 'xlsx',
docx: 'docx',
}
return allowedExtensions
.map(item => extensionMap[item] || item) // map to standardized extension
.map(item => item.toLowerCase()) // convert to lower case
.filter((item, index, self) => self.indexOf(item) === index) // remove duplicates
.map(item => item.toUpperCase()) // convert to upper case
.join(locale !== LanguagesSupported[1] ? ', ' : '、 ')
}, [locale, allowedExtensions])
const ACCEPTS = allowedExtensions.map((ext: string) => `.${ext}`)
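// Defaults apply until the upload-config endpoint responds; single-file mode
// pins both the batch and total limits to 1.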
const fileUploadConfig = useMemo(() => ({
file_size_limit: fileUploadConfigResponse?.file_size_limit ?? 15,
batch_count_limit: supportBatchUpload ? (fileUploadConfigResponse?.batch_count_limit ?? 5) : 1,
file_upload_limit: supportBatchUpload ? (fileUploadConfigResponse?.file_upload_limit ?? 5) : 1,
}), [fileUploadConfigResponse, supportBatchUpload])
const updateFile = useCallback((fileItem: FileItem, progress: number, list: FileItem[]) => {
const { setLocalFileList } = dataSourceStore.getState()
const newList = produce(list, (draft) => {
const targetIndex = draft.findIndex(file => file.fileID === fileItem.fileID)
// Guard: the file may have been removed while its upload was in flight
if (targetIndex !== -1) {
draft[targetIndex] = {
...draft[targetIndex],
progress,
}
}
})
setLocalFileList(newList)
}, [dataSourceStore])
const updateFileList = useCallback((preparedFiles: FileItem[]) => {
const { setLocalFileList } = dataSourceStore.getState()
setLocalFileList(preparedFiles)
}, [dataSourceStore])
const handlePreview = useCallback((file: File) => {
const { setCurrentLocalFile } = dataSourceStore.getState()
if (file.id)
setCurrentLocalFile(file)
}, [dataSourceStore])
// utils
const getFileType = (currentFile: File) => {
if (!currentFile)
return ''
const arr = currentFile.name.split('.')
return arr[arr.length - 1]
}
const getFileSize = (size: number) => {
if (size / 1024 < 10)
return `${(size / 1024).toFixed(2)}KB`
return `${(size / 1024 / 1024).toFixed(2)}MB`
}
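// Validate extension (against ACCEPTS) and size (against file_size_limit, in
// MB); each failed check surfaces its own error toast.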
const isValid = useCallback((file: File) => {
const { size } = file
const ext = `.${getFileType(file)}`
const isValidType = ACCEPTS.includes(ext.toLowerCase())
if (!isValidType)
notify({ type: 'error', message: t('stepOne.uploader.validation.typeError', { ns: 'datasetCreation' }) })
const isValidSize = size <= fileUploadConfig.file_size_limit * 1024 * 1024
if (!isValidSize)
notify({ type: 'error', message: t('stepOne.uploader.validation.size', { ns: 'datasetCreation', size: fileUploadConfig.file_size_limit }) })
return isValidType && isValidSize
}, [notify, t, ACCEPTS, fileUploadConfig.file_size_limit])
type UploadResult = Awaited<ReturnType<typeof upload>>
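// Progress sentinels used by the file list below: -1 = not started
// (PROGRESS_NOT_STARTED in the tests), -2 = failed (PROGRESS_ERROR); the
// store receives 100 once the upload settles successfully.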
const fileUpload = useCallback(async (fileItem: FileItem): Promise<FileItem> => {
const formData = new FormData()
formData.append('file', fileItem.file)
const onProgress = (e: ProgressEvent) => {
if (e.lengthComputable) {
const percent = Math.floor(e.loaded / e.total * 100)
updateFile(fileItem, percent, fileListRef.current)
}
}
return upload({
xhr: new XMLHttpRequest(),
data: formData,
onprogress: onProgress,
}, false, undefined, '?source=datasets')
.then((res: UploadResult) => {
const updatedFile = Object.assign({}, fileItem.file, {
id: res.id,
...(res as Partial<File>),
}) as File
const completeFile: FileItem = {
fileID: fileItem.fileID,
file: updatedFile,
progress: -1,
}
const index = fileListRef.current.findIndex(item => item.fileID === fileItem.fileID)
if (index !== -1)
fileListRef.current[index] = completeFile
updateFile(completeFile, 100, fileListRef.current)
return Promise.resolve({ ...completeFile })
})
.catch((e) => {
const errorMessage = getFileUploadErrorMessage(e, t('stepOne.uploader.failed', { ns: 'datasetCreation' }), t)
notify({ type: 'error', message: errorMessage })
updateFile(fileItem, -2, fileListRef.current)
return Promise.resolve({ ...fileItem })
})
}, [fileListRef, notify, updateFile, t])
const uploadBatchFiles = useCallback((bFiles: FileItem[]) => {
bFiles.forEach(bf => (bf.progress = 0))
return Promise.all(bFiles.map(fileUpload))
}, [fileUpload])
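// Drain the queue in windows of batch_count_limit: files inside a window
// upload in parallel (Promise.all), windows run sequentially.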
const uploadMultipleFiles = useCallback(async (files: FileItem[]) => {
const batchCountLimit = fileUploadConfig.batch_count_limit
const length = files.length
let start = 0
let end = 0
while (start < length) {
if (start + batchCountLimit > length)
end = length
else
end = start + batchCountLimit
const bFiles = files.slice(start, end)
await uploadBatchFiles(bFiles)
start = end
}
}, [fileUploadConfig, uploadBatchFiles])
const initialUpload = useCallback((files: File[]) => {
const filesCountLimit = fileUploadConfig.file_upload_limit
if (!files.length)
return false
if (files.length + localFileList.length > filesCountLimit && !IS_CE_EDITION) {
notify({ type: 'error', message: t('stepOne.uploader.validation.filesNumber', { ns: 'datasetCreation', filesNumber: filesCountLimit }) })
return false
}
const preparedFiles = files.map((file, index) => ({
fileID: `file${index}-${Date.now()}`,
file,
progress: -1,
}))
const newFiles = [...fileListRef.current, ...preparedFiles]
updateFileList(newFiles)
fileListRef.current = newFiles
uploadMultipleFiles(preparedFiles)
}, [fileUploadConfig.file_upload_limit, localFileList.length, updateFileList, uploadMultipleFiles, notify, t])
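// dragRef points at an invisible overlay that is mounted only while dragging:
// entering any other descendant switches the highlight on, and leaving the
// overlay switches it off.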
const handleDragEnter = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target !== dragRef.current)
setDragging(true)
}
const handleDragOver = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
}
const handleDragLeave = (e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
if (e.target === dragRef.current)
setDragging(false)
}
const handleDrop = useCallback((e: DragEvent) => {
e.preventDefault()
e.stopPropagation()
setDragging(false)
if (!e.dataTransfer)
return
let files = Array.from(e.dataTransfer.files) as File[]
if (!supportBatchUpload)
files = files.slice(0, 1)
const validFiles = files.filter(isValid)
initialUpload(validFiles)
}, [initialUpload, isValid, supportBatchUpload])
const selectHandle = useCallback(() => {
if (fileUploader.current)
fileUploader.current.click()
}, [])
const removeFile = (fileID: string) => {
if (fileUploader.current)
fileUploader.current.value = ''
fileListRef.current = fileListRef.current.filter(item => item.fileID !== fileID)
updateFileList([...fileListRef.current])
}
const fileChangeHandle = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
let files = Array.from(e.target.files ?? []) as File[]
files = files.slice(0, fileUploadConfig.batch_count_limit)
initialUpload(files.filter(isValid))
}, [isValid, initialUpload, fileUploadConfig.batch_count_limit])
const { theme } = useTheme()
const chartColor = useMemo(() => theme === Theme.dark ? '#5289ff' : '#296dff', [theme])
useEffect(() => {
const dropElement = dropRef.current
dropElement?.addEventListener('dragenter', handleDragEnter)
dropElement?.addEventListener('dragover', handleDragOver)
dropElement?.addEventListener('dragleave', handleDragLeave)
dropElement?.addEventListener('drop', handleDrop)
return () => {
dropElement?.removeEventListener('dragenter', handleDragEnter)
dropElement?.removeEventListener('dragover', handleDragOver)
dropElement?.removeEventListener('dragleave', handleDragLeave)
dropElement?.removeEventListener('drop', handleDrop)
}
}, [handleDrop])
return (
<div className="flex flex-col">
{!hideUpload && (
<UploadDropzone
dropRef={dropRef}
dragRef={dragRef}
fileUploaderRef={fileUploaderRef}
dragging={dragging}
supportBatchUpload={supportBatchUpload}
supportTypesShowNames={supportTypesShowNames}
fileUploadConfig={fileUploadConfig}
acceptTypes={acceptTypes}
onSelectFile={selectHandle}
onFileChange={fileChangeHandle}
allowedExtensions={allowedExtensions}
<input
ref={fileUploader}
id="fileUploader"
className="hidden"
type="file"
multiple={supportBatchUpload}
accept={ACCEPTS.join(',')}
onChange={fileChangeHandle}
/>
)}
{!hideUpload && (
<div
ref={dropRef}
className={cn(
'relative box-border flex min-h-20 flex-col items-center justify-center gap-1 rounded-xl border border-dashed border-components-dropzone-border bg-components-dropzone-bg px-4 py-3 text-xs leading-4 text-text-tertiary',
dragging && 'border-components-dropzone-border-accent bg-components-dropzone-bg-accent',
)}
>
<div className="flex min-h-5 items-center justify-center text-sm leading-4 text-text-secondary">
<RiUploadCloud2Line className="mr-2 size-5" />
<span>
{supportBatchUpload ? t('stepOne.uploader.button', { ns: 'datasetCreation' }) : t('stepOne.uploader.buttonSingleFile', { ns: 'datasetCreation' })}
{allowedExtensions.length > 0 && (
<label className="ml-1 cursor-pointer text-text-accent" onClick={selectHandle}>{t('stepOne.uploader.browse', { ns: 'datasetCreation' })}</label>
)}
</span>
</div>
<div>
{t('stepOne.uploader.tip', {
ns: 'datasetCreation',
size: fileUploadConfig.file_size_limit,
supportTypes: supportTypesShowNames,
batchCount: fileUploadConfig.batch_count_limit,
totalCount: fileUploadConfig.file_upload_limit,
})}
</div>
{dragging && <div ref={dragRef} className="absolute left-0 top-0 h-full w-full" />}
</div>
)}
{localFileList.length > 0 && (
<div className="mt-1 flex flex-col gap-y-1">
{localFileList.map((fileItem, index) => (
<FileListItem
key={`${fileItem.fileID}-${index}`}
fileItem={fileItem}
onPreview={handlePreview}
onRemove={removeFile}
/>
))}
{localFileList.map((fileItem, index) => {
const isUploading = fileItem.progress >= 0 && fileItem.progress < 100
const isError = fileItem.progress === -2
return (
<div
key={`${fileItem.fileID}-${index}`}
onClick={handlePreview.bind(null, fileItem.file)}
className={cn(
'flex h-12 items-center rounded-lg border border-components-panel-border bg-components-panel-on-panel-item-bg shadow-xs shadow-shadow-shadow-4',
isError && 'border-state-destructive-border bg-state-destructive-hover',
)}
>
<div className="flex w-12 shrink-0 items-center justify-center">
<DocumentFileIcon
size="lg"
className="shrink-0"
name={fileItem.file.name}
extension={getFileType(fileItem.file)}
/>
</div>
<div className="flex shrink grow flex-col gap-0.5">
<div className="flex w-full">
<div className="w-0 grow truncate text-xs text-text-secondary">{fileItem.file.name}</div>
</div>
<div className="w-full truncate text-2xs leading-3 text-text-tertiary">
<span className="uppercase">{getFileType(fileItem.file)}</span>
<span className="px-1 text-text-quaternary">·</span>
<span>{getFileSize(fileItem.file.size)}</span>
</div>
</div>
<div className="flex w-16 shrink-0 items-center justify-end gap-1 pr-3">
{isUploading && (
<SimplePieChart percentage={fileItem.progress} stroke={chartColor} fill={chartColor} animationDuration={0} />
)}
{
isError && (
<RiErrorWarningFill className="size-4 text-text-destructive" />
)
}
<span
className="flex h-6 w-6 cursor-pointer items-center justify-center"
onClick={(e) => {
e.stopPropagation()
removeFile(fileItem.fileID)
}}
>
<RiDeleteBinLine className="size-4 text-text-tertiary" />
</span>
</div>
</div>
)
})}
</div>
)}
</div>

View File

@@ -1,4 +0,0 @@
export { default as ProgressBar } from './progress-bar'
export { default as RuleDetail } from './rule-detail'
export { default as SegmentProgress } from './segment-progress'
export { default as StatusHeader } from './status-header'

View File

@@ -1,159 +0,0 @@
import { render } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import ProgressBar from './progress-bar'
describe('ProgressBar', () => {
const defaultProps = {
percent: 50,
isEmbedding: false,
isCompleted: false,
isPaused: false,
isError: false,
}
const getProgressElements = (container: HTMLElement) => {
const wrapper = container.firstChild as HTMLElement
const progressBar = wrapper.firstChild as HTMLElement
return { wrapper, progressBar }
}
describe('Rendering', () => {
it('should render without crashing', () => {
const { container } = render(<ProgressBar {...defaultProps} />)
const { wrapper, progressBar } = getProgressElements(container)
expect(wrapper).toBeInTheDocument()
expect(progressBar).toBeInTheDocument()
})
it('should render progress bar container with correct classes', () => {
const { container } = render(<ProgressBar {...defaultProps} />)
const { wrapper } = getProgressElements(container)
expect(wrapper).toHaveClass('flex', 'h-2', 'w-full', 'items-center', 'overflow-hidden', 'rounded-md')
})
it('should render inner progress bar with transition classes', () => {
const { container } = render(<ProgressBar {...defaultProps} />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveClass('h-full', 'transition-all', 'duration-300')
})
})
describe('Progress Width', () => {
it('should set progress width to 0%', () => {
const { container } = render(<ProgressBar {...defaultProps} percent={0} />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveStyle({ width: '0%' })
})
it('should set progress width to 50%', () => {
const { container } = render(<ProgressBar {...defaultProps} percent={50} />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveStyle({ width: '50%' })
})
it('should set progress width to 100%', () => {
const { container } = render(<ProgressBar {...defaultProps} percent={100} />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveStyle({ width: '100%' })
})
it('should set progress width to 75%', () => {
const { container } = render(<ProgressBar {...defaultProps} percent={75} />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveStyle({ width: '75%' })
})
})
describe('Container Background States', () => {
it('should apply semi-transparent background when isEmbedding is true', () => {
const { container } = render(<ProgressBar {...defaultProps} isEmbedding />)
const { wrapper } = getProgressElements(container)
expect(wrapper).toHaveClass('bg-components-progress-bar-bg/50')
})
it('should apply default background when isEmbedding is false', () => {
const { container } = render(<ProgressBar {...defaultProps} isEmbedding={false} />)
const { wrapper } = getProgressElements(container)
expect(wrapper).toHaveClass('bg-components-progress-bar-bg')
expect(wrapper).not.toHaveClass('bg-components-progress-bar-bg/50')
})
})
describe('Progress Bar Fill States', () => {
it('should apply solid progress style when isEmbedding is true', () => {
const { container } = render(<ProgressBar {...defaultProps} isEmbedding />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveClass('bg-components-progress-bar-progress-solid')
})
it('should apply solid progress style when isCompleted is true', () => {
const { container } = render(<ProgressBar {...defaultProps} isCompleted />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveClass('bg-components-progress-bar-progress-solid')
})
it('should apply highlight style when isPaused is true', () => {
const { container } = render(<ProgressBar {...defaultProps} isPaused />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveClass('bg-components-progress-bar-progress-highlight')
})
it('should apply highlight style when isError is true', () => {
const { container } = render(<ProgressBar {...defaultProps} isError />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveClass('bg-components-progress-bar-progress-highlight')
})
it('should not apply fill styles when no status flags are set', () => {
const { container } = render(<ProgressBar {...defaultProps} />)
const { progressBar } = getProgressElements(container)
expect(progressBar).not.toHaveClass('bg-components-progress-bar-progress-solid')
expect(progressBar).not.toHaveClass('bg-components-progress-bar-progress-highlight')
})
})
describe('Combined States', () => {
it('should apply highlight when isEmbedding and isPaused', () => {
const { container } = render(<ProgressBar {...defaultProps} isEmbedding isPaused />)
const { progressBar } = getProgressElements(container)
// isPaused applies the highlight class independently of the solid/active class
expect(progressBar).toHaveClass('bg-components-progress-bar-progress-highlight')
})
it('should apply highlight when isCompleted and isError', () => {
const { container } = render(<ProgressBar {...defaultProps} isCompleted isError />)
const { progressBar } = getProgressElements(container)
// isError applies the highlight class independently of the solid class
expect(progressBar).toHaveClass('bg-components-progress-bar-progress-highlight')
})
it('should apply semi-transparent bg for embedding and highlight for paused', () => {
const { container } = render(<ProgressBar {...defaultProps} isEmbedding isPaused />)
const { wrapper } = getProgressElements(container)
expect(wrapper).toHaveClass('bg-components-progress-bar-bg/50')
})
})
describe('Edge Cases', () => {
it('should handle all props set to false', () => {
const { container } = render(
<ProgressBar
percent={0}
isEmbedding={false}
isCompleted={false}
isPaused={false}
isError={false}
/>,
)
const { wrapper, progressBar } = getProgressElements(container)
expect(wrapper).toBeInTheDocument()
expect(progressBar).toHaveStyle({ width: '0%' })
})
it('should handle decimal percent values', () => {
const { container } = render(<ProgressBar {...defaultProps} percent={33.33} />)
const { progressBar } = getProgressElements(container)
expect(progressBar).toHaveStyle({ width: '33.33%' })
})
})
})

View File

@@ -1,44 +0,0 @@
import type { FC } from 'react'
import * as React from 'react'
import { cn } from '@/utils/classnames'
type ProgressBarProps = {
percent: number
isEmbedding: boolean
isCompleted: boolean
isPaused: boolean
isError: boolean
}
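// Purely presentational: width tracks `percent`, while the container and fill
// classes are derived from the boolean status flags.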
const ProgressBar: FC<ProgressBarProps> = React.memo(({
percent,
isEmbedding,
isCompleted,
isPaused,
isError,
}) => {
const isActive = isEmbedding || isCompleted
const isHighlighted = isPaused || isError
return (
<div
className={cn(
'flex h-2 w-full items-center overflow-hidden rounded-md border border-components-progress-bar-border',
isEmbedding ? 'bg-components-progress-bar-bg/50' : 'bg-components-progress-bar-bg',
)}
>
<div
className={cn(
'h-full transition-all duration-300',
isActive && 'bg-components-progress-bar-progress-solid',
isHighlighted && 'bg-components-progress-bar-progress-highlight',
)}
style={{ width: `${percent}%` }}
/>
</div>
)
})
ProgressBar.displayName = 'ProgressBar'
export default ProgressBar

View File

@@ -1,203 +0,0 @@
import type { ProcessRuleResponse } from '@/models/datasets'
import { render, screen } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import { ProcessMode } from '@/models/datasets'
import { RETRIEVE_METHOD } from '@/types/app'
import { IndexingType } from '../../../../create/step-two'
import RuleDetail from './rule-detail'
describe('RuleDetail', () => {
const defaultProps = {
indexingType: IndexingType.QUALIFIED,
retrievalMethod: RETRIEVE_METHOD.semantic,
}
const createSourceData = (overrides: Partial<ProcessRuleResponse> = {}): ProcessRuleResponse => ({
mode: ProcessMode.general,
rules: {
segmentation: {
separator: '\n',
max_tokens: 500,
chunk_overlap: 50,
},
pre_processing_rules: [
{ id: 'remove_extra_spaces', enabled: true },
{ id: 'remove_urls_emails', enabled: false },
],
parent_mode: 'full-doc',
subchunk_segmentation: {
separator: '\n',
max_tokens: 200,
chunk_overlap: 20,
},
},
limits: { indexing_max_segmentation_tokens_length: 4000 },
...overrides,
})
describe('Rendering', () => {
it('should render without crashing', () => {
render(<RuleDetail {...defaultProps} />)
expect(screen.getByText(/stepTwo\.indexMode/i)).toBeInTheDocument()
})
it('should render with sourceData', () => {
const sourceData = createSourceData()
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText(/embedding\.mode/i)).toBeInTheDocument()
})
it('should render all segmentation rule fields', () => {
const sourceData = createSourceData()
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText(/embedding\.mode/i)).toBeInTheDocument()
expect(screen.getByText(/embedding\.segmentLength/i)).toBeInTheDocument()
expect(screen.getByText(/embedding\.textCleaning/i)).toBeInTheDocument()
})
})
describe('Mode Display', () => {
it('should display custom mode for general process mode', () => {
const sourceData = createSourceData({ mode: ProcessMode.general })
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText(/embedding\.custom/i)).toBeInTheDocument()
})
it('should display mode label field', () => {
const sourceData = createSourceData()
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText(/embedding\.mode/i)).toBeInTheDocument()
})
})
describe('Segment Length Display', () => {
it('should display max tokens for general mode', () => {
const sourceData = createSourceData({
mode: ProcessMode.general,
rules: {
segmentation: { separator: '\n', max_tokens: 500, chunk_overlap: 50 },
pre_processing_rules: [],
parent_mode: 'full-doc',
subchunk_segmentation: { separator: '\n', max_tokens: 200, chunk_overlap: 20 },
},
})
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText('500')).toBeInTheDocument()
})
it('should display segment length label', () => {
const sourceData = createSourceData()
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText(/embedding\.segmentLength/i)).toBeInTheDocument()
})
})
describe('Text Cleaning Display', () => {
it('should display enabled pre-processing rules', () => {
const sourceData = createSourceData({
rules: {
segmentation: { separator: '\n', max_tokens: 500, chunk_overlap: 50 },
pre_processing_rules: [
{ id: 'remove_extra_spaces', enabled: true },
{ id: 'remove_urls_emails', enabled: true },
],
parent_mode: 'full-doc',
subchunk_segmentation: { separator: '\n', max_tokens: 200, chunk_overlap: 20 },
},
})
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText(/removeExtraSpaces/i)).toBeInTheDocument()
expect(screen.getByText(/removeUrlEmails/i)).toBeInTheDocument()
})
it('should display text cleaning label', () => {
const sourceData = createSourceData()
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText(/embedding\.textCleaning/i)).toBeInTheDocument()
})
})
describe('Index Mode Display', () => {
it('should display economical mode when indexingType is ECONOMICAL', () => {
render(<RuleDetail {...defaultProps} indexingType={IndexingType.ECONOMICAL} />)
expect(screen.getByText(/stepTwo\.economical/i)).toBeInTheDocument()
})
it('should display qualified mode when indexingType is QUALIFIED', () => {
render(<RuleDetail {...defaultProps} indexingType={IndexingType.QUALIFIED} />)
expect(screen.getByText(/stepTwo\.qualified/i)).toBeInTheDocument()
})
})
describe('Retrieval Method Display', () => {
it('should display keyword search for economical mode', () => {
render(<RuleDetail {...defaultProps} indexingType={IndexingType.ECONOMICAL} />)
expect(screen.getByText(/retrieval\.keyword_search\.title/i)).toBeInTheDocument()
})
it('should display semantic search as default for qualified mode', () => {
render(<RuleDetail {...defaultProps} indexingType={IndexingType.QUALIFIED} />)
expect(screen.getByText(/retrieval\.semantic_search\.title/i)).toBeInTheDocument()
})
it('should display full text search when retrievalMethod is fullText', () => {
render(<RuleDetail {...defaultProps} retrievalMethod={RETRIEVE_METHOD.fullText} />)
expect(screen.getByText(/retrieval\.full_text_search\.title/i)).toBeInTheDocument()
})
it('should display hybrid search when retrievalMethod is hybrid', () => {
render(<RuleDetail {...defaultProps} retrievalMethod={RETRIEVE_METHOD.hybrid} />)
expect(screen.getByText(/retrieval\.hybrid_search\.title/i)).toBeInTheDocument()
})
})
describe('Edge Cases', () => {
it('should display dash for missing sourceData', () => {
render(<RuleDetail {...defaultProps} />)
const dashes = screen.getAllByText('-')
expect(dashes.length).toBeGreaterThan(0)
})
it('should display dash when mode is undefined', () => {
const sourceData = { rules: {} } as ProcessRuleResponse
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
const dashes = screen.getAllByText('-')
expect(dashes.length).toBeGreaterThan(0)
})
it('should handle undefined retrievalMethod', () => {
render(<RuleDetail indexingType={IndexingType.QUALIFIED} />)
expect(screen.getByText(/retrieval\.semantic_search\.title/i)).toBeInTheDocument()
})
it('should handle empty pre_processing_rules array', () => {
const sourceData = createSourceData({
rules: {
segmentation: { separator: '\n', max_tokens: 500, chunk_overlap: 50 },
pre_processing_rules: [],
parent_mode: 'full-doc',
subchunk_segmentation: { separator: '\n', max_tokens: 200, chunk_overlap: 20 },
},
})
render(<RuleDetail {...defaultProps} sourceData={sourceData} />)
expect(screen.getByText(/embedding\.textCleaning/i)).toBeInTheDocument()
})
it('should render container with correct structure', () => {
const { container } = render(<RuleDetail {...defaultProps} />)
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('py-3')
})
it('should handle undefined indexingType', () => {
render(<RuleDetail retrievalMethod={RETRIEVE_METHOD.semantic} />)
expect(screen.getByText(/stepTwo\.indexMode/i)).toBeInTheDocument()
})
it('should render divider between sections', () => {
const { container } = render(<RuleDetail {...defaultProps} />)
const dividers = container.querySelectorAll('.bg-divider-subtle')
expect(dividers.length).toBeGreaterThan(0)
})
})
})

View File

@@ -1,128 +0,0 @@
import type { FC } from 'react'
import type { ProcessRuleResponse } from '@/models/datasets'
import type { RETRIEVE_METHOD } from '@/types/app'
import Image from 'next/image'
import * as React from 'react'
import { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import Divider from '@/app/components/base/divider'
import { ProcessMode } from '@/models/datasets'
import { indexMethodIcon, retrievalIcon } from '../../../../create/icons'
import { IndexingType } from '../../../../create/step-two'
import { FieldInfo } from '../../metadata'
type RuleDetailProps = {
sourceData?: ProcessRuleResponse
indexingType?: IndexingType
retrievalMethod?: RETRIEVE_METHOD
}
const getRetrievalIcon = (method?: RETRIEVE_METHOD) => {
if (method === 'full_text_search')
return retrievalIcon.fullText
if (method === 'hybrid_search')
return retrievalIcon.hybrid
return retrievalIcon.vector
}
const RuleDetail: FC<RuleDetailProps> = React.memo(({
sourceData,
indexingType,
retrievalMethod,
}) => {
const { t } = useTranslation()
const segmentationRuleMap = {
mode: t('embedding.mode', { ns: 'datasetDocuments' }),
segmentLength: t('embedding.segmentLength', { ns: 'datasetDocuments' }),
textCleaning: t('embedding.textCleaning', { ns: 'datasetDocuments' }),
}
const getRuleName = useCallback((key: string) => {
const ruleNameMap: Record<string, string> = {
remove_extra_spaces: t('stepTwo.removeExtraSpaces', { ns: 'datasetCreation' }),
remove_urls_emails: t('stepTwo.removeUrlEmails', { ns: 'datasetCreation' }),
remove_stopwords: t('stepTwo.removeStopwords', { ns: 'datasetCreation' }),
}
return ruleNameMap[key]
}, [t])
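// Resolve each display field from the process rules, with '-' as the fallback
// for anything missing; hierarchical mode shows parent and child token limits
// side by side.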
const getValue = useCallback((field: string) => {
const defaultValue = '-'
if (!sourceData?.mode)
return defaultValue
const maxTokens = typeof sourceData?.rules?.segmentation?.max_tokens === 'number'
? sourceData.rules.segmentation.max_tokens
: defaultValue
const childMaxTokens = typeof sourceData?.rules?.subchunk_segmentation?.max_tokens === 'number'
? sourceData.rules.subchunk_segmentation.max_tokens
: defaultValue
const isGeneralMode = sourceData.mode === ProcessMode.general
const fieldValueMap: Record<string, string | number> = {
mode: isGeneralMode
? t('embedding.custom', { ns: 'datasetDocuments' })
: `${t('embedding.hierarchical', { ns: 'datasetDocuments' })} · ${
sourceData?.rules?.parent_mode === 'paragraph'
? t('parentMode.paragraph', { ns: 'dataset' })
: t('parentMode.fullDoc', { ns: 'dataset' })
}`,
segmentLength: isGeneralMode
? maxTokens
: `${t('embedding.parentMaxTokens', { ns: 'datasetDocuments' })} ${maxTokens}; ${t('embedding.childMaxTokens', { ns: 'datasetDocuments' })} ${childMaxTokens}`,
textCleaning: sourceData?.rules?.pre_processing_rules
?.filter(rule => rule.enabled)
.map(rule => getRuleName(rule.id))
.join(',') || defaultValue,
}
return fieldValueMap[field] ?? defaultValue
}, [sourceData, t, getRuleName])
const isEconomical = indexingType === IndexingType.ECONOMICAL
return (
<div className="py-3">
<div className="flex flex-col gap-y-1">
{Object.keys(segmentationRuleMap).map(field => (
<FieldInfo
key={field}
label={segmentationRuleMap[field as keyof typeof segmentationRuleMap]}
displayedValue={String(getValue(field))}
/>
))}
</div>
<Divider type="horizontal" className="bg-divider-subtle" />
<FieldInfo
label={t('stepTwo.indexMode', { ns: 'datasetCreation' })}
displayedValue={t(`stepTwo.${isEconomical ? 'economical' : 'qualified'}`, { ns: 'datasetCreation' }) as string}
valueIcon={(
<Image
className="size-4"
src={isEconomical ? indexMethodIcon.economical : indexMethodIcon.high_quality}
alt=""
/>
)}
/>
<FieldInfo
label={t('form.retrievalSetting.title', { ns: 'datasetSettings' })}
displayedValue={t(`retrieval.${isEconomical ? 'keyword_search' : retrievalMethod ?? 'semantic_search'}.title`, { ns: 'dataset' })}
valueIcon={(
<Image
className="size-4"
src={getRetrievalIcon(retrievalMethod)}
alt=""
/>
)}
/>
</div>
)
})
RuleDetail.displayName = 'RuleDetail'
export default RuleDetail


@@ -1,81 +0,0 @@
import { render, screen } from '@testing-library/react'
import { describe, expect, it } from 'vitest'
import SegmentProgress from './segment-progress'
describe('SegmentProgress', () => {
const defaultProps = {
completedSegments: 50,
totalSegments: 100,
percent: 50,
}
describe('Rendering', () => {
it('should render without crashing', () => {
render(<SegmentProgress {...defaultProps} />)
expect(screen.getByText(/segments/i)).toBeInTheDocument()
})
it('should render with correct CSS classes', () => {
const { container } = render(<SegmentProgress {...defaultProps} />)
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('flex', 'w-full', 'items-center')
})
it('should render text with correct styling class', () => {
render(<SegmentProgress {...defaultProps} />)
const text = screen.getByText(/segments/i)
expect(text).toHaveClass('system-xs-medium', 'text-text-secondary')
})
})
describe('Progress Display', () => {
it('should display completed and total segments', () => {
render(<SegmentProgress completedSegments={50} totalSegments={100} percent={50} />)
expect(screen.getByText(/50\/100/)).toBeInTheDocument()
})
it('should display percent value', () => {
render(<SegmentProgress completedSegments={50} totalSegments={100} percent={50} />)
expect(screen.getByText(/50%/)).toBeInTheDocument()
})
it('should display 0/0 when segments are 0', () => {
render(<SegmentProgress completedSegments={0} totalSegments={0} percent={0} />)
expect(screen.getByText(/0\/0/)).toBeInTheDocument()
expect(screen.getByText(/0%/)).toBeInTheDocument()
})
it('should display 100% when completed', () => {
render(<SegmentProgress completedSegments={100} totalSegments={100} percent={100} />)
expect(screen.getByText(/100\/100/)).toBeInTheDocument()
expect(screen.getByText(/100%/)).toBeInTheDocument()
})
})
describe('Edge Cases', () => {
it('should display -- when completedSegments is undefined', () => {
render(<SegmentProgress totalSegments={100} percent={0} />)
expect(screen.getByText(/--\/100/)).toBeInTheDocument()
})
it('should display -- when totalSegments is undefined', () => {
render(<SegmentProgress completedSegments={50} percent={50} />)
expect(screen.getByText(/50\/--/)).toBeInTheDocument()
})
it('should display --/-- when both segments are undefined', () => {
render(<SegmentProgress percent={0} />)
expect(screen.getByText(/--\/--/)).toBeInTheDocument()
})
it('should handle large numbers', () => {
render(<SegmentProgress completedSegments={999999} totalSegments={1000000} percent={99} />)
expect(screen.getByText(/999999\/1000000/)).toBeInTheDocument()
})
it('should handle decimal percent', () => {
render(<SegmentProgress completedSegments={33} totalSegments={100} percent={33.33} />)
expect(screen.getByText(/33\.33%/)).toBeInTheDocument()
})
})
})


@@ -1,32 +0,0 @@
import type { FC } from 'react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
type SegmentProgressProps = {
completedSegments?: number
totalSegments?: number
percent: number
}
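// percent is always provided, but segment counts may still be missing while
// the first status response is pending, hence the optional props and '--' fallback.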
const SegmentProgress: FC<SegmentProgressProps> = React.memo(({
completedSegments,
totalSegments,
percent,
}) => {
const { t } = useTranslation()
const completed = completedSegments ?? '--'
const total = totalSegments ?? '--'
return (
<div className="flex w-full items-center">
<span className="system-xs-medium text-text-secondary">
{`${t('embedding.segments', { ns: 'datasetDocuments' })} ${completed}/${total} · ${percent}%`}
</span>
</div>
)
})
SegmentProgress.displayName = 'SegmentProgress'
export default SegmentProgress


@@ -1,155 +0,0 @@
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import StatusHeader from './status-header'
describe('StatusHeader', () => {
const defaultProps = {
isEmbedding: false,
isCompleted: false,
isPaused: false,
isError: false,
onPause: vi.fn(),
onResume: vi.fn(),
}
beforeEach(() => {
vi.clearAllMocks()
})
describe('Rendering', () => {
it('should render without crashing', () => {
const { container } = render(<StatusHeader {...defaultProps} />)
expect(container.firstChild).toBeInTheDocument()
})
it('should render with correct container classes', () => {
const { container } = render(<StatusHeader {...defaultProps} />)
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('flex', 'h-6', 'items-center', 'gap-x-1')
})
})
describe('Status Text', () => {
it('should display processing text when isEmbedding is true', () => {
render(<StatusHeader {...defaultProps} isEmbedding />)
expect(screen.getByText(/embedding\.processing/i)).toBeInTheDocument()
})
it('should display completed text when isCompleted is true', () => {
render(<StatusHeader {...defaultProps} isCompleted />)
expect(screen.getByText(/embedding\.completed/i)).toBeInTheDocument()
})
it('should display paused text when isPaused is true', () => {
render(<StatusHeader {...defaultProps} isPaused />)
expect(screen.getByText(/embedding\.paused/i)).toBeInTheDocument()
})
it('should display error text when isError is true', () => {
render(<StatusHeader {...defaultProps} isError />)
expect(screen.getByText(/embedding\.error/i)).toBeInTheDocument()
})
it('should display empty text when no status flags are set', () => {
render(<StatusHeader {...defaultProps} />)
const statusText = screen.getByText('', { selector: 'span.system-md-semibold-uppercase' })
expect(statusText).toBeInTheDocument()
})
})
describe('Loading Spinner', () => {
it('should show loading spinner when isEmbedding is true', () => {
const { container } = render(<StatusHeader {...defaultProps} isEmbedding />)
const spinner = container.querySelector('svg.animate-spin')
expect(spinner).toBeInTheDocument()
})
it('should not show loading spinner when isEmbedding is false', () => {
const { container } = render(<StatusHeader {...defaultProps} isEmbedding={false} />)
const spinner = container.querySelector('svg.animate-spin')
expect(spinner).not.toBeInTheDocument()
})
})
describe('Pause Button', () => {
it('should show pause button when isEmbedding is true', () => {
render(<StatusHeader {...defaultProps} isEmbedding />)
expect(screen.getByRole('button')).toBeInTheDocument()
expect(screen.getByText(/embedding\.pause/i)).toBeInTheDocument()
})
it('should not show pause button when isEmbedding is false', () => {
render(<StatusHeader {...defaultProps} isEmbedding={false} />)
expect(screen.queryByText(/embedding\.pause/i)).not.toBeInTheDocument()
})
it('should call onPause when pause button is clicked', () => {
const onPause = vi.fn()
render(<StatusHeader {...defaultProps} isEmbedding onPause={onPause} />)
fireEvent.click(screen.getByRole('button'))
expect(onPause).toHaveBeenCalledTimes(1)
})
it('should disable pause button when isPauseLoading is true', () => {
render(<StatusHeader {...defaultProps} isEmbedding isPauseLoading />)
expect(screen.getByRole('button')).toBeDisabled()
})
})
describe('Resume Button', () => {
it('should show resume button when isPaused is true', () => {
render(<StatusHeader {...defaultProps} isPaused />)
expect(screen.getByRole('button')).toBeInTheDocument()
expect(screen.getByText(/embedding\.resume/i)).toBeInTheDocument()
})
it('should not show resume button when isPaused is false', () => {
render(<StatusHeader {...defaultProps} isPaused={false} />)
expect(screen.queryByText(/embedding\.resume/i)).not.toBeInTheDocument()
})
it('should call onResume when resume button is clicked', () => {
const onResume = vi.fn()
render(<StatusHeader {...defaultProps} isPaused onResume={onResume} />)
fireEvent.click(screen.getByRole('button'))
expect(onResume).toHaveBeenCalledTimes(1)
})
it('should disable resume button when isResumeLoading is true', () => {
render(<StatusHeader {...defaultProps} isPaused isResumeLoading />)
expect(screen.getByRole('button')).toBeDisabled()
})
})
describe('Button Styles', () => {
it('should have correct button styles for pause button', () => {
render(<StatusHeader {...defaultProps} isEmbedding />)
const button = screen.getByRole('button')
expect(button).toHaveClass('flex', 'items-center', 'gap-x-1', 'rounded-md')
})
it('should have correct button styles for resume button', () => {
render(<StatusHeader {...defaultProps} isPaused />)
const button = screen.getByRole('button')
expect(button).toHaveClass('flex', 'items-center', 'gap-x-1', 'rounded-md')
})
})
describe('Edge Cases', () => {
it('should not show any buttons when isCompleted', () => {
render(<StatusHeader {...defaultProps} isCompleted />)
expect(screen.queryByRole('button')).not.toBeInTheDocument()
})
it('should not show any buttons when isError', () => {
render(<StatusHeader {...defaultProps} isError />)
expect(screen.queryByRole('button')).not.toBeInTheDocument()
})
it('should show both buttons when isEmbedding and isPaused are both true', () => {
render(<StatusHeader {...defaultProps} isEmbedding isPaused />)
const buttons = screen.getAllByRole('button')
expect(buttons.length).toBe(2)
})
})
})


@@ -1,84 +0,0 @@
import type { FC } from 'react'
import { RiLoader2Line, RiPauseCircleLine, RiPlayCircleLine } from '@remixicon/react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
type StatusHeaderProps = {
isEmbedding: boolean
isCompleted: boolean
isPaused: boolean
isError: boolean
onPause: () => void
onResume: () => void
isPauseLoading?: boolean
isResumeLoading?: boolean
}
const StatusHeader: FC<StatusHeaderProps> = React.memo(({
isEmbedding,
isCompleted,
isPaused,
isError,
onPause,
onResume,
isPauseLoading,
isResumeLoading,
}) => {
const { t } = useTranslation()
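// Status text precedence: embedding > completed > paused > error. The pause and
// resume buttons are driven independently by isEmbedding and isPaused, so both
// can render if both flags are set.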
const getStatusText = () => {
if (isEmbedding)
return t('embedding.processing', { ns: 'datasetDocuments' })
if (isCompleted)
return t('embedding.completed', { ns: 'datasetDocuments' })
if (isPaused)
return t('embedding.paused', { ns: 'datasetDocuments' })
if (isError)
return t('embedding.error', { ns: 'datasetDocuments' })
return ''
}
const buttonBaseClass = `flex items-center gap-x-1 rounded-md border-[0.5px]
border-components-button-secondary-border bg-components-button-secondary-bg
px-1.5 py-1 shadow-xs shadow-shadow-shadow-3 backdrop-blur-[5px]
disabled:cursor-not-allowed disabled:opacity-50`
return (
<div className="flex h-6 items-center gap-x-1">
{isEmbedding && <RiLoader2Line className="h-4 w-4 animate-spin text-text-secondary" />}
<span className="system-md-semibold-uppercase grow text-text-secondary">
{getStatusText()}
</span>
{isEmbedding && (
<button
type="button"
className={buttonBaseClass}
onClick={onPause}
disabled={isPauseLoading}
>
<RiPauseCircleLine className="h-3.5 w-3.5 text-components-button-secondary-text" />
<span className="system-xs-medium pr-[3px] text-components-button-secondary-text">
{t('embedding.pause', { ns: 'datasetDocuments' })}
</span>
</button>
)}
{isPaused && (
<button
type="button"
className={buttonBaseClass}
onClick={onResume}
disabled={isResumeLoading}
>
<RiPlayCircleLine className="h-3.5 w-3.5 text-components-button-secondary-text" />
<span className="system-xs-medium pr-[3px] text-components-button-secondary-text">
{t('embedding.resume', { ns: 'datasetDocuments' })}
</span>
</button>
)}
</div>
)
})
StatusHeader.displayName = 'StatusHeader'
export default StatusHeader


@@ -1,10 +0,0 @@
export {
calculatePercent,
isEmbeddingStatus,
isTerminalStatus,
useEmbeddingStatus,
useInvalidateEmbeddingStatus,
usePauseIndexing,
useResumeIndexing,
} from './use-embedding-status'
export type { EmbeddingStatusType } from './use-embedding-status'


@@ -1,462 +0,0 @@
import type { ReactNode } from 'react'
import type { IndexingStatusResponse } from '@/models/datasets'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { act, renderHook, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import * as datasetsService from '@/service/datasets'
import {
calculatePercent,
isEmbeddingStatus,
isTerminalStatus,
useEmbeddingStatus,
useInvalidateEmbeddingStatus,
usePauseIndexing,
useResumeIndexing,
} from './use-embedding-status'
vi.mock('@/service/datasets')
const mockFetchIndexingStatus = vi.mocked(datasetsService.fetchIndexingStatus)
const mockPauseDocIndexing = vi.mocked(datasetsService.pauseDocIndexing)
const mockResumeDocIndexing = vi.mocked(datasetsService.resumeDocIndexing)
const createTestQueryClient = () => new QueryClient({
defaultOptions: {
queries: { retry: false },
mutations: { retry: false },
},
})
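// Retries are disabled so error-path tests fail fast instead of retrying.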
const createWrapper = () => {
const queryClient = createTestQueryClient()
return ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
{children}
</QueryClientProvider>
)
}
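// Factory for a realistic in-progress status payload; individual tests
// override only the fields they exercise.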
const mockIndexingStatus = (overrides: Partial<IndexingStatusResponse> = {}): IndexingStatusResponse => ({
id: 'doc1',
indexing_status: 'indexing',
completed_segments: 50,
total_segments: 100,
processing_started_at: 0,
parsing_completed_at: 0,
cleaning_completed_at: 0,
splitting_completed_at: 0,
completed_at: null,
paused_at: null,
error: null,
stopped_at: null,
...overrides,
})
describe('use-embedding-status', () => {
beforeEach(() => {
vi.clearAllMocks()
})
describe('isEmbeddingStatus', () => {
it('should return true for indexing status', () => {
expect(isEmbeddingStatus('indexing')).toBe(true)
})
it('should return true for splitting status', () => {
expect(isEmbeddingStatus('splitting')).toBe(true)
})
it('should return true for parsing status', () => {
expect(isEmbeddingStatus('parsing')).toBe(true)
})
it('should return true for cleaning status', () => {
expect(isEmbeddingStatus('cleaning')).toBe(true)
})
it('should return false for completed status', () => {
expect(isEmbeddingStatus('completed')).toBe(false)
})
it('should return false for paused status', () => {
expect(isEmbeddingStatus('paused')).toBe(false)
})
it('should return false for error status', () => {
expect(isEmbeddingStatus('error')).toBe(false)
})
it('should return false for undefined', () => {
expect(isEmbeddingStatus(undefined)).toBe(false)
})
it('should return false for empty string', () => {
expect(isEmbeddingStatus('')).toBe(false)
})
})
describe('isTerminalStatus', () => {
it('should return true for completed status', () => {
expect(isTerminalStatus('completed')).toBe(true)
})
it('should return true for error status', () => {
expect(isTerminalStatus('error')).toBe(true)
})
it('should return true for paused status', () => {
expect(isTerminalStatus('paused')).toBe(true)
})
it('should return false for indexing status', () => {
expect(isTerminalStatus('indexing')).toBe(false)
})
it('should return false for undefined', () => {
expect(isTerminalStatus(undefined)).toBe(false)
})
})
describe('calculatePercent', () => {
it('should calculate percent correctly', () => {
expect(calculatePercent(50, 100)).toBe(50)
})
it('should return 0 when total is 0', () => {
expect(calculatePercent(50, 0)).toBe(0)
})
it('should return 0 when total is undefined', () => {
expect(calculatePercent(50, undefined)).toBe(0)
})
it('should return 0 when completed is undefined', () => {
expect(calculatePercent(undefined, 100)).toBe(0)
})
it('should cap at 100 when percent exceeds 100', () => {
expect(calculatePercent(150, 100)).toBe(100)
})
it('should round to nearest integer', () => {
expect(calculatePercent(33, 100)).toBe(33)
expect(calculatePercent(1, 3)).toBe(33)
})
})
describe('useEmbeddingStatus', () => {
it('should return initial state when disabled', () => {
const { result } = renderHook(
() => useEmbeddingStatus({ datasetId: 'ds1', documentId: 'doc1', enabled: false }),
{ wrapper: createWrapper() },
)
expect(result.current.isEmbedding).toBe(false)
expect(result.current.isCompleted).toBe(false)
expect(result.current.isPaused).toBe(false)
expect(result.current.isError).toBe(false)
expect(result.current.percent).toBe(0)
})
it('should not fetch when datasetId is missing', () => {
renderHook(
() => useEmbeddingStatus({ documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
expect(mockFetchIndexingStatus).not.toHaveBeenCalled()
})
it('should not fetch when documentId is missing', () => {
renderHook(
() => useEmbeddingStatus({ datasetId: 'ds1' }),
{ wrapper: createWrapper() },
)
expect(mockFetchIndexingStatus).not.toHaveBeenCalled()
})
it('should fetch indexing status when enabled with valid ids', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
const { result } = renderHook(
() => useEmbeddingStatus({ datasetId: 'ds1', documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
await waitFor(() => {
expect(result.current.isEmbedding).toBe(true)
})
expect(mockFetchIndexingStatus).toHaveBeenCalledWith({
datasetId: 'ds1',
documentId: 'doc1',
})
expect(result.current.percent).toBe(50)
})
it('should set isCompleted when status is completed', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({
indexing_status: 'completed',
completed_segments: 100,
}))
const { result } = renderHook(
() => useEmbeddingStatus({ datasetId: 'ds1', documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
await waitFor(() => {
expect(result.current.isCompleted).toBe(true)
})
expect(result.current.percent).toBe(100)
})
it('should set isPaused when status is paused', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({
indexing_status: 'paused',
}))
const { result } = renderHook(
() => useEmbeddingStatus({ datasetId: 'ds1', documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
await waitFor(() => {
expect(result.current.isPaused).toBe(true)
})
})
it('should set isError when status is error', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({
indexing_status: 'error',
completed_segments: 25,
}))
const { result } = renderHook(
() => useEmbeddingStatus({ datasetId: 'ds1', documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
await waitFor(() => {
expect(result.current.isError).toBe(true)
})
})
it('should provide invalidate function', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
const { result } = renderHook(
() => useEmbeddingStatus({ datasetId: 'ds1', documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
await waitFor(() => {
expect(result.current.isEmbedding).toBe(true)
})
expect(typeof result.current.invalidate).toBe('function')
// Calling invalidate should not throw
await act(async () => {
result.current.invalidate()
})
})
it('should provide resetStatus function that clears data', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
const { result } = renderHook(
() => useEmbeddingStatus({ datasetId: 'ds1', documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
await waitFor(() => {
expect(result.current.data).toBeDefined()
})
// Reset status should clear the data
await act(async () => {
result.current.resetStatus()
})
await waitFor(() => {
expect(result.current.data).toBeNull()
})
})
})
describe('usePauseIndexing', () => {
it('should call pauseDocIndexing when mutate is called', async () => {
mockPauseDocIndexing.mockResolvedValue({ result: 'success' })
const { result } = renderHook(
() => usePauseIndexing({ datasetId: 'ds1', documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
await act(async () => {
result.current.mutate()
})
await waitFor(() => {
expect(mockPauseDocIndexing).toHaveBeenCalledWith({
datasetId: 'ds1',
documentId: 'doc1',
})
})
})
it('should call onSuccess callback on successful pause', async () => {
mockPauseDocIndexing.mockResolvedValue({ result: 'success' })
const onSuccess = vi.fn()
const { result } = renderHook(
() => usePauseIndexing({ datasetId: 'ds1', documentId: 'doc1', onSuccess }),
{ wrapper: createWrapper() },
)
await act(async () => {
result.current.mutate()
})
await waitFor(() => {
expect(onSuccess).toHaveBeenCalled()
})
})
it('should call onError callback on failed pause', async () => {
const error = new Error('Network error')
mockPauseDocIndexing.mockRejectedValue(error)
const onError = vi.fn()
const { result } = renderHook(
() => usePauseIndexing({ datasetId: 'ds1', documentId: 'doc1', onError }),
{ wrapper: createWrapper() },
)
await act(async () => {
result.current.mutate()
})
await waitFor(() => {
expect(onError).toHaveBeenCalled()
expect(onError.mock.calls[0][0]).toEqual(error)
})
})
})
describe('useResumeIndexing', () => {
it('should call resumeDocIndexing when mutate is called', async () => {
mockResumeDocIndexing.mockResolvedValue({ result: 'success' })
const { result } = renderHook(
() => useResumeIndexing({ datasetId: 'ds1', documentId: 'doc1' }),
{ wrapper: createWrapper() },
)
await act(async () => {
result.current.mutate()
})
await waitFor(() => {
expect(mockResumeDocIndexing).toHaveBeenCalledWith({
datasetId: 'ds1',
documentId: 'doc1',
})
})
})
it('should call onSuccess callback on successful resume', async () => {
mockResumeDocIndexing.mockResolvedValue({ result: 'success' })
const onSuccess = vi.fn()
const { result } = renderHook(
() => useResumeIndexing({ datasetId: 'ds1', documentId: 'doc1', onSuccess }),
{ wrapper: createWrapper() },
)
await act(async () => {
result.current.mutate()
})
await waitFor(() => {
expect(onSuccess).toHaveBeenCalled()
})
})
})
describe('useInvalidateEmbeddingStatus', () => {
it('should return a function', () => {
const { result } = renderHook(
() => useInvalidateEmbeddingStatus(),
{ wrapper: createWrapper() },
)
expect(typeof result.current).toBe('function')
})
it('should invalidate specific query when datasetId and documentId are provided', async () => {
const queryClient = createTestQueryClient()
const wrapper = ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
{children}
</QueryClientProvider>
)
// Set some initial data in the cache
queryClient.setQueryData(['embedding', 'indexing-status', 'ds1', 'doc1'], {
id: 'doc1',
indexing_status: 'indexing',
})
const { result } = renderHook(
() => useInvalidateEmbeddingStatus(),
{ wrapper },
)
await act(async () => {
result.current('ds1', 'doc1')
})
// The query should be invalidated (marked as stale)
const queryState = queryClient.getQueryState(['embedding', 'indexing-status', 'ds1', 'doc1'])
expect(queryState?.isInvalidated).toBe(true)
})
it('should invalidate all embedding status queries when ids are not provided', async () => {
const queryClient = createTestQueryClient()
const wrapper = ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
{children}
</QueryClientProvider>
)
// Set some initial data in the cache for multiple documents
queryClient.setQueryData(['embedding', 'indexing-status', 'ds1', 'doc1'], {
id: 'doc1',
indexing_status: 'indexing',
})
queryClient.setQueryData(['embedding', 'indexing-status', 'ds2', 'doc2'], {
id: 'doc2',
indexing_status: 'completed',
})
const { result } = renderHook(
() => useInvalidateEmbeddingStatus(),
{ wrapper },
)
await act(async () => {
result.current()
})
// Both queries should be invalidated
const queryState1 = queryClient.getQueryState(['embedding', 'indexing-status', 'ds1', 'doc1'])
const queryState2 = queryClient.getQueryState(['embedding', 'indexing-status', 'ds2', 'doc2'])
expect(queryState1?.isInvalidated).toBe(true)
expect(queryState2?.isInvalidated).toBe(true)
})
})
})


@@ -1,149 +0,0 @@
import type { CommonResponse } from '@/models/common'
import type { IndexingStatusResponse } from '@/models/datasets'
import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'
import { useCallback, useEffect, useMemo, useRef } from 'react'
import {
fetchIndexingStatus,
pauseDocIndexing,
resumeDocIndexing,
} from '@/service/datasets'
const NAME_SPACE = 'embedding'
export type EmbeddingStatusType = 'indexing' | 'splitting' | 'parsing' | 'cleaning' | 'completed' | 'paused' | 'error' | 'waiting' | ''
const EMBEDDING_STATUSES = ['indexing', 'splitting', 'parsing', 'cleaning'] as const
const TERMINAL_STATUSES = ['completed', 'error', 'paused'] as const
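// Two polling buckets: active embedding statuses keep the refetch interval
// alive, terminal statuses stop it. 'waiting' and '' belong to neither, so
// they read as "not embedding yet" rather than finished.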
export const isEmbeddingStatus = (status?: string): boolean => {
return EMBEDDING_STATUSES.includes(status as typeof EMBEDDING_STATUSES[number])
}
export const isTerminalStatus = (status?: string): boolean => {
return TERMINAL_STATUSES.includes(status as typeof TERMINAL_STATUSES[number])
}
export const calculatePercent = (completed?: number, total?: number): number => {
if (!total || total === 0)
return 0
const percent = Math.round((completed || 0) * 100 / total)
return Math.min(percent, 100)
}
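// e.g. calculatePercent(1, 3) === 33 (rounded) and calculatePercent(150, 100) === 100 (capped).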
type UseEmbeddingStatusOptions = {
datasetId?: string
documentId?: string
enabled?: boolean
onComplete?: () => void
}
export const useEmbeddingStatus = ({
datasetId,
documentId,
enabled = true,
onComplete,
}: UseEmbeddingStatusOptions) => {
const queryClient = useQueryClient()
const isPolling = useRef(false)
const onCompleteRef = useRef(onComplete)
onCompleteRef.current = onComplete
const queryKey = useMemo(
() => [NAME_SPACE, 'indexing-status', datasetId, documentId] as const,
[datasetId, documentId],
)
const query = useQuery<IndexingStatusResponse>({
queryKey,
queryFn: () => fetchIndexingStatus({ datasetId: datasetId!, documentId: documentId! }),
enabled: enabled && !!datasetId && !!documentId,
refetchInterval: (query) => {
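// Poll every 2.5s while indexing is in flight; returning false stops the
// interval once a terminal status is observed.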
const status = query.state.data?.indexing_status
if (isTerminalStatus(status)) {
return false
}
return 2500
},
refetchOnWindowFocus: false,
})
const status = query.data?.indexing_status || ''
const isEmbedding = isEmbeddingStatus(status)
const isCompleted = status === 'completed'
const isPaused = status === 'paused'
const isError = status === 'error'
const percent = calculatePercent(query.data?.completed_segments, query.data?.total_segments)
// Handle completion callback
useEffect(() => {
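// Fire onComplete only on an active → terminal transition: isPolling is
// latched while an embedding status is observed, so a document that mounts
// already completed/paused/errored does not trigger the callback.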
if (isTerminalStatus(status) && isPolling.current) {
isPolling.current = false
onCompleteRef.current?.()
}
if (isEmbedding) {
isPolling.current = true
}
}, [status, isEmbedding])
const invalidate = useCallback(() => {
queryClient.invalidateQueries({ queryKey })
}, [queryClient, queryKey])
const resetStatus = useCallback(() => {
queryClient.setQueryData(queryKey, null)
}, [queryClient, queryKey])
return {
data: query.data,
isLoading: query.isLoading,
isEmbedding,
isCompleted,
isPaused,
isError,
percent,
invalidate,
resetStatus,
refetch: query.refetch,
}
}
type UsePauseResumeOptions = {
datasetId?: string
documentId?: string
onSuccess?: () => void
onError?: (error: Error) => void
}
export const usePauseIndexing = ({ datasetId, documentId, onSuccess, onError }: UsePauseResumeOptions) => {
return useMutation<CommonResponse, Error>({
mutationKey: [NAME_SPACE, 'pause', datasetId, documentId],
mutationFn: () => pauseDocIndexing({ datasetId: datasetId!, documentId: documentId! }),
onSuccess,
onError,
})
}
export const useResumeIndexing = ({ datasetId, documentId, onSuccess, onError }: UsePauseResumeOptions) => {
return useMutation<CommonResponse, Error>({
mutationKey: [NAME_SPACE, 'resume', datasetId, documentId],
mutationFn: () => resumeDocIndexing({ datasetId: datasetId!, documentId: documentId! }),
onSuccess,
onError,
})
}
export const useInvalidateEmbeddingStatus = () => {
const queryClient = useQueryClient()
return useCallback((datasetId?: string, documentId?: string) => {
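// With both ids the exact document query is invalidated; otherwise the
// shorter key acts as a prefix match and invalidates every cached
// embedding-status query.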
if (datasetId && documentId) {
queryClient.invalidateQueries({
queryKey: [NAME_SPACE, 'indexing-status', datasetId, documentId],
})
}
else {
queryClient.invalidateQueries({
queryKey: [NAME_SPACE, 'indexing-status'],
})
}
}, [queryClient])
}
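
For orientation, a minimal consumer of these hooks might look like the sketch below. The component name and markup are illustrative assumptions; only the three hook APIs come from the file above.

// Hypothetical consumer sketch — everything except the imported hooks is illustrative.
import type { FC } from 'react'
import * as React from 'react'
import { useEmbeddingStatus, usePauseIndexing, useResumeIndexing } from './use-embedding-status'
const EmbeddingPanel: FC<{ datasetId: string, documentId: string }> = ({ datasetId, documentId }) => {
  const { isEmbedding, isPaused, percent, refetch } = useEmbeddingStatus({ datasetId, documentId })
  const pause = usePauseIndexing({ datasetId, documentId })
  const resume = useResumeIndexing({ datasetId, documentId, onSuccess: () => refetch() })
  return (
    <div>
      <span>{`${percent}%`}</span>
      {isEmbedding && <button type="button" onClick={() => pause.mutate()} disabled={pause.isPending}>Pause</button>}
      {isPaused && <button type="button" onClick={() => resume.mutate()} disabled={resume.isPending}>Resume</button>}
    </div>
  )
}
export default EmbeddingPanel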


@@ -1,337 +0,0 @@
import type { ReactNode } from 'react'
import type { DocumentContextValue } from '../context'
import type { IndexingStatusResponse, ProcessRuleResponse } from '@/models/datasets'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { ProcessMode } from '@/models/datasets'
import * as datasetsService from '@/service/datasets'
import * as useDataset from '@/service/knowledge/use-dataset'
import { RETRIEVE_METHOD } from '@/types/app'
import { IndexingType } from '../../../create/step-two'
import { DocumentContext } from '../context'
import EmbeddingDetail from './index'
vi.mock('@/service/datasets')
vi.mock('@/service/knowledge/use-dataset')
const mockFetchIndexingStatus = vi.mocked(datasetsService.fetchIndexingStatus)
const mockPauseDocIndexing = vi.mocked(datasetsService.pauseDocIndexing)
const mockResumeDocIndexing = vi.mocked(datasetsService.resumeDocIndexing)
const mockUseProcessRule = vi.mocked(useDataset.useProcessRule)
const createTestQueryClient = () => new QueryClient({
defaultOptions: {
queries: { retry: false, gcTime: 0 },
mutations: { retry: false },
},
})
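// retry disabled and gcTime: 0 keep each case on a fresh, non-retrying cache.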
const createWrapper = (contextValue: DocumentContextValue = { datasetId: 'ds1', documentId: 'doc1' }) => {
const queryClient = createTestQueryClient()
return ({ children }: { children: ReactNode }) => (
<QueryClientProvider client={queryClient}>
<DocumentContext.Provider value={contextValue}>
{children}
</DocumentContext.Provider>
</QueryClientProvider>
)
}
const mockIndexingStatus = (overrides: Partial<IndexingStatusResponse> = {}): IndexingStatusResponse => ({
id: 'doc1',
indexing_status: 'indexing',
completed_segments: 50,
total_segments: 100,
processing_started_at: Date.now(),
parsing_completed_at: 0,
cleaning_completed_at: 0,
splitting_completed_at: 0,
completed_at: null,
paused_at: null,
error: null,
stopped_at: null,
...overrides,
})
const mockProcessRule = (overrides: Partial<ProcessRuleResponse> = {}): ProcessRuleResponse => ({
mode: ProcessMode.general,
rules: {
segmentation: { separator: '\n', max_tokens: 500, chunk_overlap: 50 },
pre_processing_rules: [{ id: 'remove_extra_spaces', enabled: true }],
parent_mode: 'full-doc',
subchunk_segmentation: { separator: '\n', max_tokens: 200, chunk_overlap: 20 },
},
limits: { indexing_max_segmentation_tokens_length: 4000 },
...overrides,
})
describe('EmbeddingDetail', () => {
const defaultProps = {
detailUpdate: vi.fn(),
indexingType: IndexingType.QUALIFIED,
retrievalMethod: RETRIEVE_METHOD.semantic,
}
beforeEach(() => {
vi.clearAllMocks()
mockUseProcessRule.mockReturnValue({
data: mockProcessRule(),
isLoading: false,
error: null,
} as ReturnType<typeof useDataset.useProcessRule>)
})
describe('Rendering', () => {
it('should render without crashing', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.processing/i)).toBeInTheDocument()
})
})
it('should render with provided datasetId and documentId props', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
render(
<EmbeddingDetail {...defaultProps} datasetId="custom-ds" documentId="custom-doc" />,
{ wrapper: createWrapper({ datasetId: '', documentId: '' }) },
)
await waitFor(() => {
expect(mockFetchIndexingStatus).toHaveBeenCalledWith({
datasetId: 'custom-ds',
documentId: 'custom-doc',
})
})
})
it('should fall back to context values when props are not provided', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(mockFetchIndexingStatus).toHaveBeenCalledWith({
datasetId: 'ds1',
documentId: 'doc1',
})
})
})
})
describe('Status Display', () => {
it('should show processing status when indexing', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({ indexing_status: 'indexing' }))
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.processing/i)).toBeInTheDocument()
})
})
it('should show completed status', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({ indexing_status: 'completed' }))
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.completed/i)).toBeInTheDocument()
})
})
it('should show paused status', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({ indexing_status: 'paused' }))
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.paused/i)).toBeInTheDocument()
})
})
it('should show error status', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({ indexing_status: 'error' }))
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.error/i)).toBeInTheDocument()
})
})
})
describe('Progress Display', () => {
it('should display segment progress', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({
completed_segments: 50,
total_segments: 100,
}))
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/50\/100/)).toBeInTheDocument()
expect(screen.getByText(/50%/)).toBeInTheDocument()
})
})
})
describe('Pause/Resume Actions', () => {
it('should show pause button when embedding is in progress', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({ indexing_status: 'indexing' }))
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.pause/i)).toBeInTheDocument()
})
})
it('should show resume button when paused', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({ indexing_status: 'paused' }))
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.resume/i)).toBeInTheDocument()
})
})
it('should call pause API when pause button is clicked', async () => {
const user = userEvent.setup()
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({ indexing_status: 'indexing' }))
mockPauseDocIndexing.mockResolvedValue({ result: 'success' })
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.pause/i)).toBeInTheDocument()
})
await user.click(screen.getByRole('button', { name: /pause/i }))
await waitFor(() => {
expect(mockPauseDocIndexing).toHaveBeenCalledWith({
datasetId: 'ds1',
documentId: 'doc1',
})
})
})
it('should call resume API when resume button is clicked', async () => {
const user = userEvent.setup()
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus({ indexing_status: 'paused' }))
mockResumeDocIndexing.mockResolvedValue({ result: 'success' })
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/embedding\.resume/i)).toBeInTheDocument()
})
await user.click(screen.getByRole('button', { name: /resume/i }))
await waitFor(() => {
expect(mockResumeDocIndexing).toHaveBeenCalledWith({
datasetId: 'ds1',
documentId: 'doc1',
})
})
})
})
describe('Rule Detail', () => {
it('should display rule detail section', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
await waitFor(() => {
expect(screen.getByText(/stepTwo\.indexMode/i)).toBeInTheDocument()
})
})
it('should display qualified index mode', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
render(
<EmbeddingDetail {...defaultProps} indexingType={IndexingType.QUALIFIED} />,
{ wrapper: createWrapper() },
)
await waitFor(() => {
expect(screen.getByText(/stepTwo\.qualified/i)).toBeInTheDocument()
})
})
it('should display economical index mode', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
render(
<EmbeddingDetail {...defaultProps} indexingType={IndexingType.ECONOMICAL} />,
{ wrapper: createWrapper() },
)
await waitFor(() => {
expect(screen.getByText(/stepTwo\.economical/i)).toBeInTheDocument()
})
})
})
describe('detailUpdate Callback', () => {
it('should call detailUpdate when status becomes terminal', async () => {
const detailUpdate = vi.fn()
// First call returns indexing, subsequent call returns completed
mockFetchIndexingStatus
.mockResolvedValueOnce(mockIndexingStatus({ indexing_status: 'indexing' }))
.mockResolvedValueOnce(mockIndexingStatus({ indexing_status: 'completed' }))
render(
<EmbeddingDetail {...defaultProps} detailUpdate={detailUpdate} />,
{ wrapper: createWrapper() },
)
// Wait for at least one poll; the terminal status returned by a later poll
// is what stops polling and fires detailUpdate via onComplete
await waitFor(() => {
expect(mockFetchIndexingStatus).toHaveBeenCalled()
}, { timeout: 5000 })
})
})
describe('Edge Cases', () => {
it('should handle missing context values', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
render(
<EmbeddingDetail {...defaultProps} datasetId="explicit-ds" documentId="explicit-doc" />,
{ wrapper: createWrapper({ datasetId: undefined, documentId: undefined }) },
)
await waitFor(() => {
expect(mockFetchIndexingStatus).toHaveBeenCalledWith({
datasetId: 'explicit-ds',
documentId: 'explicit-doc',
})
})
})
it('should render skeleton component', async () => {
mockFetchIndexingStatus.mockResolvedValue(mockIndexingStatus())
const { container } = render(<EmbeddingDetail {...defaultProps} />, { wrapper: createWrapper() })
// EmbeddingSkeleton should be rendered - check for the skeleton wrapper element
await waitFor(() => {
const skeletonWrapper = container.querySelector('.bg-dataset-chunk-list-mask-bg')
expect(skeletonWrapper).toBeInTheDocument()
})
})
})
})


@@ -1,18 +1,31 @@
import type { FC } from 'react'
import type { IndexingType } from '../../../create/step-two'
import type { RETRIEVE_METHOD } from '@/types/app'
import type { CommonResponse } from '@/models/common'
import type { IndexingStatusResponse, ProcessRuleResponse } from '@/models/datasets'
import { RiLoader2Line, RiPauseCircleLine, RiPlayCircleLine } from '@remixicon/react'
import Image from 'next/image'
import * as React from 'react'
import { useCallback } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import Divider from '@/app/components/base/divider'
import { ToastContext } from '@/app/components/base/toast'
import { ProcessMode } from '@/models/datasets'
import {
fetchIndexingStatus as doFetchIndexingStatus,
pauseDocIndexing,
resumeDocIndexing,
} from '@/service/datasets'
import { useProcessRule } from '@/service/knowledge/use-dataset'
import { RETRIEVE_METHOD } from '@/types/app'
import { asyncRunSafe, sleep } from '@/utils'
import { cn } from '@/utils/classnames'
import { indexMethodIcon, retrievalIcon } from '../../../create/icons'
import { IndexingType } from '../../../create/step-two'
import { useDocumentContext } from '../context'
import { ProgressBar, RuleDetail, SegmentProgress, StatusHeader } from './components'
import { useEmbeddingStatus, usePauseIndexing, useResumeIndexing } from './hooks'
import { FieldInfo } from '../metadata'
import EmbeddingSkeleton from './skeleton'
type EmbeddingDetailProps = {
type IEmbeddingDetailProps = {
datasetId?: string
documentId?: string
indexingType?: IndexingType
@@ -20,7 +33,128 @@ type EmbeddingDetailProps = {
detailUpdate: VoidFunction
}
const EmbeddingDetail: FC<EmbeddingDetailProps> = ({
type IRuleDetailProps = {
sourceData?: ProcessRuleResponse
indexingType?: IndexingType
retrievalMethod?: RETRIEVE_METHOD
}
const RuleDetail: FC<IRuleDetailProps> = React.memo(({
sourceData,
indexingType,
retrievalMethod,
}) => {
const { t } = useTranslation()
const segmentationRuleMap = {
mode: t('embedding.mode', { ns: 'datasetDocuments' }),
segmentLength: t('embedding.segmentLength', { ns: 'datasetDocuments' }),
textCleaning: t('embedding.textCleaning', { ns: 'datasetDocuments' }),
}
const getRuleName = (key: string) => {
if (key === 'remove_extra_spaces')
return t('stepTwo.removeExtraSpaces', { ns: 'datasetCreation' })
if (key === 'remove_urls_emails')
return t('stepTwo.removeUrlEmails', { ns: 'datasetCreation' })
if (key === 'remove_stopwords')
return t('stepTwo.removeStopwords', { ns: 'datasetCreation' })
}
const isNumber = (value: unknown) => {
return typeof value === 'number'
}
const getValue = useCallback((field: string) => {
let value: string | number | undefined = '-'
const maxTokens = isNumber(sourceData?.rules?.segmentation?.max_tokens)
? sourceData.rules.segmentation.max_tokens
: value
const childMaxTokens = isNumber(sourceData?.rules?.subchunk_segmentation?.max_tokens)
? sourceData.rules.subchunk_segmentation.max_tokens
: value
switch (field) {
case 'mode':
value = !sourceData?.mode
? value
: sourceData.mode === ProcessMode.general
? (t('embedding.custom', { ns: 'datasetDocuments' }) as string)
: `${t('embedding.hierarchical', { ns: 'datasetDocuments' })} · ${sourceData?.rules?.parent_mode === 'paragraph'
? t('parentMode.paragraph', { ns: 'dataset' })
: t('parentMode.fullDoc', { ns: 'dataset' })}`
break
case 'segmentLength':
value = !sourceData?.mode
? value
: sourceData.mode === ProcessMode.general
? maxTokens
: `${t('embedding.parentMaxTokens', { ns: 'datasetDocuments' })} ${maxTokens}; ${t('embedding.childMaxTokens', { ns: 'datasetDocuments' })} ${childMaxTokens}`
break
default:
value = !sourceData?.mode
? value
: sourceData?.rules?.pre_processing_rules?.filter(rule =>
rule.enabled).map(rule => getRuleName(rule.id)).join(',')
break
}
return value
}, [sourceData])
return (
<div className="py-3">
<div className="flex flex-col gap-y-1">
{Object.keys(segmentationRuleMap).map((field) => {
return (
<FieldInfo
key={field}
label={segmentationRuleMap[field as keyof typeof segmentationRuleMap]}
displayedValue={String(getValue(field))}
/>
)
})}
</div>
<Divider type="horizontal" className="bg-divider-subtle" />
<FieldInfo
label={t('stepTwo.indexMode', { ns: 'datasetCreation' })}
displayedValue={t(`stepTwo.${indexingType === IndexingType.ECONOMICAL ? 'economical' : 'qualified'}`, { ns: 'datasetCreation' }) as string}
valueIcon={(
<Image
className="size-4"
src={
indexingType === IndexingType.ECONOMICAL
? indexMethodIcon.economical
: indexMethodIcon.high_quality
}
alt=""
/>
)}
/>
<FieldInfo
label={t('form.retrievalSetting.title', { ns: 'datasetSettings' })}
displayedValue={t(`retrieval.${indexingType === IndexingType.ECONOMICAL ? 'keyword_search' : retrievalMethod ?? 'semantic_search'}.title`, { ns: 'dataset' })}
valueIcon={(
<Image
className="size-4"
src={
retrievalMethod === RETRIEVE_METHOD.fullText
? retrievalIcon.fullText
: retrievalMethod === RETRIEVE_METHOD.hybrid
? retrievalIcon.hybrid
: retrievalIcon.vector
}
alt=""
/>
)}
/>
</div>
)
})
RuleDetail.displayName = 'RuleDetail'
const EmbeddingDetail: FC<IEmbeddingDetailProps> = ({
datasetId: dstId,
documentId: docId,
detailUpdate,
@@ -30,95 +164,144 @@ const EmbeddingDetail: FC<EmbeddingDetailProps> = ({
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const contextDatasetId = useDocumentContext(s => s.datasetId)
const contextDocumentId = useDocumentContext(s => s.documentId)
const datasetId = dstId ?? contextDatasetId
const documentId = docId ?? contextDocumentId
const datasetId = useDocumentContext(s => s.datasetId)
const documentId = useDocumentContext(s => s.documentId)
const localDatasetId = dstId ?? datasetId
const localDocumentId = docId ?? documentId
const {
data: indexingStatus,
isEmbedding,
isCompleted,
isPaused,
isError,
percent,
resetStatus,
refetch,
} = useEmbeddingStatus({
datasetId,
documentId,
onComplete: detailUpdate,
})
const [indexingStatusDetail, setIndexingStatusDetail] = useState<IndexingStatusResponse | null>(null)
const fetchIndexingStatus = async () => {
const status = await doFetchIndexingStatus({ datasetId: localDatasetId, documentId: localDocumentId })
setIndexingStatusDetail(status)
return status
}
const { data: ruleDetail } = useProcessRule(documentId)
const isStopQuery = useRef(false)
const stopQueryStatus = useCallback(() => {
isStopQuery.current = true
}, [])
const handleSuccess = useCallback(() => {
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
}, [notify, t])
const startQueryStatus = useCallback(async () => {
if (isStopQuery.current)
return
const handleError = useCallback(() => {
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
}, [notify, t])
try {
const indexingStatusDetail = await fetchIndexingStatus()
if (['completed', 'error', 'paused'].includes(indexingStatusDetail?.indexing_status)) {
stopQueryStatus()
detailUpdate()
return
}
const pauseMutation = usePauseIndexing({
datasetId,
documentId,
onSuccess: () => {
handleSuccess()
resetStatus()
},
onError: handleError,
})
await sleep(2500)
await startQueryStatus()
}
catch {
await sleep(2500)
await startQueryStatus()
}
}, [stopQueryStatus])
const resumeMutation = useResumeIndexing({
datasetId,
documentId,
onSuccess: () => {
handleSuccess()
refetch()
detailUpdate()
},
onError: handleError,
})
useEffect(() => {
isStopQuery.current = false
startQueryStatus()
return () => {
stopQueryStatus()
}
}, [startQueryStatus, stopQueryStatus])
const handlePause = useCallback(() => {
pauseMutation.mutate()
}, [pauseMutation])
const { data: ruleDetail } = useProcessRule(localDocumentId)
const handleResume = useCallback(() => {
resumeMutation.mutate()
}, [resumeMutation])
const isEmbedding = useMemo(() => ['indexing', 'splitting', 'parsing', 'cleaning'].includes(indexingStatusDetail?.indexing_status || ''), [indexingStatusDetail])
const isEmbeddingCompleted = useMemo(() => ['completed'].includes(indexingStatusDetail?.indexing_status || ''), [indexingStatusDetail])
const isEmbeddingPaused = useMemo(() => ['paused'].includes(indexingStatusDetail?.indexing_status || ''), [indexingStatusDetail])
const isEmbeddingError = useMemo(() => ['error'].includes(indexingStatusDetail?.indexing_status || ''), [indexingStatusDetail])
const percent = useMemo(() => {
const completedCount = indexingStatusDetail?.completed_segments || 0
const totalCount = indexingStatusDetail?.total_segments || 0
if (totalCount === 0)
return 0
const percent = Math.round(completedCount * 100 / totalCount)
return percent > 100 ? 100 : percent
}, [indexingStatusDetail])
const handleSwitch = async () => {
const opApi = isEmbedding ? pauseDocIndexing : resumeDocIndexing
const [e] = await asyncRunSafe<CommonResponse>(opApi({ datasetId: localDatasetId, documentId: localDocumentId }) as Promise<CommonResponse>)
if (!e) {
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
// if the embedding is resumed from paused, we need to start the query status
if (isEmbeddingPaused) {
isStopQuery.current = false
startQueryStatus()
detailUpdate()
}
setIndexingStatusDetail(null)
}
else {
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
}
}
return (
<>
<div className="flex flex-col gap-y-2 px-16 py-12">
<StatusHeader
isEmbedding={isEmbedding}
isCompleted={isCompleted}
isPaused={isPaused}
isError={isError}
onPause={handlePause}
onResume={handleResume}
isPauseLoading={pauseMutation.isPending}
isResumeLoading={resumeMutation.isPending}
/>
<ProgressBar
percent={percent}
isEmbedding={isEmbedding}
isCompleted={isCompleted}
isPaused={isPaused}
isError={isError}
/>
<SegmentProgress
completedSegments={indexingStatus?.completed_segments}
totalSegments={indexingStatus?.total_segments}
percent={percent}
/>
<RuleDetail
sourceData={ruleDetail}
indexingType={indexingType}
retrievalMethod={retrievalMethod}
/>
<div className="flex h-6 items-center gap-x-1">
{isEmbedding && <RiLoader2Line className="h-4 w-4 animate-spin text-text-secondary" />}
<span className="system-md-semibold-uppercase grow text-text-secondary">
{isEmbedding && t('embedding.processing', { ns: 'datasetDocuments' })}
{isEmbeddingCompleted && t('embedding.completed', { ns: 'datasetDocuments' })}
{isEmbeddingPaused && t('embedding.paused', { ns: 'datasetDocuments' })}
{isEmbeddingError && t('embedding.error', { ns: 'datasetDocuments' })}
</span>
{isEmbedding && (
<button
type="button"
className={`flex items-center gap-x-1 rounded-md border-[0.5px]
border-components-button-secondary-border bg-components-button-secondary-bg px-1.5 py-1 shadow-xs shadow-shadow-shadow-3 backdrop-blur-[5px]`}
onClick={handleSwitch}
>
<RiPauseCircleLine className="h-3.5 w-3.5 text-components-button-secondary-text" />
<span className="system-xs-medium pr-[3px] text-components-button-secondary-text">
{t('embedding.pause', { ns: 'datasetDocuments' })}
</span>
</button>
)}
{isEmbeddingPaused && (
<button
type="button"
className={`flex items-center gap-x-1 rounded-md border-[0.5px]
border-components-button-secondary-border bg-components-button-secondary-bg px-1.5 py-1 shadow-xs shadow-shadow-shadow-3 backdrop-blur-[5px]`}
onClick={handleSwitch}
>
<RiPlayCircleLine className="h-3.5 w-3.5 text-components-button-secondary-text" />
<span className="system-xs-medium pr-[3px] text-components-button-secondary-text">
{t('embedding.resume', { ns: 'datasetDocuments' })}
</span>
</button>
)}
</div>
{/* progress bar */}
<div className={cn(
'flex h-2 w-full items-center overflow-hidden rounded-md border border-components-progress-bar-border',
isEmbedding ? 'bg-components-progress-bar-bg/50' : 'bg-components-progress-bar-bg',
)}
>
<div
className={cn(
'h-full',
(isEmbedding || isEmbeddingCompleted) && 'bg-components-progress-bar-progress-solid',
(isEmbeddingPaused || isEmbeddingError) && 'bg-components-progress-bar-progress-highlight',
)}
style={{ width: `${percent}%` }}
/>
</div>
<div className="flex w-full items-center">
<span className="system-xs-medium text-text-secondary">
{`${t('embedding.segments', { ns: 'datasetDocuments' })} ${indexingStatusDetail?.completed_segments || '--'}/${indexingStatusDetail?.total_segments || '--'} · ${percent}%`}
</span>
</div>
<RuleDetail sourceData={ruleDetail} indexingType={indexingType} retrievalMethod={retrievalMethod} />
</div>
<EmbeddingSkeleton />
</>


@@ -6,13 +6,6 @@ import { ChunkingMode, DatasetPermission, DataSourceType } from '@/models/datase
import { RETRIEVE_METHOD } from '@/types/app'
import DatasetCardHeader from './dataset-card-header'
// Mock AppIcon component to avoid emoji-mart initialization issues
vi.mock('@/app/components/base/app-icon', () => ({
default: ({ icon, className }: { icon?: string, className?: string }) => (
<div data-testid="app-icon" className={className}>{icon}</div>
),
}))
// Mock useFormatTimeFromNow hook
vi.mock('@/hooks/use-format-time-from-now', () => ({
useFormatTimeFromNow: () => ({


@@ -19,28 +19,6 @@ vi.mock('../../../rename-modal', () => ({
),
}))
// Mock Confirm component since it uses createPortal which can cause issues in tests
vi.mock('@/app/components/base/confirm', () => ({
default: ({ isShow, title, content, onConfirm, onCancel }: {
isShow: boolean
title: string
content?: React.ReactNode
onConfirm: () => void
onCancel: () => void
}) => (
isShow
? (
<div data-testid="confirm-modal">
<div data-testid="confirm-title">{title}</div>
<div data-testid="confirm-content">{content}</div>
<button onClick={onCancel} role="button" aria-label="cancel">Cancel</button>
<button onClick={onConfirm} role="button" aria-label="confirm">Confirm</button>
</div>
)
: null
),
}))
describe('DatasetCardModals', () => {
const mockDataset: DataSet = {
id: 'dataset-1',
@@ -194,9 +172,11 @@ describe('DatasetCardModals', () => {
/>,
)
// Find and click the confirm button using our mocked Confirm component
const confirmButton = screen.getByRole('button', { name: /confirm/i })
fireEvent.click(confirmButton)
// Find and click the confirm button; queryByRole returns null instead of
// throwing, so the textContent fallback below is actually reachable
const confirmButton = screen.queryByRole('button', { name: /confirm|ok|delete/i })
|| screen.getAllByRole('button').find(btn => btn.textContent?.toLowerCase().includes('confirm'))
if (confirmButton)
fireEvent.click(confirmButton)
expect(onConfirmDelete).toHaveBeenCalledTimes(1)
})

Some files were not shown because too many files have changed in this diff.