From 7252ce6f26aee186d35692079944cfe751527ead Mon Sep 17 00:00:00 2001 From: Stephen Zhou Date: Thu, 5 Mar 2026 01:09:32 +0800 Subject: [PATCH 1/9] chore: add react refresh plugin for vinext (#32996) --- web/vite.config.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/web/vite.config.ts b/web/vite.config.ts index 9cfa6a8e7b..152df913f8 100644 --- a/web/vite.config.ts +++ b/web/vite.config.ts @@ -91,6 +91,7 @@ export default defineConfig(({ mode }) => { : [ createCodeInspectorPlugin(), createForceInspectorClientInjectionPlugin(), + react(), vinext(), ], resolve: { From df3c66a8acbfaae37f0697c6294b5b5f3f6ffe04 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9C=A8=E4=B9=8B=E6=9C=AC=E6=BE=AA?= Date: Thu, 5 Mar 2026 01:35:24 +0800 Subject: [PATCH 2/9] test: migrate duplicate_document_indexing_task SQL tests to testcontainers (#32540) Co-authored-by: KinomotoMio <200703522+KinomotoMio@users.noreply.github.com> --- .../test_duplicate_document_indexing_task.py | 100 ++++- .../test_duplicate_document_indexing_task.py | 393 +----------------- 2 files changed, 97 insertions(+), 396 deletions(-) diff --git a/api/tests/test_containers_integration_tests/tasks/test_duplicate_document_indexing_task.py b/api/tests/test_containers_integration_tests/tasks/test_duplicate_document_indexing_task.py index b2e1ce3b89..c61e37b1e9 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_duplicate_document_indexing_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_duplicate_document_indexing_task.py @@ -3,6 +3,7 @@ from unittest.mock import MagicMock, patch import pytest from faker import Faker +from core.indexing_runner import DocumentIsPausedError from enums.cloud_plan import CloudPlan from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment @@ -282,7 +283,7 @@ class TestDuplicateDocumentIndexingTasks: return dataset, documents - def test_duplicate_document_indexing_task_success( + def _test_duplicate_document_indexing_task_success( self, db_session_with_containers, mock_external_service_dependencies ): """ @@ -324,7 +325,7 @@ class TestDuplicateDocumentIndexingTasks: processed_documents = call_args[0][0] # First argument should be documents list assert len(processed_documents) == 3 - def test_duplicate_document_indexing_task_with_segment_cleanup( + def _test_duplicate_document_indexing_task_with_segment_cleanup( self, db_session_with_containers, mock_external_service_dependencies ): """ @@ -374,7 +375,7 @@ class TestDuplicateDocumentIndexingTasks: mock_external_service_dependencies["indexing_runner"].assert_called_once() mock_external_service_dependencies["indexing_runner_instance"].run.assert_called_once() - def test_duplicate_document_indexing_task_dataset_not_found( + def _test_duplicate_document_indexing_task_dataset_not_found( self, db_session_with_containers, mock_external_service_dependencies ): """ @@ -445,7 +446,7 @@ class TestDuplicateDocumentIndexingTasks: processed_documents = call_args[0][0] # First argument should be documents list assert len(processed_documents) == 2 # Only existing documents - def test_duplicate_document_indexing_task_indexing_runner_exception( + def _test_duplicate_document_indexing_task_indexing_runner_exception( self, db_session_with_containers, mock_external_service_dependencies ): """ @@ -486,7 +487,7 @@ class TestDuplicateDocumentIndexingTasks: assert updated_document.indexing_status == "parsing" assert updated_document.processing_started_at is not None - def 
test_duplicate_document_indexing_task_billing_sandbox_plan_batch_limit( + def _test_duplicate_document_indexing_task_billing_sandbox_plan_batch_limit( self, db_session_with_containers, mock_external_service_dependencies ): """ @@ -549,7 +550,7 @@ class TestDuplicateDocumentIndexingTasks: # Verify indexing runner was not called due to early validation error mock_external_service_dependencies["indexing_runner_instance"].run.assert_not_called() - def test_duplicate_document_indexing_task_billing_vector_space_limit_exceeded( + def _test_duplicate_document_indexing_task_billing_vector_space_limit_exceeded( self, db_session_with_containers, mock_external_service_dependencies ): """ @@ -783,3 +784,90 @@ class TestDuplicateDocumentIndexingTasks: document_ids=document_ids, ) mock_queue.delete_task_key.assert_not_called() + + def test_successful_duplicate_document_indexing( + self, db_session_with_containers, mock_external_service_dependencies + ): + """Test successful duplicate document indexing flow.""" + self._test_duplicate_document_indexing_task_success( + db_session_with_containers, mock_external_service_dependencies + ) + + def test_duplicate_document_indexing_dataset_not_found( + self, db_session_with_containers, mock_external_service_dependencies + ): + """Test duplicate document indexing when dataset is not found.""" + self._test_duplicate_document_indexing_task_dataset_not_found( + db_session_with_containers, mock_external_service_dependencies + ) + + def test_duplicate_document_indexing_with_billing_enabled_sandbox_plan( + self, db_session_with_containers, mock_external_service_dependencies + ): + """Test duplicate document indexing with billing enabled and sandbox plan.""" + self._test_duplicate_document_indexing_task_billing_sandbox_plan_batch_limit( + db_session_with_containers, mock_external_service_dependencies + ) + + def test_duplicate_document_indexing_with_billing_limit_exceeded( + self, db_session_with_containers, mock_external_service_dependencies + ): + """Test duplicate document indexing when billing limit is exceeded.""" + self._test_duplicate_document_indexing_task_billing_vector_space_limit_exceeded( + db_session_with_containers, mock_external_service_dependencies + ) + + def test_duplicate_document_indexing_runner_error( + self, db_session_with_containers, mock_external_service_dependencies + ): + """Test duplicate document indexing when IndexingRunner raises an error.""" + self._test_duplicate_document_indexing_task_indexing_runner_exception( + db_session_with_containers, mock_external_service_dependencies + ) + + def _test_duplicate_document_indexing_task_document_is_paused( + self, db_session_with_containers, mock_external_service_dependencies + ): + """Test duplicate document indexing when document is paused.""" + # Arrange + dataset, documents = self._create_test_dataset_and_documents( + db_session_with_containers, mock_external_service_dependencies, document_count=2 + ) + for document in documents: + document.is_paused = True + db_session_with_containers.add(document) + db_session_with_containers.commit() + + document_ids = [doc.id for doc in documents] + mock_external_service_dependencies["indexing_runner_instance"].run.side_effect = DocumentIsPausedError( + "Document paused" + ) + + # Act + _duplicate_document_indexing_task(dataset.id, document_ids) + db_session_with_containers.expire_all() + + # Assert + for doc_id in document_ids: + updated_document = db_session_with_containers.query(Document).where(Document.id == doc_id).first() + assert 
updated_document.is_paused is True + assert updated_document.indexing_status == "parsing" + assert updated_document.display_status == "paused" + assert updated_document.processing_started_at is not None + mock_external_service_dependencies["indexing_runner_instance"].run.assert_called_once() + + def test_duplicate_document_indexing_document_is_paused( + self, db_session_with_containers, mock_external_service_dependencies + ): + """Test duplicate document indexing when document is paused.""" + self._test_duplicate_document_indexing_task_document_is_paused( + db_session_with_containers, mock_external_service_dependencies + ) + + def test_duplicate_document_indexing_cleans_old_segments( + self, db_session_with_containers, mock_external_service_dependencies + ): + """Test that duplicate document indexing cleans old segments.""" + self._test_duplicate_document_indexing_task_with_segment_cleanup( + db_session_with_containers, mock_external_service_dependencies + ) diff --git a/api/tests/unit_tests/tasks/test_duplicate_document_indexing_task.py b/api/tests/unit_tests/tasks/test_duplicate_document_indexing_task.py index 68fb8b748f..f6dbc4275b 100644 --- a/api/tests/unit_tests/tasks/test_duplicate_document_indexing_task.py +++ b/api/tests/unit_tests/tasks/test_duplicate_document_indexing_task.py @@ -1,158 +1,38 @@ -""" -Unit tests for duplicate document indexing tasks. - -This module tests the duplicate document indexing task functionality including: -- Task enqueuing to different queues (normal, priority, tenant-isolated) -- Batch processing of multiple duplicate documents -- Progress tracking through task lifecycle -- Error handling and retry mechanisms -- Cleanup of old document data before re-indexing -""" +"""Unit tests for queue/wrapper behaviors in duplicate document indexing tasks (non-database logic).""" import uuid -from unittest.mock import MagicMock, Mock, patch +from unittest.mock import Mock, patch import pytest -from core.indexing_runner import DocumentIsPausedError, IndexingRunner from core.rag.pipeline.queue import TenantIsolatedTaskQueue -from enums.cloud_plan import CloudPlan -from models.dataset import Dataset, Document, DocumentSegment from tasks.duplicate_document_indexing_task import ( - _duplicate_document_indexing_task, _duplicate_document_indexing_task_with_tenant_queue, duplicate_document_indexing_task, normal_duplicate_document_indexing_task, priority_duplicate_document_indexing_task, ) -# ============================================================================ -# Fixtures -# ============================================================================ - @pytest.fixture def tenant_id(): - """Generate a unique tenant ID for testing.""" return str(uuid.uuid4()) @pytest.fixture def dataset_id(): - """Generate a unique dataset ID for testing.""" return str(uuid.uuid4()) @pytest.fixture def document_ids(): - """Generate a list of document IDs for testing.""" return [str(uuid.uuid4()) for _ in range(3)] -@pytest.fixture -def mock_dataset(dataset_id, tenant_id): - """Create a mock Dataset object.""" - dataset = Mock(spec=Dataset) - dataset.id = dataset_id - dataset.tenant_id = tenant_id - dataset.indexing_technique = "high_quality" - dataset.embedding_model_provider = "openai" - dataset.embedding_model = "text-embedding-ada-002" - return dataset - - -@pytest.fixture -def mock_documents(document_ids, dataset_id): - """Create mock Document objects.""" - documents = [] - for doc_id in document_ids: - doc = Mock(spec=Document) - doc.id = doc_id - doc.dataset_id = dataset_id - 
doc.indexing_status = "waiting" - doc.error = None - doc.stopped_at = None - doc.processing_started_at = None - doc.doc_form = "text_model" - documents.append(doc) - return documents - - -@pytest.fixture -def mock_document_segments(document_ids): - """Create mock DocumentSegment objects.""" - segments = [] - for doc_id in document_ids: - for i in range(3): - segment = Mock(spec=DocumentSegment) - segment.id = str(uuid.uuid4()) - segment.document_id = doc_id - segment.index_node_id = f"node-{doc_id}-{i}" - segments.append(segment) - return segments - - -@pytest.fixture -def mock_db_session(): - """Mock database session via session_factory.create_session().""" - with patch("tasks.duplicate_document_indexing_task.session_factory", autospec=True) as mock_sf: - session = MagicMock() - # Allow tests to observe session.close() via context manager teardown - session.close = MagicMock() - cm = MagicMock() - cm.__enter__.return_value = session - - def _exit_side_effect(*args, **kwargs): - session.close() - - cm.__exit__.side_effect = _exit_side_effect - mock_sf.create_session.return_value = cm - - query = MagicMock() - session.query.return_value = query - query.where.return_value = query - session.scalars.return_value = MagicMock() - yield session - - -@pytest.fixture -def mock_indexing_runner(): - """Mock IndexingRunner.""" - with patch("tasks.duplicate_document_indexing_task.IndexingRunner", autospec=True) as mock_runner_class: - mock_runner = MagicMock(spec=IndexingRunner) - mock_runner_class.return_value = mock_runner - yield mock_runner - - -@pytest.fixture -def mock_feature_service(): - """Mock FeatureService.""" - with patch("tasks.duplicate_document_indexing_task.FeatureService", autospec=True) as mock_service: - mock_features = Mock() - mock_features.billing = Mock() - mock_features.billing.enabled = False - mock_features.vector_space = Mock() - mock_features.vector_space.size = 0 - mock_features.vector_space.limit = 1000 - mock_service.get_features.return_value = mock_features - yield mock_service - - -@pytest.fixture -def mock_index_processor_factory(): - """Mock IndexProcessorFactory.""" - with patch("tasks.duplicate_document_indexing_task.IndexProcessorFactory", autospec=True) as mock_factory: - mock_processor = MagicMock() - mock_processor.clean = Mock() - mock_factory.return_value.init_index_processor.return_value = mock_processor - yield mock_factory - - @pytest.fixture def mock_tenant_isolated_queue(): - """Mock TenantIsolatedTaskQueue.""" with patch("tasks.duplicate_document_indexing_task.TenantIsolatedTaskQueue", autospec=True) as mock_queue_class: - mock_queue = MagicMock(spec=TenantIsolatedTaskQueue) + mock_queue = Mock(spec=TenantIsolatedTaskQueue) mock_queue.pull_tasks.return_value = [] mock_queue.delete_task_key = Mock() mock_queue.set_task_waiting_time = Mock() @@ -160,11 +40,6 @@ def mock_tenant_isolated_queue(): yield mock_queue -# ============================================================================ -# Tests for deprecated duplicate_document_indexing_task -# ============================================================================ - - class TestDuplicateDocumentIndexingTask: """Tests for the deprecated duplicate_document_indexing_task function.""" @@ -190,258 +65,6 @@ class TestDuplicateDocumentIndexingTask: mock_core_func.assert_called_once_with(dataset_id, document_ids) -# ============================================================================ -# Tests for _duplicate_document_indexing_task core function -# 
============================================================================ - - -class TestDuplicateDocumentIndexingTaskCore: - """Tests for the _duplicate_document_indexing_task core function.""" - - def test_successful_duplicate_document_indexing( - self, - mock_db_session, - mock_indexing_runner, - mock_feature_service, - mock_index_processor_factory, - mock_dataset, - mock_documents, - mock_document_segments, - dataset_id, - document_ids, - ): - """Test successful duplicate document indexing flow.""" - # Arrange - # Dataset via query.first() - mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset - # scalars() call sequence: - # 1) documents list - # 2..N) segments per document - - def _scalars_side_effect(*args, **kwargs): - m = MagicMock() - # First call returns documents; subsequent calls return segments - if not hasattr(_scalars_side_effect, "_calls"): - _scalars_side_effect._calls = 0 - if _scalars_side_effect._calls == 0: - m.all.return_value = mock_documents - else: - m.all.return_value = mock_document_segments - _scalars_side_effect._calls += 1 - return m - - mock_db_session.scalars.side_effect = _scalars_side_effect - - # Act - _duplicate_document_indexing_task(dataset_id, document_ids) - - # Assert - # Verify IndexingRunner was called - mock_indexing_runner.run.assert_called_once() - - # Verify all documents were set to parsing status - for doc in mock_documents: - assert doc.indexing_status == "parsing" - assert doc.processing_started_at is not None - - # Verify session operations - assert mock_db_session.commit.called - assert mock_db_session.close.called - - def test_duplicate_document_indexing_dataset_not_found(self, mock_db_session, dataset_id, document_ids): - """Test duplicate document indexing when dataset is not found.""" - # Arrange - mock_db_session.query.return_value.where.return_value.first.return_value = None - - # Act - _duplicate_document_indexing_task(dataset_id, document_ids) - - # Assert - # Should close the session at least once - assert mock_db_session.close.called - - def test_duplicate_document_indexing_with_billing_enabled_sandbox_plan( - self, - mock_db_session, - mock_feature_service, - mock_dataset, - dataset_id, - document_ids, - ): - """Test duplicate document indexing with billing enabled and sandbox plan.""" - # Arrange - mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset - mock_features = mock_feature_service.get_features.return_value - mock_features.billing.enabled = True - mock_features.billing.subscription.plan = CloudPlan.SANDBOX - - # Act - _duplicate_document_indexing_task(dataset_id, document_ids) - - # Assert - # For sandbox plan with multiple documents, should fail - mock_db_session.commit.assert_called() - - def test_duplicate_document_indexing_with_billing_limit_exceeded( - self, - mock_db_session, - mock_feature_service, - mock_dataset, - mock_documents, - dataset_id, - document_ids, - ): - """Test duplicate document indexing when billing limit is exceeded.""" - # Arrange - mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset - # First scalars() -> documents; subsequent -> empty segments - - def _scalars_side_effect(*args, **kwargs): - m = MagicMock() - if not hasattr(_scalars_side_effect, "_calls"): - _scalars_side_effect._calls = 0 - if _scalars_side_effect._calls == 0: - m.all.return_value = mock_documents - else: - m.all.return_value = [] - _scalars_side_effect._calls += 1 - return m - - mock_db_session.scalars.side_effect 
= _scalars_side_effect - mock_features = mock_feature_service.get_features.return_value - mock_features.billing.enabled = True - mock_features.billing.subscription.plan = CloudPlan.TEAM - mock_features.vector_space.size = 990 - mock_features.vector_space.limit = 1000 - - # Act - _duplicate_document_indexing_task(dataset_id, document_ids) - - # Assert - # Should commit the session - assert mock_db_session.commit.called - # Should close the session - assert mock_db_session.close.called - - def test_duplicate_document_indexing_runner_error( - self, - mock_db_session, - mock_indexing_runner, - mock_feature_service, - mock_index_processor_factory, - mock_dataset, - mock_documents, - dataset_id, - document_ids, - ): - """Test duplicate document indexing when IndexingRunner raises an error.""" - # Arrange - mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset - - def _scalars_side_effect(*args, **kwargs): - m = MagicMock() - if not hasattr(_scalars_side_effect, "_calls"): - _scalars_side_effect._calls = 0 - if _scalars_side_effect._calls == 0: - m.all.return_value = mock_documents - else: - m.all.return_value = [] - _scalars_side_effect._calls += 1 - return m - - mock_db_session.scalars.side_effect = _scalars_side_effect - mock_indexing_runner.run.side_effect = Exception("Indexing error") - - # Act - _duplicate_document_indexing_task(dataset_id, document_ids) - - # Assert - # Should close the session even after error - mock_db_session.close.assert_called_once() - - def test_duplicate_document_indexing_document_is_paused( - self, - mock_db_session, - mock_indexing_runner, - mock_feature_service, - mock_index_processor_factory, - mock_dataset, - mock_documents, - dataset_id, - document_ids, - ): - """Test duplicate document indexing when document is paused.""" - # Arrange - mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset - - def _scalars_side_effect(*args, **kwargs): - m = MagicMock() - if not hasattr(_scalars_side_effect, "_calls"): - _scalars_side_effect._calls = 0 - if _scalars_side_effect._calls == 0: - m.all.return_value = mock_documents - else: - m.all.return_value = [] - _scalars_side_effect._calls += 1 - return m - - mock_db_session.scalars.side_effect = _scalars_side_effect - mock_indexing_runner.run.side_effect = DocumentIsPausedError("Document paused") - - # Act - _duplicate_document_indexing_task(dataset_id, document_ids) - - # Assert - # Should handle DocumentIsPausedError gracefully - mock_db_session.close.assert_called_once() - - def test_duplicate_document_indexing_cleans_old_segments( - self, - mock_db_session, - mock_indexing_runner, - mock_feature_service, - mock_index_processor_factory, - mock_dataset, - mock_documents, - mock_document_segments, - dataset_id, - document_ids, - ): - """Test that duplicate document indexing cleans old segments.""" - # Arrange - mock_db_session.query.return_value.where.return_value.first.return_value = mock_dataset - - def _scalars_side_effect(*args, **kwargs): - m = MagicMock() - if not hasattr(_scalars_side_effect, "_calls"): - _scalars_side_effect._calls = 0 - if _scalars_side_effect._calls == 0: - m.all.return_value = mock_documents - else: - m.all.return_value = mock_document_segments - _scalars_side_effect._calls += 1 - return m - - mock_db_session.scalars.side_effect = _scalars_side_effect - mock_processor = mock_index_processor_factory.return_value.init_index_processor.return_value - - # Act - _duplicate_document_indexing_task(dataset_id, document_ids) - - # Assert - 
# Verify clean was called for each document - assert mock_processor.clean.call_count == len(mock_documents) - - # Verify segments were deleted in batch (DELETE FROM document_segments) - execute_sqls = [" ".join(str(c[0][0]).split()) for c in mock_db_session.execute.call_args_list] - assert any("DELETE FROM document_segments" in sql for sql in execute_sqls) - - -# ============================================================================ -# Tests for tenant queue wrapper function -# ============================================================================ - - class TestDuplicateDocumentIndexingTaskWithTenantQueue: """Tests for _duplicate_document_indexing_task_with_tenant_queue function.""" @@ -536,11 +159,6 @@ class TestDuplicateDocumentIndexingTaskWithTenantQueue: mock_tenant_isolated_queue.pull_tasks.assert_called_once() -# ============================================================================ -# Tests for normal_duplicate_document_indexing_task -# ============================================================================ - - class TestNormalDuplicateDocumentIndexingTask: """Tests for normal_duplicate_document_indexing_task function.""" @@ -581,11 +199,6 @@ class TestNormalDuplicateDocumentIndexingTask: ) -# ============================================================================ -# Tests for priority_duplicate_document_indexing_task -# ============================================================================ - - class TestPriorityDuplicateDocumentIndexingTask: """Tests for priority_duplicate_document_indexing_task function.""" From 9c9cb509813e91e3735c828aa46133abbc6a37b3 Mon Sep 17 00:00:00 2001 From: yyh <92089059+lyzno1@users.noreply.github.com> Date: Thu, 5 Mar 2026 08:47:54 +0800 Subject: [PATCH 3/9] chore: expand overlay migration ESLint rules to popover, dropdown, dialog (#33001) --- web/docs/overlay-migration.md | 3 ++ web/eslint-suppressions.json | 54 +++++++++++++++++++++++++++++++++-- web/eslint.config.mjs | 18 ++++++++++++ 3 files changed, 72 insertions(+), 3 deletions(-) diff --git a/web/docs/overlay-migration.md b/web/docs/overlay-migration.md index fbab86a74b..3e78b1bf39 100644 --- a/web/docs/overlay-migration.md +++ b/web/docs/overlay-migration.md @@ -10,6 +10,9 @@ This document tracks the migration away from legacy overlay APIs. 
- `@/app/components/base/modal` - `@/app/components/base/confirm` - `@/app/components/base/select` (including `custom` / `pure`) + - `@/app/components/base/popover` + - `@/app/components/base/dropdown` + - `@/app/components/base/dialog` - Replacement primitives: - `@/app/components/base/ui/tooltip` - `@/app/components/base/ui/dropdown-menu` diff --git a/web/eslint-suppressions.json b/web/eslint-suppressions.json index 4691e6a08f..00c817eac9 100644 --- a/web/eslint-suppressions.json +++ b/web/eslint-suppressions.json @@ -275,6 +275,9 @@ } }, "app/account/(commonLayout)/delete-account/components/feed-back.tsx": { + "no-restricted-imports": { + "count": 1 + }, "tailwindcss/enforce-consistent-class-order": { "count": 1 } @@ -287,6 +290,11 @@ "count": 3 } }, + "app/account/(commonLayout)/delete-account/index.tsx": { + "no-restricted-imports": { + "count": 1 + } + }, "app/account/(commonLayout)/header.tsx": { "tailwindcss/enforce-consistent-class-order": { "count": 2 @@ -436,6 +444,9 @@ } }, "app/components/app/annotation/header-opts/index.tsx": { + "no-restricted-imports": { + "count": 1 + }, "react/no-nested-component-definitions": { "count": 1 }, @@ -965,6 +976,11 @@ "count": 1 } }, + "app/components/app/configuration/debug/debug-with-multiple-model/debug-item.tsx": { + "no-restricted-imports": { + "count": 2 + } + }, "app/components/app/configuration/debug/debug-with-multiple-model/index.spec.tsx": { "ts/no-explicit-any": { "count": 5 @@ -1375,7 +1391,7 @@ }, "app/components/apps/app-card.tsx": { "no-restricted-imports": { - "count": 1 + "count": 3 }, "react-hooks-extra/no-direct-set-state-in-use-effect": { "count": 1 @@ -2864,6 +2880,16 @@ "count": 1 } }, + "app/components/base/tag-management/panel.tsx": { + "no-restricted-imports": { + "count": 1 + } + }, + "app/components/base/tag-management/selector.tsx": { + "no-restricted-imports": { + "count": 1 + } + }, "app/components/base/tag-management/tag-item-editor.tsx": { "no-restricted-imports": { "count": 2 @@ -3182,6 +3208,11 @@ "count": 1 } }, + "app/components/datasets/create-from-pipeline/list/template-card/actions.tsx": { + "no-restricted-imports": { + "count": 1 + } + }, "app/components/datasets/create-from-pipeline/list/template-card/content.tsx": { "tailwindcss/enforce-consistent-class-order": { "count": 3 @@ -3271,7 +3302,7 @@ }, "app/components/datasets/create/step-two/components/indexing-mode-section.tsx": { "no-restricted-imports": { - "count": 1 + "count": 2 }, "tailwindcss/enforce-consistent-class-order": { "count": 8 @@ -3306,6 +3337,9 @@ } }, "app/components/datasets/create/step-two/language-select/index.tsx": { + "no-restricted-imports": { + "count": 1 + }, "tailwindcss/enforce-consistent-class-order": { "count": 2 } @@ -3439,7 +3473,7 @@ }, "app/components/datasets/documents/components/operations.tsx": { "no-restricted-imports": { - "count": 2 + "count": 3 } }, "app/components/datasets/documents/components/rename-modal.tsx": { @@ -3859,6 +3893,9 @@ } }, "app/components/datasets/documents/detail/segment-add/index.tsx": { + "no-restricted-imports": { + "count": 1 + }, "react-refresh/only-export-components": { "count": 1 }, @@ -4073,6 +4110,11 @@ "count": 1 } }, + "app/components/datasets/list/dataset-card/components/operations-popover.tsx": { + "no-restricted-imports": { + "count": 1 + } + }, "app/components/datasets/list/dataset-card/hooks/use-dataset-card-state.ts": { "react-hooks-extra/no-direct-set-state-in-use-effect": { "count": 1 @@ -4506,6 +4548,9 @@ } }, 
"app/components/header/account-setting/data-source-page-new/operator.tsx": { + "no-restricted-imports": { + "count": 2 + }, "tailwindcss/enforce-consistent-class-order": { "count": 5 } @@ -4810,6 +4855,9 @@ } }, "app/components/header/account-setting/model-provider-page/model-parameter-modal/presets-parameter.tsx": { + "no-restricted-imports": { + "count": 1 + }, "tailwindcss/enforce-consistent-class-order": { "count": 1 } diff --git a/web/eslint.config.mjs b/web/eslint.config.mjs index 957aa1a653..f8d2b4ca6c 100644 --- a/web/eslint.config.mjs +++ b/web/eslint.config.mjs @@ -195,6 +195,24 @@ export default antfu( '**/base/confirm/index', ], message: 'Deprecated: use @/app/components/base/ui/alert-dialog instead. See issue #32767.', + }, { + group: [ + '**/base/popover', + '**/base/popover/index', + ], + message: 'Deprecated: use @/app/components/base/ui/popover instead. See issue #32767.', + }, { + group: [ + '**/base/dropdown', + '**/base/dropdown/index', + ], + message: 'Deprecated: use @/app/components/base/ui/dropdown-menu instead. See issue #32767.', + }, { + group: [ + '**/base/dialog', + '**/base/dialog/index', + ], + message: 'Deprecated: use @/app/components/base/ui/dialog instead. See issue #32767.', }], }], }, From 914bd4d00d621f59075bee5b637cb3a132d15065 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Mar 2026 09:49:43 +0900 Subject: [PATCH 4/9] chore(deps): bump fickling from 0.1.8 to 0.1.9 in /api (#32999) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- api/uv.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/uv.lock b/api/uv.lock index 42b010286b..59944f6f31 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -1989,11 +1989,11 @@ wheels = [ [[package]] name = "fickling" -version = "0.1.8" +version = "0.1.9" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/be/cd91e3921f064230ac9462479e4647fb91a7b0d01677103fce89f52e3042/fickling-0.1.8.tar.gz", hash = "sha256:25a0bc7acda76176a9087b405b05f7f5021f76079aa26c6fe3270855ec57d9bf", size = 336756, upload-time = "2026-02-21T00:57:26.106Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/bd/ca7127df0201596b0b30f9ab3d36e565bb9d6f8f4da1560758b817e81b65/fickling-0.1.9.tar.gz", hash = "sha256:bb518c2fd833555183bc46b6903bb4022f3ae0436a69c3fb149cfc75eebaac33", size = 336940, upload-time = "2026-03-03T23:32:19.449Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/92/af72f783ac57fa2452f8f921c9441366c42ae1f03f5af41718445114c82f/fickling-0.1.8-py3-none-any.whl", hash = "sha256:97218785cfe00a93150808dcf9e3eb512371e0484e3ce0b05bc460b97240f292", size = 52613, upload-time = "2026-02-21T00:57:24.82Z" }, + { url = "https://files.pythonhosted.org/packages/92/49/c597bad508c74917901432b41ae5a8f036839a7fb8d0d29a89765f5d3643/fickling-0.1.9-py3-none-any.whl", hash = "sha256:ccc3ce3b84733406ade2fe749717f6e428047335157c6431eefd3e7e970a06d1", size = 52786, upload-time = "2026-03-03T23:32:17.533Z" }, ] [[package]] From a0331b8b45e69cd5bf30d341c13f050218bbaa9f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Mar 2026 09:50:12 +0900 Subject: [PATCH 5/9] chore(deps): bump authlib from 1.6.6 to 1.6.7 in /api (#32998) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- api/uv.lock | 6 
+++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/uv.lock b/api/uv.lock index 59944f6f31..5a9ac096dc 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -441,14 +441,14 @@ wheels = [ [[package]] name = "authlib" -version = "1.6.6" +version = "1.6.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bb/9b/b1661026ff24bc641b76b78c5222d614776b0c085bcfdac9bd15a1cb4b35/authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e", size = 164894, upload-time = "2025-12-12T08:01:41.464Z" } +sdist = { url = "https://files.pythonhosted.org/packages/49/dc/ed1681bf1339dd6ea1ce56136bad4baabc6f7ad466e375810702b0237047/authlib-1.6.7.tar.gz", hash = "sha256:dbf10100011d1e1b34048c9d120e83f13b35d69a826ae762b93d2fb5aafc337b", size = 164950, upload-time = "2026-02-06T14:04:14.171Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" }, + { url = "https://files.pythonhosted.org/packages/f8/00/3ed12264094ec91f534fae429945efbaa9f8c666f3aa7061cc3b2a26a0cd/authlib-1.6.7-py2.py3-none-any.whl", hash = "sha256:c637340d9a02789d2efa1d003a7437d10d3e565237bcb5fcbc6c134c7b95bab0", size = 244115, upload-time = "2026-02-06T14:04:12.141Z" }, ] [[package]] From 2977a4d2a4c4b6d9537a543ca18f81e01012da5a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Mar 2026 09:50:52 +0900 Subject: [PATCH 6/9] chore(deps): bump immutable from 5.1.4 to 5.1.5 in /web (#33000) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- web/pnpm-lock.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 4a87d145b1..1c5347dd2e 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -2180,6 +2180,7 @@ packages: peerDependencies: react: '>=18' react-dom: '>=18' + '@pkgjs/parseargs@0.11.0': resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -9710,6 +9711,7 @@ snapshots: dependencies: react: 19.2.4 react-dom: 19.2.4(react@19.2.4) + '@pkgjs/parseargs@0.11.0': optional: true From 2b5ce196adac5a168173ce4283b20fadf1e56fe4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9C=A8=E4=B9=8B=E6=9C=AC=E6=BE=AA?= Date: Thu, 5 Mar 2026 09:07:29 +0800 Subject: [PATCH 7/9] test: migrate test_document_service_rename_document SQL tests to testcontainers (#32542) Co-authored-by: KinomotoMio <200703522+KinomotoMio@users.noreply.github.com> --- .../test_document_service_rename_document.py | 252 ++++++++++++++++++ .../test_document_service_rename_document.py | 176 ------------ 2 files changed, 252 insertions(+), 176 deletions(-) create mode 100644 api/tests/test_containers_integration_tests/services/test_document_service_rename_document.py delete mode 100644 api/tests/unit_tests/services/test_document_service_rename_document.py diff --git a/api/tests/test_containers_integration_tests/services/test_document_service_rename_document.py b/api/tests/test_containers_integration_tests/services/test_document_service_rename_document.py new file mode 100644 index 0000000000..f641da6576 --- /dev/null +++ 
b/api/tests/test_containers_integration_tests/services/test_document_service_rename_document.py @@ -0,0 +1,252 @@ +"""Container-backed integration tests for DocumentService.rename_document real SQL paths.""" + +import datetime +import json +from unittest.mock import create_autospec, patch +from uuid import uuid4 + +import pytest + +from models import Account +from models.dataset import Dataset, Document +from models.enums import CreatorUserRole +from models.model import UploadFile +from services.dataset_service import DocumentService + +FIXED_UPLOAD_CREATED_AT = datetime.datetime(2024, 1, 1, 0, 0, 0) + + +@pytest.fixture +def mock_env(): + """Patch only non-SQL dependency used by rename_document: current_user context.""" + with patch("services.dataset_service.current_user", create_autospec(Account, instance=True)) as current_user: + current_user.current_tenant_id = str(uuid4()) + current_user.id = str(uuid4()) + yield {"current_user": current_user} + + +def make_dataset(db_session_with_containers, dataset_id=None, tenant_id=None, built_in_field_enabled=False): + """Persist a dataset row for rename_document integration scenarios.""" + dataset_id = dataset_id or str(uuid4()) + tenant_id = tenant_id or str(uuid4()) + + dataset = Dataset( + tenant_id=tenant_id, + name=f"dataset-{uuid4()}", + data_source_type="upload_file", + created_by=str(uuid4()), + ) + dataset.id = dataset_id + dataset.built_in_field_enabled = built_in_field_enabled + + db_session_with_containers.add(dataset) + db_session_with_containers.commit() + return dataset + + +def make_document( + db_session_with_containers, + document_id=None, + dataset_id=None, + tenant_id=None, + name="Old Name", + data_source_info=None, + doc_metadata=None, +): + """Persist a document row used by rename_document integration scenarios.""" + document_id = document_id or str(uuid4()) + dataset_id = dataset_id or str(uuid4()) + tenant_id = tenant_id or str(uuid4()) + + doc = Document( + tenant_id=tenant_id, + dataset_id=dataset_id, + position=1, + data_source_type="upload_file", + data_source_info=json.dumps(data_source_info or {}), + batch=f"batch-{uuid4()}", + name=name, + created_from="web", + created_by=str(uuid4()), + doc_form="text_model", + ) + doc.id = document_id + doc.indexing_status = "completed" + doc.doc_metadata = dict(doc_metadata or {}) + + db_session_with_containers.add(doc) + db_session_with_containers.commit() + return doc + + +def make_upload_file(db_session_with_containers, tenant_id: str, file_id: str, name: str): + """Persist an upload file row referenced by document.data_source_info.""" + upload_file = UploadFile( + tenant_id=tenant_id, + storage_type="local", + key=f"uploads/{uuid4()}", + name=name, + size=128, + extension="pdf", + mime_type="application/pdf", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=str(uuid4()), + created_at=FIXED_UPLOAD_CREATED_AT, + used=False, + ) + upload_file.id = file_id + + db_session_with_containers.add(upload_file) + db_session_with_containers.commit() + return upload_file + + +def test_rename_document_success(db_session_with_containers, mock_env): + """Rename succeeds and returns the renamed document identity by id.""" + # Arrange + dataset_id = str(uuid4()) + document_id = str(uuid4()) + new_name = "New Document Name" + dataset = make_dataset(db_session_with_containers, dataset_id, mock_env["current_user"].current_tenant_id) + document = make_document( + db_session_with_containers, + document_id=document_id, + dataset_id=dataset_id, + 
tenant_id=mock_env["current_user"].current_tenant_id, + ) + + # Act + result = DocumentService.rename_document(dataset.id, document_id, new_name) + + # Assert + db_session_with_containers.refresh(document) + assert result.id == document.id + assert document.name == new_name + + +def test_rename_document_with_built_in_fields(db_session_with_containers, mock_env): + """Built-in document_name metadata is updated while existing metadata keys are preserved.""" + # Arrange + dataset_id = str(uuid4()) + document_id = str(uuid4()) + new_name = "Renamed" + dataset = make_dataset( + db_session_with_containers, + dataset_id, + mock_env["current_user"].current_tenant_id, + built_in_field_enabled=True, + ) + document = make_document( + db_session_with_containers, + document_id=document_id, + dataset_id=dataset.id, + tenant_id=mock_env["current_user"].current_tenant_id, + doc_metadata={"foo": "bar"}, + ) + + # Act + DocumentService.rename_document(dataset.id, document.id, new_name) + + # Assert + db_session_with_containers.refresh(document) + assert document.name == new_name + assert document.doc_metadata["document_name"] == new_name + assert document.doc_metadata["foo"] == "bar" + + +def test_rename_document_updates_upload_file_when_present(db_session_with_containers, mock_env): + """Rename propagates to UploadFile.name when upload_file_id is present in data_source_info.""" + # Arrange + dataset_id = str(uuid4()) + document_id = str(uuid4()) + file_id = str(uuid4()) + new_name = "Renamed" + dataset = make_dataset(db_session_with_containers, dataset_id, mock_env["current_user"].current_tenant_id) + document = make_document( + db_session_with_containers, + document_id=document_id, + dataset_id=dataset.id, + tenant_id=mock_env["current_user"].current_tenant_id, + data_source_info={"upload_file_id": file_id}, + ) + upload_file = make_upload_file( + db_session_with_containers, + tenant_id=mock_env["current_user"].current_tenant_id, + file_id=file_id, + name="old.pdf", + ) + + # Act + DocumentService.rename_document(dataset.id, document.id, new_name) + + # Assert + db_session_with_containers.refresh(document) + db_session_with_containers.refresh(upload_file) + assert document.name == new_name + assert upload_file.name == new_name + + +def test_rename_document_does_not_update_upload_file_when_missing_id(db_session_with_containers, mock_env): + """Rename does not update UploadFile when data_source_info lacks upload_file_id.""" + # Arrange + dataset_id = str(uuid4()) + document_id = str(uuid4()) + new_name = "Another Name" + dataset = make_dataset(db_session_with_containers, dataset_id, mock_env["current_user"].current_tenant_id) + document = make_document( + db_session_with_containers, + document_id=document_id, + dataset_id=dataset.id, + tenant_id=mock_env["current_user"].current_tenant_id, + data_source_info={"url": "https://example.com"}, + ) + untouched_file = make_upload_file( + db_session_with_containers, + tenant_id=mock_env["current_user"].current_tenant_id, + file_id=str(uuid4()), + name="untouched.pdf", + ) + + # Act + DocumentService.rename_document(dataset.id, document.id, new_name) + + # Assert + db_session_with_containers.refresh(document) + db_session_with_containers.refresh(untouched_file) + assert document.name == new_name + assert untouched_file.name == "untouched.pdf" + + +def test_rename_document_dataset_not_found(db_session_with_containers, mock_env): + """Rename raises Dataset not found when dataset id does not exist.""" + # Arrange + missing_dataset_id = str(uuid4()) + + # Act / Assert + 
with pytest.raises(ValueError, match="Dataset not found"): + DocumentService.rename_document(missing_dataset_id, str(uuid4()), "x") + + +def test_rename_document_not_found(db_session_with_containers, mock_env): + """Rename raises Document not found when document id is absent in the dataset.""" + # Arrange + dataset = make_dataset(db_session_with_containers, str(uuid4()), mock_env["current_user"].current_tenant_id) + + # Act / Assert + with pytest.raises(ValueError, match="Document not found"): + DocumentService.rename_document(dataset.id, str(uuid4()), "x") + + +def test_rename_document_permission_denied_when_tenant_mismatch(db_session_with_containers, mock_env): + """Rename raises No permission when document tenant differs from current_user tenant.""" + # Arrange + dataset = make_dataset(db_session_with_containers, str(uuid4()), mock_env["current_user"].current_tenant_id) + document = make_document( + db_session_with_containers, + dataset_id=dataset.id, + tenant_id=str(uuid4()), + ) + + # Act / Assert + with pytest.raises(ValueError, match="No permission"): + DocumentService.rename_document(dataset.id, document.id, "x") diff --git a/api/tests/unit_tests/services/test_document_service_rename_document.py b/api/tests/unit_tests/services/test_document_service_rename_document.py deleted file mode 100644 index 94850ecb09..0000000000 --- a/api/tests/unit_tests/services/test_document_service_rename_document.py +++ /dev/null @@ -1,176 +0,0 @@ -from types import SimpleNamespace -from unittest.mock import Mock, create_autospec, patch - -import pytest - -from models import Account -from services.dataset_service import DocumentService - - -@pytest.fixture -def mock_env(): - """Patch dependencies used by DocumentService.rename_document. - - Mocks: - - DatasetService.get_dataset - - DocumentService.get_document - - current_user (with current_tenant_id) - - db.session - """ - with ( - patch("services.dataset_service.DatasetService.get_dataset") as get_dataset, - patch("services.dataset_service.DocumentService.get_document") as get_document, - patch("services.dataset_service.current_user", create_autospec(Account, instance=True)) as current_user, - patch("extensions.ext_database.db.session") as db_session, - ): - current_user.current_tenant_id = "tenant-123" - yield { - "get_dataset": get_dataset, - "get_document": get_document, - "current_user": current_user, - "db_session": db_session, - } - - -def make_dataset(dataset_id="dataset-123", tenant_id="tenant-123", built_in_field_enabled=False): - return SimpleNamespace(id=dataset_id, tenant_id=tenant_id, built_in_field_enabled=built_in_field_enabled) - - -def make_document( - document_id="document-123", - dataset_id="dataset-123", - tenant_id="tenant-123", - name="Old Name", - data_source_info=None, - doc_metadata=None, -): - doc = Mock() - doc.id = document_id - doc.dataset_id = dataset_id - doc.tenant_id = tenant_id - doc.name = name - doc.data_source_info = data_source_info or {} - # property-like usage in code relies on a dict - doc.data_source_info_dict = dict(doc.data_source_info) - doc.doc_metadata = dict(doc_metadata or {}) - return doc - - -def test_rename_document_success(mock_env): - dataset_id = "dataset-123" - document_id = "document-123" - new_name = "New Document Name" - - dataset = make_dataset(dataset_id) - document = make_document(document_id=document_id, dataset_id=dataset_id) - - mock_env["get_dataset"].return_value = dataset - mock_env["get_document"].return_value = document - - result = DocumentService.rename_document(dataset_id, 
document_id, new_name) - - assert result is document - assert document.name == new_name - mock_env["db_session"].add.assert_called_once_with(document) - mock_env["db_session"].commit.assert_called_once() - - -def test_rename_document_with_built_in_fields(mock_env): - dataset_id = "dataset-123" - document_id = "document-123" - new_name = "Renamed" - - dataset = make_dataset(dataset_id, built_in_field_enabled=True) - document = make_document(document_id=document_id, dataset_id=dataset_id, doc_metadata={"foo": "bar"}) - - mock_env["get_dataset"].return_value = dataset - mock_env["get_document"].return_value = document - - DocumentService.rename_document(dataset_id, document_id, new_name) - - assert document.name == new_name - # BuiltInField.document_name == "document_name" in service code - assert document.doc_metadata["document_name"] == new_name - assert document.doc_metadata["foo"] == "bar" - - -def test_rename_document_updates_upload_file_when_present(mock_env): - dataset_id = "dataset-123" - document_id = "document-123" - new_name = "Renamed" - file_id = "file-123" - - dataset = make_dataset(dataset_id) - document = make_document( - document_id=document_id, - dataset_id=dataset_id, - data_source_info={"upload_file_id": file_id}, - ) - - mock_env["get_dataset"].return_value = dataset - mock_env["get_document"].return_value = document - - # Intercept UploadFile rename UPDATE chain - mock_query = Mock() - mock_query.where.return_value = mock_query - mock_env["db_session"].query.return_value = mock_query - - DocumentService.rename_document(dataset_id, document_id, new_name) - - assert document.name == new_name - mock_env["db_session"].query.assert_called() # update executed - - -def test_rename_document_does_not_update_upload_file_when_missing_id(mock_env): - """ - When data_source_info_dict exists but does not contain "upload_file_id", - UploadFile should not be updated. 
- """ - dataset_id = "dataset-123" - document_id = "document-123" - new_name = "Another Name" - - dataset = make_dataset(dataset_id) - # Ensure data_source_info_dict is truthy but lacks the key - document = make_document( - document_id=document_id, - dataset_id=dataset_id, - data_source_info={"url": "https://example.com"}, - ) - - mock_env["get_dataset"].return_value = dataset - mock_env["get_document"].return_value = document - - DocumentService.rename_document(dataset_id, document_id, new_name) - - assert document.name == new_name - # Should NOT attempt to update UploadFile - mock_env["db_session"].query.assert_not_called() - - -def test_rename_document_dataset_not_found(mock_env): - mock_env["get_dataset"].return_value = None - - with pytest.raises(ValueError, match="Dataset not found"): - DocumentService.rename_document("missing", "doc", "x") - - -def test_rename_document_not_found(mock_env): - dataset = make_dataset("dataset-123") - mock_env["get_dataset"].return_value = dataset - mock_env["get_document"].return_value = None - - with pytest.raises(ValueError, match="Document not found"): - DocumentService.rename_document(dataset.id, "missing", "x") - - -def test_rename_document_permission_denied_when_tenant_mismatch(mock_env): - dataset = make_dataset("dataset-123") - # different tenant than current_user.current_tenant_id - document = make_document(dataset_id=dataset.id, tenant_id="tenant-other") - - mock_env["get_dataset"].return_value = dataset - mock_env["get_document"].return_value = document - - with pytest.raises(ValueError, match="No permission"): - DocumentService.rename_document(dataset.id, document.id, "x") From 164ccb7c48f9482e227acc820bd731328fad8c9f Mon Sep 17 00:00:00 2001 From: Eric Cao Date: Thu, 5 Mar 2026 09:08:18 +0800 Subject: [PATCH 8/9] chore: add TypedDict related prompt to AGENTS.md (#32995) Co-authored-by: caoergou --- AGENTS.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/AGENTS.md b/AGENTS.md index 51fa6e4527..d25d2eed96 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -29,7 +29,7 @@ The codebase is split into: ## Language Style -- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`). +- **Python**: Keep type hints on functions and attributes, and implement relevant special methods (e.g., `__repr__`, `__str__`). Prefer `TypedDict` over `dict` or `Mapping` for type safety and better code documentation. - **TypeScript**: Use the strict config, rely on ESLint (`pnpm lint:fix` preferred) plus `pnpm type-check:tsgo`, and avoid `any` types. 
## General Practices From a4373d8b7b0f65807c49569da31f3bbe4febbd01 Mon Sep 17 00:00:00 2001 From: Stephen Zhou Date: Thu, 5 Mar 2026 10:45:16 +0800 Subject: [PATCH 9/9] chore: i18n hmr support, fix hmr for app context (#32997) --- web/app/(commonLayout)/layout.tsx | 2 +- web/app/account/(commonLayout)/layout.tsx | 2 +- web/app/account/oauth/authorize/layout.tsx | 4 +- .../model-provider-page/index.tsx | 10 +- ...p-context.tsx => app-context-provider.tsx} | 81 ++--------- web/context/app-context.ts | 73 ++++++++++ web/eslint-suppressions.json | 42 ------ web/package.json | 3 +- web/pnpm-lock.yaml | 132 +++++++++++++++--- web/vite.config.ts | 90 ++++++++++++ 10 files changed, 291 insertions(+), 148 deletions(-) rename web/context/{app-context.tsx => app-context-provider.tsx} (74%) create mode 100644 web/context/app-context.ts diff --git a/web/app/(commonLayout)/layout.tsx b/web/app/(commonLayout)/layout.tsx index abd5dd96fd..bd067fde6a 100644 --- a/web/app/(commonLayout)/layout.tsx +++ b/web/app/(commonLayout)/layout.tsx @@ -8,7 +8,7 @@ import GotoAnything from '@/app/components/goto-anything' import Header from '@/app/components/header' import HeaderWrapper from '@/app/components/header/header-wrapper' import ReadmePanel from '@/app/components/plugins/readme-panel' -import { AppContextProvider } from '@/context/app-context' +import { AppContextProvider } from '@/context/app-context-provider' import { EventEmitterContextProvider } from '@/context/event-emitter' import { ModalContextProvider } from '@/context/modal-context' import { ProviderContextProvider } from '@/context/provider-context' diff --git a/web/app/account/(commonLayout)/layout.tsx b/web/app/account/(commonLayout)/layout.tsx index e4125015d9..47fb47b02b 100644 --- a/web/app/account/(commonLayout)/layout.tsx +++ b/web/app/account/(commonLayout)/layout.tsx @@ -4,7 +4,7 @@ import { AppInitializer } from '@/app/components/app-initializer' import AmplitudeProvider from '@/app/components/base/amplitude' import GA, { GaType } from '@/app/components/base/ga' import HeaderWrapper from '@/app/components/header/header-wrapper' -import { AppContextProvider } from '@/context/app-context' +import { AppContextProvider } from '@/context/app-context-provider' import { EventEmitterContextProvider } from '@/context/event-emitter' import { ModalContextProvider } from '@/context/modal-context' import { ProviderContextProvider } from '@/context/provider-context' diff --git a/web/app/account/oauth/authorize/layout.tsx b/web/app/account/oauth/authorize/layout.tsx index 189971b16f..7f6b270b45 100644 --- a/web/app/account/oauth/authorize/layout.tsx +++ b/web/app/account/oauth/authorize/layout.tsx @@ -2,7 +2,7 @@ import Loading from '@/app/components/base/loading' import Header from '@/app/signin/_header' -import { AppContextProvider } from '@/context/app-context' +import { AppContextProvider } from '@/context/app-context-provider' import { useGlobalPublicStore } from '@/context/global-public-context' import useDocumentTitle from '@/hooks/use-document-title' import { useIsLogin } from '@/service/use-common' @@ -38,7 +38,7 @@ export default function SignInLayout({ children }: any) { {systemFeatures.branding.enabled === false && ( -
+
            © {' '} {new Date().getFullYear()}
diff --git a/web/app/components/header/account-setting/model-provider-page/index.tsx b/web/app/components/header/account-setting/model-provider-page/index.tsx
index 7606bbc04f..4fb1b770e1 100644
--- a/web/app/components/header/account-setting/model-provider-page/index.tsx
+++ b/web/app/components/header/account-setting/model-provider-page/index.tsx
@@ -99,7 +99,7 @@ const ModelProviderPage = ({ searchText }: Props) => {
   return (
-
{t('modelProvider.models', { ns: 'common' })}
+
{t('modelProvider.models', { ns: 'common' })}
{ > {defaultModelNotConfigured &&
} {defaultModelNotConfigured && ( -
+
{t('modelProvider.notConfigured', { ns: 'common' })}
@@ -129,8 +129,8 @@ const ModelProviderPage = ({ searchText }: Props) => {
-
{t('modelProvider.emptyProviderTitle', { ns: 'common' })}
-
{t('modelProvider.emptyProviderTip', { ns: 'common' })}
+
{t('modelProvider.emptyProviderTitle', { ns: 'common' })}
+
{t('modelProvider.emptyProviderTip', { ns: 'common' })}
)} {!!filteredConfiguredProviders?.length && ( @@ -145,7 +145,7 @@ const ModelProviderPage = ({ searchText }: Props) => { )} {!!filteredNotConfiguredProviders?.length && ( <> -
{t('modelProvider.toBeConfigured', { ns: 'common' })}
+
{t('modelProvider.toBeConfigured', { ns: 'common' })}
                {filteredNotConfiguredProviders?.map(provider => ( ({
-  userProfile: userProfilePlaceholder,
-  currentWorkspace: initialWorkspaceInfo,
-  isCurrentWorkspaceManager: false,
-  isCurrentWorkspaceOwner: false,
-  isCurrentWorkspaceEditor: false,
-  isCurrentWorkspaceDatasetOperator: false,
-  mutateUserProfile: noop,
-  mutateCurrentWorkspace: noop,
-  langGeniusVersionInfo: initialLangGeniusVersionInfo,
-  useSelector,
-  isLoadingCurrentWorkspace: false,
-  isValidatingCurrentWorkspace: false,
-})
-
-export function useSelector<T>(selector: (value: AppContextValue) => T): T {
-  return useContextSelector(AppContext, selector)
-}
-
 export type AppContextProviderProps = {
   children: ReactNode
 }
@@ -170,7 +109,7 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
   // Report user and workspace info to Amplitude when loaded
   if (userProfile?.id) {
     setUserId(userProfile.email)
-    const properties: Record = {
+    const properties: Record = {
       email: userProfile.email,
       name: userProfile.name,
       has_password: userProfile.is_password_set,
@@ -213,7 +152,3 @@ export const AppContextProvider: FC<AppContextProviderProps> = ({ children }) =>
   )
 }
-
-export const useAppContext = () => useContext(AppContext)
-
-export default AppContext
diff --git a/web/context/app-context.ts b/web/context/app-context.ts
new file mode 100644
index 0000000000..298e213e7d
--- /dev/null
+++ b/web/context/app-context.ts
@@ -0,0 +1,73 @@
+'use client'
+
+import type { ICurrentWorkspace, LangGeniusVersionResponse, UserProfileResponse } from '@/models/common'
+import { noop } from 'es-toolkit/function'
+import { createContext, useContext, useContextSelector } from 'use-context-selector'
+
+export type AppContextValue = {
+  userProfile: UserProfileResponse
+  mutateUserProfile: VoidFunction
+  currentWorkspace: ICurrentWorkspace
+  isCurrentWorkspaceManager: boolean
+  isCurrentWorkspaceOwner: boolean
+  isCurrentWorkspaceEditor: boolean
+  isCurrentWorkspaceDatasetOperator: boolean
+  mutateCurrentWorkspace: VoidFunction
+  langGeniusVersionInfo: LangGeniusVersionResponse
+  useSelector: typeof useSelector
+  isLoadingCurrentWorkspace: boolean
+  isValidatingCurrentWorkspace: boolean
+}
+
+export const userProfilePlaceholder = {
+  id: '',
+  name: '',
+  email: '',
+  avatar: '',
+  avatar_url: '',
+  is_password_set: false,
+}
+
+export const initialLangGeniusVersionInfo = {
+  current_env: '',
+  current_version: '',
+  latest_version: '',
+  release_date: '',
+  release_notes: '',
+  version: '',
+  can_auto_update: false,
+}
+
+export const initialWorkspaceInfo: ICurrentWorkspace = {
+  id: '',
+  name: '',
+  plan: '',
+  status: '',
+  created_at: 0,
+  role: 'normal',
+  providers: [],
+  trial_credits: 200,
+  trial_credits_used: 0,
+  next_credit_reset_date: 0,
+}
+
+export const AppContext = createContext<AppContextValue>({
+  userProfile: userProfilePlaceholder,
+  currentWorkspace: initialWorkspaceInfo,
+  isCurrentWorkspaceManager: false,
+  isCurrentWorkspaceOwner: false,
+  isCurrentWorkspaceEditor: false,
+  isCurrentWorkspaceDatasetOperator: false,
+  mutateUserProfile: noop,
+  mutateCurrentWorkspace: noop,
+  langGeniusVersionInfo: initialLangGeniusVersionInfo,
+  useSelector,
+  isLoadingCurrentWorkspace: false,
+  isValidatingCurrentWorkspace: false,
+})
+
+export function useSelector<T>(selector: (value: AppContextValue) => T): T {
+  return useContextSelector(AppContext, selector)
+}
+
+export const useAppContext = () => useContext(AppContext)
diff --git a/web/eslint-suppressions.json b/web/eslint-suppressions.json
index 00c817eac9..0880084320 100644
--- a/web/eslint-suppressions.json
+++
b/web/eslint-suppressions.json @@ -301,9 +301,6 @@ } }, "app/account/oauth/authorize/layout.tsx": { - "tailwindcss/enforce-consistent-class-order": { - "count": 1 - }, "ts/no-explicit-any": { "count": 1 } @@ -4697,11 +4694,6 @@ "count": 3 } }, - "app/components/header/account-setting/model-provider-page/index.tsx": { - "tailwindcss/enforce-consistent-class-order": { - "count": 5 - } - }, "app/components/header/account-setting/model-provider-page/install-from-marketplace.tsx": { "tailwindcss/enforce-consistent-class-order": { "count": 3 @@ -6472,11 +6464,6 @@ "count": 2 } }, - "app/components/workflow/__tests__/trigger-status-sync.test.tsx": { - "ts/no-explicit-any": { - "count": 2 - } - }, "app/components/workflow/block-selector/all-start-blocks.tsx": { "react-hooks-extra/no-direct-set-state-in-use-effect": { "count": 1 @@ -8488,11 +8475,6 @@ "count": 5 } }, - "app/components/workflow/nodes/tool/__tests__/output-schema-utils.test.ts": { - "ts/no-explicit-any": { - "count": 1 - } - }, "app/components/workflow/nodes/tool/components/copy-id.tsx": { "no-restricted-imports": { "count": 1 @@ -8617,11 +8599,6 @@ "count": 1 } }, - "app/components/workflow/nodes/trigger-plugin/utils/__tests__/form-helpers.test.ts": { - "ts/no-explicit-any": { - "count": 2 - } - }, "app/components/workflow/nodes/trigger-plugin/utils/form-helpers.ts": { "ts/no-explicit-any": { "count": 7 @@ -9594,14 +9571,6 @@ "count": 5 } }, - "context/app-context.tsx": { - "react-refresh/only-export-components": { - "count": 2 - }, - "ts/no-explicit-any": { - "count": 1 - } - }, "context/datasets-context.tsx": { "react-refresh/only-export-components": { "count": 1 @@ -9762,17 +9731,6 @@ "count": 1 } }, - "lib/utils.ts": { - "import/consistent-type-specifier-style": { - "count": 1 - }, - "perfectionist/sort-named-imports": { - "count": 1 - }, - "style/quotes": { - "count": 2 - } - }, "models/common.ts": { "ts/no-explicit-any": { "count": 3 diff --git a/web/package.json b/web/package.json index 5527dcaa37..0633a499b4 100644 --- a/web/package.json +++ b/web/package.json @@ -243,8 +243,9 @@ "tsx": "4.21.0", "typescript": "5.9.3", "uglify-js": "3.19.3", - "vinext": "https://pkg.pr.new/hyoban/vinext@a30ba79", + "vinext": "https://pkg.pr.new/hyoban/vinext@556a6d6", "vite": "8.0.0-beta.16", + "vite-plugin-inspect": "11.3.3", "vite-tsconfig-paths": "6.1.1", "vitest": "4.0.18", "vitest-canvas-mock": "1.1.3" diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 1c5347dd2e..3620dbaae2 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -596,11 +596,14 @@ importers: specifier: 3.19.3 version: 3.19.3 vinext: - specifier: https://pkg.pr.new/hyoban/vinext@a30ba79 - version: https://pkg.pr.new/hyoban/vinext@a30ba79(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)) + specifier: https://pkg.pr.new/hyoban/vinext@556a6d6 + version: https://pkg.pr.new/hyoban/vinext@556a6d6(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)) vite: specifier: 8.0.0-beta.16 version: 
8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2) + vite-plugin-inspect: + specifier: 11.3.3 + version: 11.3.3(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)) vite-tsconfig-paths: specifier: 6.1.1 version: 6.1.1(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)) @@ -2175,20 +2178,13 @@ packages: resolution: {integrity: sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==} engines: {node: '>= 10.0.0'} - '@pivanov/utils@0.0.2': - resolution: {integrity: sha512-q9CN0bFWxWgMY5hVVYyBgez1jGiLBa6I+LkG37ycylPhFvEGOOeaADGtUSu46CaZasPnlY8fCdVJZmrgKb1EPA==} - peerDependencies: - react: '>=18' - react-dom: '>=18' - - '@pkgjs/parseargs@0.11.0': - resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} - engines: {node: '>=14'} - '@pkgr/core@0.2.9': resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + '@polka/url@1.0.0-next.29': + resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} + '@preact/signals-core@1.12.2': resolution: {integrity: sha512-5Yf8h1Ke3SMHr15xl630KtwPTW4sYDFkkxS0vQ8UiQLWwZQnrF9IKaVG1mN5VcJz52EcWs2acsc/Npjha/7ysA==} @@ -3874,6 +3870,9 @@ packages: birecord@0.1.1: resolution: {integrity: sha512-VUpsf/qykW0heRlC8LooCq28Kxn3mAqKohhDG/49rrsQ1dT1CXyj/pgXS+5BSRzFTR/3DyIBOqQOrGyZOh71Aw==} + birpc@2.9.0: + resolution: {integrity: sha512-KrayHS5pBi69Xi9JmvoqrIgYGDkD6mcSe/i6YKi3w5kekCLzrX4+nawcXqrj2tIp50Kw/mT/s3p+GVK0A0sKxw==} + bl@4.1.0: resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} @@ -4580,6 +4579,9 @@ packages: resolution: {integrity: sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==} engines: {node: '>=18'} + error-stack-parser-es@1.0.5: + resolution: {integrity: sha512-5qucVt2XcuGMcEGgWI7i+yZpmpByQ8J1lHhcL7PwqCwu9FPP3VUXzT4ltHe5i2z9dePwEHcDVOAfSnHsOlCXRA==} + es-module-lexer@1.7.0: resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} @@ -6109,6 +6111,10 @@ packages: moo-color@1.0.3: resolution: {integrity: sha512-i/+ZKXMDf6aqYtBhuOcej71YSlbjT3wCO/4H1j8rPvxDJEifdwgg5MaFyu6iYAT8GBZJg2z0dkgK4YMzvURALQ==} + mrmime@2.0.1: + resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==} + engines: {node: '>=10'} + ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} @@ -6231,6 +6237,9 @@ packages: obug@2.1.1: resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + ohash@2.0.11: + resolution: {integrity: sha512-RdR9FQrFwNBNXAr4GixM8YaRZRJ5PUWbKYbE5eOsrwAjJW0q2REGcf79oYPsLyskQCZG1PLN+S/K1V00joZAoQ==} + once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} @@ -6354,6 +6363,9 @@ packages: pend@1.2.0: resolution: {integrity: sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==} 
 
+  perfect-debounce@2.1.0:
+    resolution: {integrity: sha512-LjgdTytVFXeUgtHZr9WYViYSM/g8MkcTPYDlPa3cDqMirHjKiSZPYd6DoL7pK8AJQr+uWkQvCjHNdiMqsrJs+g==}
+
   periscopic@4.0.2:
     resolution: {integrity: sha512-sqpQDUy8vgB7ycLkendSKS6HnVz1Rneoc3Rc+ZBUCe2pbqlVuCC5vF52l0NJ1aiMg/r1qfYF9/myz8CZeI2rjA==}
@@ -6982,6 +6994,10 @@
   simple-swizzle@0.2.4:
     resolution: {integrity: sha512-nAu1WFPQSMNr2Zn9PGSZK9AGn4t/y97lEm+MXTtUDwfP0ksAIX4nO+6ruD9Jwut4C49SB1Ws+fbXsm/yScWOHw==}
 
+  sirv@3.0.2:
+    resolution: {integrity: sha512-2wcC/oGxHis/BoHkkPwldgiPSYcpZK3JU28WoMVv55yHJgcZ8rlXvuG9iZggz+sU1d4bRgIGASwyWqjxu3FM0g==}
+    engines: {node: '>=18'}
+
   sisteransi@1.0.5:
     resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==}
@@ -7289,6 +7305,10 @@
     resolution: {integrity: sha512-A5F0cM6+mDleacLIEUkmfpkBbnHJFV1d2rprHU2MXNk7mlxHq2zGojA+SRvQD1RoMo9gqjZPWEaKG4v1BQ48lw==}
     engines: {node: ^20.19.0 || ^22.13.0 || >=24}
 
+  totalist@3.0.1:
+    resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==}
+    engines: {node: '>=6'}
+
   tough-cookie@6.0.0:
     resolution: {integrity: sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==}
     engines: {node: '>=16'}
@@ -7436,6 +7456,10 @@
   unpic@4.2.2:
     resolution: {integrity: sha512-z6T2ScMgRV2y2H8MwwhY5xHZWXhUx/YxtOCGJwfURSl7ypVy4HpLIMWoIZKnnxQa/RKzM0kg8hUh0paIrpLfvw==}
 
+  unplugin-utils@0.3.1:
+    resolution: {integrity: sha512-5lWVjgi6vuHhJ526bI4nlCOmkCIF3nnfXkCMDeMJrtdvxTs6ZFCM8oNufGTsDbKv/tJ/xj8RpvXjRuPBZJuJog==}
+    engines: {node: '>=20.19.0'}
+
   unplugin@2.1.0:
     resolution: {integrity: sha512-us4j03/499KhbGP8BU7Hrzrgseo+KdfJYWcbcajCOqsAyb8Gk0Yn2kiUIcZISYCb1JFaZfIuG3b42HmguVOKCQ==}
     engines: {node: '>=18.12.0'}
@@ -7542,8 +7566,8 @@
   vfile@6.0.3:
     resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}
 
-  vinext@https://pkg.pr.new/hyoban/vinext@a30ba79:
-    resolution: {integrity: sha512-yx/gCneOli5eGTrLUq6/M7A6DGQs14qOJW/Xp9RN6sTI0mErKyWWjO5E7FZT98BJbqH5xzI5nk8EOCLF3bojkA==, tarball: https://pkg.pr.new/hyoban/vinext@a30ba79}
+  vinext@https://pkg.pr.new/hyoban/vinext@556a6d6:
+    resolution: {integrity: sha512-Sz8RkTDsY6cnGrevlQi4nXgahu8okEGsdKY5m31d/L9tXo35bNETMHfVee5gaI2UKZS9LMcffWaTOxxINUgogQ==, tarball: https://pkg.pr.new/hyoban/vinext@556a6d6}
     version: 0.0.5
     engines: {node: '>=22'}
     hasBin: true
@@ -7552,12 +7576,32 @@
     react-dom: '>=19.2.0'
     vite: ^7.0.0
 
+  vite-dev-rpc@1.1.0:
+    resolution: {integrity: sha512-pKXZlgoXGoE8sEKiKJSng4hI1sQ4wi5YT24FCrwrLt6opmkjlqPPVmiPWWJn8M8byMxRGzp1CrFuqQs4M/Z39A==}
+    peerDependencies:
+      vite: ^2.9.0 || ^3.0.0-0 || ^4.0.0-0 || ^5.0.0-0 || ^6.0.1 || ^7.0.0-0
+
+  vite-hot-client@2.1.0:
+    resolution: {integrity: sha512-7SpgZmU7R+dDnSmvXE1mfDtnHLHQSisdySVR7lO8ceAXvM0otZeuQQ6C8LrS5d/aYyP/QZ0hI0L+dIPrm4YlFQ==}
+    peerDependencies:
+      vite: ^2.6.0 || ^3.0.0 || ^4.0.0 || ^5.0.0-0 || ^6.0.0-0 || ^7.0.0-0
+
   vite-plugin-commonjs@0.10.4:
     resolution: {integrity: sha512-eWQuvQKCcx0QYB5e5xfxBNjQKyrjEWZIR9UOkOV6JAgxVhtbZvCOF+FNC2ZijBJ3U3Px04ZMMyyMyFBVWIJ5+g==}
 
   vite-plugin-dynamic-import@1.6.0:
     resolution: {integrity: sha512-TM0sz70wfzTIo9YCxVFwS8OA9lNREsh+0vMHGSkWDTZ7bgd1Yjs5RV8EgB634l/91IsXJReg0xtmuQqP0mf+rg==}
 
+  vite-plugin-inspect@11.3.3:
+    resolution: {integrity: sha512-u2eV5La99oHoYPHE6UvbwgEqKKOQGz86wMg40CCosP6q8BkB6e5xPneZfYagK4ojPJSj5anHCrnvC20DpwVdRA==}
+    engines: {node: '>=14'}
+    peerDependencies:
+      '@nuxt/kit': '*'
+      vite: ^6.0.0 || ^7.0.0-0
+    peerDependenciesMeta:
+      '@nuxt/kit':
+        optional: true
+
   vite-plugin-storybook-nextjs@3.2.2:
     resolution: {integrity: sha512-ZJXCrhi9mW4jEJTKhJ5sUtpBe84mylU40me2aMuLSgIJo4gE/Rc559hZvMYLFTWta1gX7Rm8Co5EEHakPct+wA==}
     peerDependencies:
@@ -9707,16 +9751,10 @@
       '@parcel/watcher-win32-x64': 2.5.6
     optional: true
 
-  '@pivanov/utils@0.0.2(react-dom@19.2.4(react@19.2.4))(react@19.2.4)':
-    dependencies:
-      react: 19.2.4
-      react-dom: 19.2.4(react@19.2.4)
-
-  '@pkgjs/parseargs@0.11.0':
-    optional: true
-
   '@pkgr/core@0.2.9': {}
 
+  '@polka/url@1.0.0-next.29': {}
+
   '@preact/signals-core@1.12.2': {}
 
   '@preact/signals@1.3.2(preact@10.28.2)':
@@ -11533,6 +11571,8 @@
   birecord@0.1.1: {}
 
+  birpc@2.9.0: {}
+
   bl@4.1.0:
     dependencies:
       buffer: 5.7.1
@@ -12237,6 +12277,8 @@
   environment@1.1.0: {}
 
+  error-stack-parser-es@1.0.5: {}
+
   es-module-lexer@1.7.0: {}
 
   es-module-lexer@2.0.0: {}
@@ -14300,6 +14342,8 @@
     dependencies:
       color-name: 1.1.4
 
+  mrmime@2.0.1: {}
+
   ms@2.1.3: {}
 
   mz@2.7.0:
@@ -14396,6 +14440,8 @@
   obug@2.1.1: {}
 
+  ohash@2.0.11: {}
+
   once@1.4.0:
     dependencies:
       wrappy: 1.0.2
@@ -14549,6 +14595,8 @@
   pend@1.2.0: {}
 
+  perfect-debounce@2.1.0: {}
+
   periscopic@4.0.2:
     dependencies:
       '@types/estree': 1.0.8
@@ -15381,6 +15429,12 @@
     dependencies:
       is-arrayish: 0.3.4
 
+  sirv@3.0.2:
+    dependencies:
+      '@polka/url': 1.0.0-next.29
+      mrmime: 2.0.1
+      totalist: 3.0.1
+
   sisteransi@1.0.5: {}
 
   size-sensor@1.0.3: {}
@@ -15696,6 +15750,8 @@
     dependencies:
       eslint-visitor-keys: 5.0.0
 
+  totalist@3.0.1: {}
+
   tough-cookie@6.0.0:
     dependencies:
       tldts: 7.0.17
@@ -15840,6 +15896,11 @@
   unpic@4.2.2: {}
 
+  unplugin-utils@0.3.1:
+    dependencies:
+      pathe: 2.0.3
+      picomatch: 4.0.3
+
   unplugin@2.1.0:
     dependencies:
       acorn: 8.16.0
@@ -15933,7 +15994,7 @@
       '@types/unist': 3.0.3
       vfile-message: 4.0.3
 
-  vinext@https://pkg.pr.new/hyoban/vinext@a30ba79(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)):
+  vinext@https://pkg.pr.new/hyoban/vinext@556a6d6(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)):
     dependencies:
       '@unpic/react': 1.0.2(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
       '@vercel/og': 0.8.6
@@ -15952,6 +16013,16 @@
       - typescript
      - webpack
 
+  vite-dev-rpc@1.1.0(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)):
+    dependencies:
+      birpc: 2.9.0
+      vite: 8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
+      vite-hot-client: 2.1.0(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))
+
+  vite-hot-client@2.1.0(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)):
+    dependencies:
+      vite: 8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
+
   vite-plugin-commonjs@0.10.4:
     dependencies:
       acorn: 8.16.0
@@ -15965,6 +16036,21 @@
       fast-glob: 3.3.3
       magic-string: 0.30.21
 
+  vite-plugin-inspect@11.3.3(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)):
+    dependencies:
+      ansis: 4.2.0
+      debug: 4.4.3
+      error-stack-parser-es: 1.0.5
+      ohash: 2.0.11
+      open: 10.2.0
+      perfect-debounce: 2.1.0
+      sirv: 3.0.2
+      unplugin-utils: 0.3.1
+      vite: 8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)
+      vite-dev-rpc: 1.1.0(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2))
+    transitivePeerDependencies:
+      - supports-color
+
   vite-plugin-storybook-nextjs@3.2.2(next@16.1.5(@babel/core@7.29.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)(sass@1.93.2))(storybook@10.2.13(@testing-library/dom@10.4.1)(react-dom@19.2.4(react@19.2.4))(react@19.2.4))(typescript@5.9.3)(vite@8.0.0-beta.16(@types/node@24.10.12)(esbuild@0.27.2)(jiti@1.21.7)(sass@1.93.2)(terser@5.46.0)(tsx@4.21.0)(yaml@2.8.2)):
     dependencies:
       '@next/env': 16.0.0
diff --git a/web/vite.config.ts b/web/vite.config.ts
index 152df913f8..b07e7ea7be 100644
--- a/web/vite.config.ts
+++ b/web/vite.config.ts
@@ -6,6 +6,7 @@ import react from '@vitejs/plugin-react'
 import { codeInspectorPlugin } from 'code-inspector-plugin'
 import vinext from 'vinext'
 import { defineConfig } from 'vite'
+import Inspect from 'vite-plugin-inspect'
 import tsconfigPaths from 'vite-tsconfig-paths'
 
 const __dirname = path.dirname(fileURLToPath(import.meta.url))
@@ -70,6 +71,93 @@ const createForceInspectorClientInjectionPlugin = (): Plugin => {
   }
 }
 
+function customI18nHmrPlugin(): Plugin {
+  const injectTarget = inspectorInjectTarget
+  const i18nHmrClientMarker = 'custom-i18n-hmr-client'
+  const i18nHmrClientSnippet = `/* ${i18nHmrClientMarker} */
+if (import.meta.hot) {
+  const getI18nUpdateTarget = (file) => {
+    const match = file.match(/[/\\\\]i18n[/\\\\]([^/\\\\]+)[/\\\\]([^/\\\\]+)\\.json$/)
+    if (!match)
+      return null
+    const [, locale, namespaceFile] = match
+    return { locale, namespaceFile }
+  }
+
+  import.meta.hot.on('i18n-update', async ({ file, content }) => {
+    const target = getI18nUpdateTarget(file)
+    if (!target)
+      return
+
+    const [{ getI18n }, { camelCase }] = await Promise.all([
+      import('react-i18next'),
+      import('es-toolkit/string'),
+    ])
+
+    const i18n = getI18n()
+    if (!i18n)
+      return
+    if (target.locale !== i18n.language)
+      return
+
+    let resources
+    try {
+      resources = JSON.parse(content)
+    }
+    catch {
+      return
+    }
+
+    const namespace = camelCase(target.namespaceFile)
+    i18n.addResourceBundle(target.locale, namespace, resources, true, true)
+    i18n.emit('languageChanged', i18n.language)
+  })
+}
+`
+
+  const injectI18nHmrClient = (code: string) => {
+    if (code.includes(i18nHmrClientMarker))
+      return code
+
+    const useClientMatch = code.match(/(['"])use client\1;?\s*\n/)
+    if (!useClientMatch)
+      return `${i18nHmrClientSnippet}\n${code}`
+
+    const insertAt = (useClientMatch.index ?? 0) + useClientMatch[0].length
+    return `${code.slice(0, insertAt)}\n${i18nHmrClientSnippet}\n${code.slice(insertAt)}`
+  }
+
+  return {
+    name: 'custom-i18n-hmr',
+    apply: 'serve',
+    handleHotUpdate({ file, server }) {
+      if (file.endsWith('.json') && file.includes('/i18n/')) {
+        server.ws.send({
+          type: 'custom',
+          event: 'i18n-update',
+          data: {
+            file,
+            content: fs.readFileSync(file, 'utf-8'),
+          },
+        })
+
+        // return empty array to prevent the default HMR
+        return []
+      }
+    },
+    transform(code, id) {
+      const cleanId = normalizeInspectorModuleId(id)
+      if (cleanId !== injectTarget)
+        return null
+
+      const nextCode = injectI18nHmrClient(code)
+      if (nextCode === code)
+        return null
+      return { code: nextCode, map: null }
+    },
+  }
+}
+
 export default defineConfig(({ mode }) => {
   const isTest = mode === 'test'
 
@@ -89,10 +177,12 @@ export default defineConfig(({ mode }) => {
         } as Plugin,
       ]
     : [
+        Inspect(),
         createCodeInspectorPlugin(),
         createForceInspectorClientInjectionPlugin(),
         react(),
         vinext(),
+        customI18nHmrPlugin(),
       ],
   resolve: {
     alias: {