Mirror of https://github.com/langgenius/dify.git (synced 2026-02-04 18:57:51 +08:00)

Compare commits: 362 commits, build/debu ... feat/colla
| SHA1 | Author | Date | |
|---|---|---|---|
| f4321279b9 | |||
| 2a372df33c | |||
| ef536ba909 | |||
| b192c6e658 | |||
| 89b2ae01a6 | |||
| edb4457684 | |||
| bb6d6a4f96 | |||
| 486a30402b | |||
| e105dc6289 | |||
| 51c8c50b82 | |||
| 1b70a7e4c7 | |||
| eaf888b02a | |||
| f99ac24d5c | |||
| bdac6f91dd | |||
| 9be496f953 | |||
| 4acca22ff0 | |||
| 018175ec2d | |||
| faa88dc2f3 | |||
| 060c7f2b45 | |||
| acb603bff7 | |||
| e36ee54a16 | |||
| f3fa4f11ba | |||
| cb8fc9cf2d | |||
| aaa3d2d74f | |||
| c17f564718 | |||
| 3389071361 | |||
| 41473ff450 | |||
| 805bb7c468 | |||
| 995d5ccf66 | |||
| 0d08f7db97 | |||
| 6443366f50 | |||
| 70c41a7dc3 | |||
| 8804623121 | |||
| 1fb6d1286f | |||
| 511df81201 | |||
| 682c93f262 | |||
| 51c96b0b7e | |||
| 224f426765 | |||
| e9657cfb48 | |||
| 4200ac0da3 | |||
| 434f7f3bcb | |||
| 03cc196965 | |||
| 25c88b3f5c | |||
| 2d94904241 | |||
| a99e70d96e | |||
| 9eeceb2455 | |||
| 7901e18fa6 | |||
| 2befef0b21 | |||
| 8869cd7008 | |||
| 91e6ae2a7d | |||
| 6ab8e05a5e | |||
| 717f99a352 | |||
| 735cd78dc2 | |||
| c820501cbb | |||
| 43ef2395ac | |||
| bb3d94f1c5 | |||
| c45fbb6491 | |||
| fc291e4ca2 | |||
| b549d669d6 | |||
| 802b38eede | |||
| 4b57e7bd53 | |||
| bfedee0532 | |||
| 1845938e70 | |||
| fad81ab85e | |||
| d1c64f5c74 | |||
| 7f6c93bdce | |||
| 7730c88c74 | |||
| ac6b540fd8 | |||
| 8c9276370c | |||
| b91370aff7 | |||
| 30424df7ce | |||
| 14f7f4758a | |||
| 79c19983e0 | |||
| aeb3fc6729 | |||
| 0c18d4e058 | |||
| bd597497e7 | |||
| be1f841b37 | |||
| d98a428100 | |||
| 26d330e744 | |||
| 61bed38afb | |||
| 16fbc6b270 | |||
| fe132de3c8 | |||
| f85bf0867c | |||
| b441a7fbc4 | |||
| 8497d296b1 | |||
| 3ee2508ec8 | |||
| ff8d5ac4b5 | |||
| 7fc98b2183 | |||
| a4adafd8ad | |||
| c1bc3aeab9 | |||
| edf962cdb5 | |||
| 2fa13cdf86 | |||
| 39de7673eb | |||
| d930d8cc4a | |||
| 97626a3ba5 | |||
| b7f7d04639 | |||
| 13674bd859 | |||
| fb9cbc0471 | |||
| 2f60288d86 | |||
| ee3ded0fc2 | |||
| 351bad9ec4 | |||
| 9bf7473bbf | |||
| fa09c88f5c | |||
| 83df78d0c8 | |||
| 79266f7302 | |||
| 7fecc7236c | |||
| 9c7f6b7b71 | |||
| b46da93e99 | |||
| e299a1fb20 | |||
| 122033cadb | |||
| df9bd1b3b5 | |||
| f74492eb59 | |||
| eaf1ae37dd | |||
| 8e3b412ff6 | |||
| ba17f576e9 | |||
| 9415ce4512 | |||
| 239536933b | |||
| 80b34598e9 | |||
| 9c66b92c34 | |||
| 79872ea5e2 | |||
| cbf181bd76 | |||
| 1393d21858 | |||
| 3a46b7bd18 | |||
| 0bbfd81d26 | |||
| 86db517142 | |||
| 50151f4007 | |||
| 0395d1f91f | |||
| 5f4c1e4057 | |||
| d14413f3b0 | |||
| 4fd968270c | |||
| 708a7dd362 | |||
| cd85b75312 | |||
| d685da377e | |||
| 8583992d23 | |||
| 23fec75c90 | |||
| ebe7303894 | |||
| 79fb977f10 | |||
| c0af3414a3 | |||
| 1857d37fae | |||
| 60fdbb56a9 | |||
| 4c7853164d | |||
| 6c7a3ce4bb | |||
| a9e74b21f1 | |||
| e6730f7164 | |||
| 3344723393 | |||
| c571185a91 | |||
| 325c1cfa41 | |||
| 1069421753 | |||
| b33a97ea5b | |||
| d2c1d4c337 | |||
| 67762cf1d8 | |||
| eadce0287c | |||
| ecaff5b63f | |||
| a300c9ef96 | |||
| 44fe71e4db | |||
| 0ac32188c5 | |||
| 9aaace706b | |||
| b22de5a824 | |||
| 97463661c1 | |||
| 239a11855a | |||
| 0632557d91 | |||
| 44be7d4c51 | |||
| efb4a9d327 | |||
| a077a3f609 | |||
| 3ccec0aab0 | |||
| 3006133f0e | |||
| 79beb25530 | |||
| b47b228164 | |||
| be91db14d9 | |||
| 120893209e | |||
| f19630bcf5 | |||
| 9d93fda471 | |||
| d986659add | |||
| 00dab7ca5f | |||
| a4add403fb | |||
| e9cdc96c74 | |||
| 6af1fea232 | |||
| 45d5d9e44f | |||
| 376a084aca | |||
| d1f42d47fe | |||
| 64b8fd87ad | |||
| 364be48248 | |||
| 2bce046278 | |||
| 1120d552b6 | |||
| 69cab0817f | |||
| c4d03bf378 | |||
| 6c039be2ca | |||
| 832dabc8a4 | |||
| 1da2028d9d | |||
| 7c3f6dcc8d | |||
| 1472884eb5 | |||
| ec22b1c706 | |||
| a1712df7c2 | |||
| a40e11cb3e | |||
| 61c46bea40 | |||
| 1c5c28a82c | |||
| 2310145937 | |||
| 6a9c9cadd0 | |||
| 7774ff9944 | |||
| 33d4c95470 | |||
| 659cbc05a9 | |||
| 6ce65de2cd | |||
| 93b2eb3ff6 | |||
| bf71300635 | |||
| 37ecd4a0bc | |||
| 827a1b181b | |||
| c4e7cb75cd | |||
| 98e4bfcda8 | |||
| ee48ca7671 | |||
| 4ba6de1116 | |||
| bfbe636555 | |||
| 54ae43ef47 | |||
| 7a74b5ee3e | |||
| 0e9d43d605 | |||
| cc54363c27 | |||
| 89affe3139 | |||
| 2c4977dbb1 | |||
| e240175116 | |||
| 2398ed6fe8 | |||
| a8420ac33c | |||
| 8470be6411 | |||
| 3d6295c622 | |||
| ff2f7206f3 | |||
| b937fc8978 | |||
| 86a9a51952 | |||
| 4188c9a1dd | |||
| 8c00f89e36 | |||
| 9e8ac5c96b | |||
| 05a67f4716 | |||
| f49476a206 | |||
| c1e9c56e25 | |||
| d5dd73cacf | |||
| 21f7a49b4e | |||
| 716ac04e13 | |||
| c28a32fc47 | |||
| 31cba28e8a | |||
| 48cd7e6481 | |||
| 47aba1c9f9 | |||
| 0f3f8bc0d9 | |||
| e0df12c212 | |||
| eb448d9bb8 | |||
| 0ba77f13db | |||
| f0a2eb843c | |||
| 5cf3d9e4d9 | |||
| 41958f55cd | |||
| 600ad232e1 | |||
| 7a3825cfce | |||
| 9519653422 | |||
| efa2307c73 | |||
| 068fa3d0e3 | |||
| 13d8dbd542 | |||
| b442ba8b2b | |||
| 10e36d2355 | |||
| 13c53fedad | |||
| 4bda1bd884 | |||
| 3abe7850d6 | |||
| b50284d864 | |||
| 81c6e52401 | |||
| 847d257366 | |||
| 687662cf1f | |||
| 6432d98469 | |||
| 088ccf8b8d | |||
| e8683bf957 | |||
| 4653981b6b | |||
| e2547413d3 | |||
| ea17f41b5b | |||
| 29178d8adf | |||
| 7e86ead574 | |||
| 72debcb228 | |||
| 72737dabc7 | |||
| f6e5cb4381 | |||
| ffad3b5fb1 | |||
| cba9fc3020 | |||
| e776accaf3 | |||
| 3eac26929a | |||
| 4d3adec738 | |||
| 89bed479e4 | |||
| fdd673a3a9 | |||
| 22f6d285c7 | |||
| 10aa16b471 | |||
| b3838581fd | |||
| affbe7ccdb | |||
| dd8577f832 | |||
| d7f5da5df4 | |||
| 9fda130b3a | |||
| 72cdbdba0f | |||
| b92a153902 | |||
| 9f2927979b | |||
| 75257232c3 | |||
| 1721314c62 | |||
| fc230bcc59 | |||
| b4636ddf44 | |||
| b1140301a4 | |||
| 58cd785da6 | |||
| 2035186cd2 | |||
| 53ba6aadff | |||
| f091868b7c | |||
| 89bedae0d3 | |||
| c8acc48976 | |||
| 21fee59b22 | |||
| 957a8253f8 | |||
| d5fc3e7bed | |||
| ab438b42da | |||
| 3867fece4a | |||
| 2b908d4fbe | |||
| 8ff062ec8b | |||
| 294fc41aec | |||
| 684f7df158 | |||
| c3287755e3 | |||
| 9f97f4d79e | |||
| 34eb421649 | |||
| 850b05573e | |||
| 6ec8bfdfee | |||
| 81638c248e | |||
| 2e11b1298e | |||
| 20320f3a27 | |||
| 4019c12d26 | |||
| cf72184ce4 | |||
| ca8d15bc64 | |||
| a91c897fd3 | |||
| 816bdf0320 | |||
| d4a6acbd99 | |||
| e421db4005 | |||
| 9067c2a9c1 | |||
| 9f7321ca1a | |||
| 5fa01132b9 | |||
| e082b6d599 | |||
| d44be2d835 | |||
| 7dc8557033 | |||
| 72037a1865 | |||
| 2d1621c43d | |||
| d1a5db3310 | |||
| ad8fd8fecc | |||
| be74b76079 | |||
| dd64af728f | |||
| e43b46786d | |||
| 3f3b37b843 | |||
| 2ecf9f6ddf | |||
| 48c069fe68 | |||
| 9c5c597c85 | |||
| c2eec8545d | |||
| 2395d4be26 | |||
| 9455476705 | |||
| 494e223706 | |||
| 348fd18230 | |||
| 7233b4de55 | |||
| af6df05685 | |||
| 965b65db6e | |||
| 4cc01c8aa8 | |||
| 41372168b6 | |||
| f4438b0a08 | |||
| 897c842637 | |||
| ee86ceb906 | |||
| e298732499 | |||
| 4081937e22 | |||
| f9aedb2118 | |||
| 74b4719af8 | |||
| 2f35cc9188 | |||
| 2f966d8c38 | |||
| b0868d9136 | |||
| 37440e9416 | |||
| 0d7d27ec0b |
@@ -33,6 +33,9 @@ TRIGGER_URL=http://localhost:5001
# The time in seconds after the signature is rejected
FILES_ACCESS_TIMEOUT=300

# Collaboration mode toggle
ENABLE_COLLABORATION_MODE=false

# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60

api/app.py (19)

@@ -1,3 +1,4 @@
import os
import sys

@@ -8,10 +9,15 @@ def is_db_command() -> bool:

# create app
flask_app = None
socketio_app = None

if is_db_command():
    from app_factory import create_migrations_app

    app = create_migrations_app()
    socketio_app = app
    flask_app = app
else:
    # Gunicorn and Celery handle monkey patching automatically in production by
    # specifying the `gevent` worker class. Manual monkey patching is not required here.
@@ -22,8 +28,15 @@ else:

    from app_factory import create_app

    app = create_app()
    celery = app.extensions["celery"]
    socketio_app, flask_app = create_app()
    app = flask_app
    celery = flask_app.extensions["celery"]

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5001)
    from gevent import pywsgi
    from geventwebsocket.handler import WebSocketHandler  # type: ignore[reportMissingTypeStubs]

    host = os.environ.get("HOST", "0.0.0.0")
    port = int(os.environ.get("PORT", 5001))
    server = pywsgi.WSGIServer((host, port), socketio_app, handler_class=WebSocketHandler)
    server.serve_forever()

@@ -1,6 +1,7 @@
import logging
import time

import socketio  # type: ignore[reportMissingTypeStubs]
from opentelemetry.trace import get_current_span
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID

@@ -8,6 +9,7 @@ from configs import dify_config
from contexts.wrapper import RecyclableContextVar
from core.logging.context import init_request_context
from dify_app import DifyApp
from extensions.ext_socketio import sio

logger = logging.getLogger(__name__)

@@ -60,14 +62,18 @@ def create_flask_app_with_configs() -> DifyApp:
    return dify_app


def create_app() -> DifyApp:
def create_app() -> tuple[socketio.WSGIApp, DifyApp]:
    start_time = time.perf_counter()
    app = create_flask_app_with_configs()
    initialize_extensions(app)

    sio.app = app
    socketio_app = socketio.WSGIApp(sio, app)

    end_time = time.perf_counter()
    if dify_config.DEBUG:
        logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
    return app
    return socketio_app, app


def initialize_extensions(app: DifyApp):

@@ -1229,6 +1229,13 @@ class PositionConfig(BaseSettings):
        return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}


class CollaborationConfig(BaseSettings):
    ENABLE_COLLABORATION_MODE: bool = Field(
        description="Whether to enable collaboration mode features across the workspace",
        default=False,
    )


class LoginConfig(BaseSettings):
    ENABLE_EMAIL_CODE_LOGIN: bool = Field(
        description="whether to enable email code login",
@@ -1347,6 +1354,7 @@ class FeatureConfig(
    WorkflowConfig,
    WorkflowNodeExecutionConfig,
    WorkspaceConfig,
    CollaborationConfig,
    LoginConfig,
    AccountConfig,
    SwaggerUIConfig,

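Because CollaborationConfig is mixed into FeatureConfig, the flag is exposed on the same dify_config object as every other feature toggle and is populated from the ENABLE_COLLABORATION_MODE variable added to .env above. A minimal sketch of gating collaboration-only behaviour on it (the helper function name is illustrative, not part of this change):

from configs import dify_config

def collaboration_enabled() -> bool:
    # Read via pydantic-settings; defaults to False when the env var is unset.
    return bool(dify_config.ENABLE_COLLABORATION_MODE)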
@@ -63,6 +63,7 @@ from .app import (
    statistic,
    workflow,
    workflow_app_log,
    workflow_comment,
    workflow_draft_variable,
    workflow_run,
    workflow_statistic,
@@ -114,6 +115,7 @@ from .explore import (
    saved_message,
    trial,
)
from .socketio import workflow as socketio_workflow  # pyright: ignore[reportUnusedImport]

# Import tag controllers
from .tag import tags
@@ -207,6 +209,7 @@ __all__ = [
    "website",
    "workflow",
    "workflow_app_log",
    "workflow_comment",
    "workflow_draft_variable",
    "workflow_run",
    "workflow_statistic",

@@ -32,8 +32,10 @@ from core.trigger.debug.event_selectors import (
from core.workflow.enums import NodeType
from core.workflow.graph_engine.manager import GraphEngineManager
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from factories import file_factory, variable_factory
from fields.member_fields import simple_account_fields
from fields.online_user_fields import online_user_list_fields
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
@@ -43,6 +45,7 @@ from libs.login import current_account_with_tenant, login_required
from models import App
from models.model import AppMode
from models.workflow import Workflow
from repositories.workflow_collaboration_repository import WORKFLOW_ONLINE_USERS_PREFIX
from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowHashNotEqualError
from services.errors.llm import InvokeRateLimitError
@@ -180,6 +183,14 @@ class WorkflowUpdatePayload(BaseModel):
    marked_comment: str | None = Field(default=None, max_length=100)


class WorkflowFeaturesPayload(BaseModel):
    features: dict[str, Any] = Field(..., description="Workflow feature configuration")


class WorkflowOnlineUsersQuery(BaseModel):
    workflow_ids: str = Field(..., description="Comma-separated workflow IDs")


class DraftWorkflowTriggerRunPayload(BaseModel):
    node_id: str

@@ -203,6 +214,8 @@ reg(DefaultBlockConfigQuery)
reg(ConvertToWorkflowPayload)
reg(WorkflowListQuery)
reg(WorkflowUpdatePayload)
reg(WorkflowFeaturesPayload)
reg(WorkflowOnlineUsersQuery)
reg(DraftWorkflowTriggerRunPayload)
reg(DraftWorkflowTriggerRunAllPayload)

@@ -470,7 +483,7 @@ class AdvancedChatDraftRunLoopNodeApi(Resource):
        Run draft workflow loop node
        """
        current_user, _ = current_account_with_tenant()
        args = LoopNodeRunPayload.model_validate(console_ns.payload or {})
        args = LoopNodeRunPayload.model_validate(console_ns.payload or {}).model_dump(exclude_none=True)

        try:
            response = AppGenerateService.generate_single_loop(
@@ -508,7 +521,7 @@ class WorkflowDraftRunLoopNodeApi(Resource):
        Run draft workflow loop node
        """
        current_user, _ = current_account_with_tenant()
        args = LoopNodeRunPayload.model_validate(console_ns.payload or {})
        args = LoopNodeRunPayload.model_validate(console_ns.payload or {}).model_dump(exclude_none=True)

        try:
            response = AppGenerateService.generate_single_loop(
@@ -791,6 +804,31 @@ class ConvertToWorkflowApi(Resource):
        }


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/features")
class WorkflowFeaturesApi(Resource):
    """Update draft workflow features."""

    @console_ns.expect(console_ns.models[WorkflowFeaturesPayload.__name__])
    @console_ns.doc("update_workflow_features")
    @console_ns.doc(description="Update draft workflow features")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Workflow features updated successfully")
    @setup_required
    @login_required
    @account_initialization_required
    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
    def post(self, app_model: App):
        current_user, _ = current_account_with_tenant()

        args = WorkflowFeaturesPayload.model_validate(console_ns.payload or {})
        features = args.features

        workflow_service = WorkflowService()
        workflow_service.update_draft_workflow_features(app_model=app_model, features=features, account=current_user)

        return {"result": "success"}


@console_ns.route("/apps/<uuid:app_id>/workflows")
class PublishedAllWorkflowApi(Resource):
    @console_ns.expect(console_ns.models[WorkflowListQuery.__name__])
@@ -999,7 +1037,6 @@ class DraftWorkflowTriggerRunApi(Resource):
        if not event:
            return jsonable_encoder({"status": "waiting", "retry_in": LISTENING_RETRY_IN})
        workflow_args = dict(event.workflow_args)

        workflow_args[SKIP_PREPARE_USER_INPUTS_KEY] = True
        return helper.compact_generate_response(
            AppGenerateService.generate(
@@ -1148,7 +1185,6 @@ class DraftWorkflowTriggerRunAllApi(Resource):

        try:
            workflow_args = dict(trigger_debug_event.workflow_args)

            workflow_args[SKIP_PREPARE_USER_INPUTS_KEY] = True
            response = AppGenerateService.generate(
                app_model=app_model,
@@ -1168,3 +1204,32 @@ class DraftWorkflowTriggerRunAllApi(Resource):
                    "status": "error",
                }
            ), 400


@console_ns.route("/apps/workflows/online-users")
class WorkflowOnlineUsersApi(Resource):
    @console_ns.expect(console_ns.models[WorkflowOnlineUsersQuery.__name__])
    @console_ns.doc("get_workflow_online_users")
    @console_ns.doc(description="Get workflow online users")
    @setup_required
    @login_required
    @account_initialization_required
    @marshal_with(online_user_list_fields)
    def get(self):
        args = WorkflowOnlineUsersQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore

        workflow_ids = [workflow_id.strip() for workflow_id in args.workflow_ids.split(",") if workflow_id.strip()]

        results = []
        for workflow_id in workflow_ids:
            users_json = redis_client.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}")

            users = []
            for _, user_info_json in users_json.items():
                try:
                    users.append(json.loads(user_info_json))
                except Exception:
                    continue
            results.append({"workflow_id": workflow_id, "users": users})

        return {"data": results}

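For reference, a rough sketch of exercising the two new console endpoints with requests. The base URL prefix and the bearer token are placeholders, and the console API normally sits behind the usual login machinery, so treat this only as an illustration of the request and response shapes defined above.

import requests

BASE = "http://localhost:5001/console/api"  # assumed console API prefix
HEADERS = {"Authorization": "Bearer <console-access-token>"}  # placeholder credential

# Replace the draft workflow's feature configuration.
resp = requests.post(
    f"{BASE}/apps/<app_id>/workflows/draft/features",
    headers=HEADERS,
    json={"features": {"opening_statement": "Hello"}},  # arbitrary example feature dict
)
print(resp.json())  # expected: {"result": "success"}

# Ask which collaborators are currently online for one or more workflows.
resp = requests.get(
    f"{BASE}/apps/workflows/online-users",
    headers=HEADERS,
    params={"workflow_ids": "<workflow-id-1>,<workflow-id-2>"},
)
print(resp.json())  # expected: {"data": [{"workflow_id": "...", "users": [...]}]}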
api/controllers/console/app/workflow_comment.py (317, new file)
@ -0,0 +1,317 @@
|
||||
import logging
|
||||
|
||||
from flask_restx import Resource, fields, marshal_with
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from controllers.console import console_ns
|
||||
from controllers.console.app.wraps import get_app_model
|
||||
from controllers.console.wraps import account_initialization_required, setup_required
|
||||
from fields.member_fields import account_with_role_fields
|
||||
from fields.workflow_comment_fields import (
|
||||
workflow_comment_basic_fields,
|
||||
workflow_comment_create_fields,
|
||||
workflow_comment_detail_fields,
|
||||
workflow_comment_reply_create_fields,
|
||||
workflow_comment_reply_update_fields,
|
||||
workflow_comment_resolve_fields,
|
||||
workflow_comment_update_fields,
|
||||
)
|
||||
from libs.login import current_user, login_required
|
||||
from models import App
|
||||
from services.account_service import TenantService
|
||||
from services.workflow_comment_service import WorkflowCommentService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
|
||||
|
||||
|
||||
class WorkflowCommentCreatePayload(BaseModel):
|
||||
position_x: float = Field(..., description="Comment X position")
|
||||
position_y: float = Field(..., description="Comment Y position")
|
||||
content: str = Field(..., description="Comment content")
|
||||
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
|
||||
|
||||
|
||||
class WorkflowCommentUpdatePayload(BaseModel):
|
||||
content: str = Field(..., description="Comment content")
|
||||
position_x: float | None = Field(default=None, description="Comment X position")
|
||||
position_y: float | None = Field(default=None, description="Comment Y position")
|
||||
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
|
||||
|
||||
|
||||
class WorkflowCommentReplyCreatePayload(BaseModel):
|
||||
content: str = Field(..., description="Reply content")
|
||||
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
|
||||
|
||||
|
||||
class WorkflowCommentReplyUpdatePayload(BaseModel):
|
||||
content: str = Field(..., description="Reply content")
|
||||
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
|
||||
|
||||
|
||||
for model in (
|
||||
WorkflowCommentCreatePayload,
|
||||
WorkflowCommentUpdatePayload,
|
||||
WorkflowCommentReplyCreatePayload,
|
||||
WorkflowCommentReplyUpdatePayload,
|
||||
):
|
||||
console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
|
||||
|
||||
workflow_comment_basic_model = console_ns.model("WorkflowCommentBasic", workflow_comment_basic_fields)
|
||||
workflow_comment_detail_model = console_ns.model("WorkflowCommentDetail", workflow_comment_detail_fields)
|
||||
workflow_comment_create_model = console_ns.model("WorkflowCommentCreate", workflow_comment_create_fields)
|
||||
workflow_comment_update_model = console_ns.model("WorkflowCommentUpdate", workflow_comment_update_fields)
|
||||
workflow_comment_resolve_model = console_ns.model("WorkflowCommentResolve", workflow_comment_resolve_fields)
|
||||
workflow_comment_reply_create_model = console_ns.model(
|
||||
"WorkflowCommentReplyCreate", workflow_comment_reply_create_fields
|
||||
)
|
||||
workflow_comment_reply_update_model = console_ns.model(
|
||||
"WorkflowCommentReplyUpdate", workflow_comment_reply_update_fields
|
||||
)
|
||||
workflow_comment_mention_users_model = console_ns.model(
|
||||
"WorkflowCommentMentionUsers",
|
||||
{"users": fields.List(fields.Nested(account_with_role_fields))},
|
||||
)
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments")
|
||||
class WorkflowCommentListApi(Resource):
|
||||
"""API for listing and creating workflow comments."""
|
||||
|
||||
@console_ns.doc("list_workflow_comments")
|
||||
@console_ns.doc(description="Get all comments for a workflow")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.response(200, "Comments retrieved successfully", workflow_comment_basic_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_basic_model, envelope="data")
|
||||
def get(self, app_model: App):
|
||||
"""Get all comments for a workflow."""
|
||||
comments = WorkflowCommentService.get_comments(tenant_id=current_user.current_tenant_id, app_id=app_model.id)
|
||||
|
||||
return comments
|
||||
|
||||
@console_ns.doc("create_workflow_comment")
|
||||
@console_ns.doc(description="Create a new workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowCommentCreatePayload.__name__])
|
||||
@console_ns.response(201, "Comment created successfully", workflow_comment_create_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_create_model)
|
||||
def post(self, app_model: App):
|
||||
"""Create a new workflow comment."""
|
||||
payload = WorkflowCommentCreatePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
result = WorkflowCommentService.create_comment(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
created_by=current_user.id,
|
||||
content=payload.content,
|
||||
position_x=payload.position_x,
|
||||
position_y=payload.position_y,
|
||||
mentioned_user_ids=payload.mentioned_user_ids,
|
||||
)
|
||||
|
||||
return result, 201
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>")
|
||||
class WorkflowCommentDetailApi(Resource):
|
||||
"""API for managing individual workflow comments."""
|
||||
|
||||
@console_ns.doc("get_workflow_comment")
|
||||
@console_ns.doc(description="Get a specific workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.response(200, "Comment retrieved successfully", workflow_comment_detail_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_detail_model)
|
||||
def get(self, app_model: App, comment_id: str):
|
||||
"""Get a specific workflow comment."""
|
||||
comment = WorkflowCommentService.get_comment(
|
||||
tenant_id=current_user.current_tenant_id, app_id=app_model.id, comment_id=comment_id
|
||||
)
|
||||
|
||||
return comment
|
||||
|
||||
@console_ns.doc("update_workflow_comment")
|
||||
@console_ns.doc(description="Update a workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowCommentUpdatePayload.__name__])
|
||||
@console_ns.response(200, "Comment updated successfully", workflow_comment_update_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_update_model)
|
||||
def put(self, app_model: App, comment_id: str):
|
||||
"""Update a workflow comment."""
|
||||
payload = WorkflowCommentUpdatePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
result = WorkflowCommentService.update_comment(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
comment_id=comment_id,
|
||||
user_id=current_user.id,
|
||||
content=payload.content,
|
||||
position_x=payload.position_x,
|
||||
position_y=payload.position_y,
|
||||
mentioned_user_ids=payload.mentioned_user_ids,
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
@console_ns.doc("delete_workflow_comment")
|
||||
@console_ns.doc(description="Delete a workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.response(204, "Comment deleted successfully")
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
def delete(self, app_model: App, comment_id: str):
|
||||
"""Delete a workflow comment."""
|
||||
WorkflowCommentService.delete_comment(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
comment_id=comment_id,
|
||||
user_id=current_user.id,
|
||||
)
|
||||
|
||||
return {"result": "success"}, 204
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/resolve")
|
||||
class WorkflowCommentResolveApi(Resource):
|
||||
"""API for resolving and reopening workflow comments."""
|
||||
|
||||
@console_ns.doc("resolve_workflow_comment")
|
||||
@console_ns.doc(description="Resolve a workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.response(200, "Comment resolved successfully", workflow_comment_resolve_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_resolve_model)
|
||||
def post(self, app_model: App, comment_id: str):
|
||||
"""Resolve a workflow comment."""
|
||||
comment = WorkflowCommentService.resolve_comment(
|
||||
tenant_id=current_user.current_tenant_id,
|
||||
app_id=app_model.id,
|
||||
comment_id=comment_id,
|
||||
user_id=current_user.id,
|
||||
)
|
||||
|
||||
return comment
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies")
|
||||
class WorkflowCommentReplyApi(Resource):
|
||||
"""API for managing comment replies."""
|
||||
|
||||
@console_ns.doc("create_workflow_comment_reply")
|
||||
@console_ns.doc(description="Add a reply to a workflow comment")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowCommentReplyCreatePayload.__name__])
|
||||
@console_ns.response(201, "Reply created successfully", workflow_comment_reply_create_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_reply_create_model)
|
||||
def post(self, app_model: App, comment_id: str):
|
||||
"""Add a reply to a workflow comment."""
|
||||
# Validate comment access first
|
||||
WorkflowCommentService.validate_comment_access(
|
||||
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
|
||||
)
|
||||
|
||||
payload = WorkflowCommentReplyCreatePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
result = WorkflowCommentService.create_reply(
|
||||
comment_id=comment_id,
|
||||
content=payload.content,
|
||||
created_by=current_user.id,
|
||||
mentioned_user_ids=payload.mentioned_user_ids,
|
||||
)
|
||||
|
||||
return result, 201
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies/<string:reply_id>")
|
||||
class WorkflowCommentReplyDetailApi(Resource):
|
||||
"""API for managing individual comment replies."""
|
||||
|
||||
@console_ns.doc("update_workflow_comment_reply")
|
||||
@console_ns.doc(description="Update a comment reply")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
|
||||
@console_ns.expect(console_ns.models[WorkflowCommentReplyUpdatePayload.__name__])
|
||||
@console_ns.response(200, "Reply updated successfully", workflow_comment_reply_update_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_reply_update_model)
|
||||
def put(self, app_model: App, comment_id: str, reply_id: str):
|
||||
"""Update a comment reply."""
|
||||
# Validate comment access first
|
||||
WorkflowCommentService.validate_comment_access(
|
||||
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
|
||||
)
|
||||
|
||||
payload = WorkflowCommentReplyUpdatePayload.model_validate(console_ns.payload or {})
|
||||
|
||||
reply = WorkflowCommentService.update_reply(
|
||||
reply_id=reply_id,
|
||||
user_id=current_user.id,
|
||||
content=payload.content,
|
||||
mentioned_user_ids=payload.mentioned_user_ids,
|
||||
)
|
||||
|
||||
return reply
|
||||
|
||||
@console_ns.doc("delete_workflow_comment_reply")
|
||||
@console_ns.doc(description="Delete a comment reply")
|
||||
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
|
||||
@console_ns.response(204, "Reply deleted successfully")
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
def delete(self, app_model: App, comment_id: str, reply_id: str):
|
||||
"""Delete a comment reply."""
|
||||
# Validate comment access first
|
||||
WorkflowCommentService.validate_comment_access(
|
||||
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
|
||||
)
|
||||
|
||||
WorkflowCommentService.delete_reply(reply_id=reply_id, user_id=current_user.id)
|
||||
|
||||
return {"result": "success"}, 204
|
||||
|
||||
|
||||
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/mention-users")
|
||||
class WorkflowCommentMentionUsersApi(Resource):
|
||||
"""API for getting mentionable users for workflow comments."""
|
||||
|
||||
@console_ns.doc("workflow_comment_mention_users")
|
||||
@console_ns.doc(description="Get all users in current tenant for mentions")
|
||||
@console_ns.doc(params={"app_id": "Application ID"})
|
||||
@console_ns.response(200, "Mentionable users retrieved successfully", workflow_comment_mention_users_model)
|
||||
@login_required
|
||||
@setup_required
|
||||
@account_initialization_required
|
||||
@get_app_model()
|
||||
@marshal_with(workflow_comment_mention_users_model)
|
||||
def get(self, app_model: App):
|
||||
"""Get all users in current tenant for mentions."""
|
||||
members = TenantService.get_tenant_members(current_user.current_tenant)
|
||||
return {"users": members}
|
||||
@@ -21,9 +21,9 @@ from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
from factories import variable_factory
from factories.file_factory import build_from_mapping, build_from_mappings
from factories.variable_factory import build_segment_with_type
from libs.login import login_required
from libs.login import current_user, login_required
from models import App, AppMode
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
@@ -43,6 +43,16 @@ class WorkflowDraftVariableUpdatePayload(BaseModel):
    value: Any | None = Field(default=None, description="Variable value")


class ConversationVariableUpdatePayload(BaseModel):
    conversation_variables: list[dict[str, Any]] = Field(
        ..., description="Conversation variables for the draft workflow"
    )


class EnvironmentVariableUpdatePayload(BaseModel):
    environment_variables: list[dict[str, Any]] = Field(..., description="Environment variables for the draft workflow")


console_ns.schema_model(
    WorkflowDraftVariableListQuery.__name__,
    WorkflowDraftVariableListQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
@@ -51,6 +61,14 @@ console_ns.schema_model(
    WorkflowDraftVariableUpdatePayload.__name__,
    WorkflowDraftVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
    ConversationVariableUpdatePayload.__name__,
    ConversationVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
    EnvironmentVariableUpdatePayload.__name__,
    EnvironmentVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)


def _convert_values_to_json_serializable_object(value: Segment):
@@ -383,7 +401,7 @@ class VariableApi(Resource):
            if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
                raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
            raw_value = build_from_mappings(mappings=raw_value, tenant_id=app_model.tenant_id)
        new_value = build_segment_with_type(variable.value_type, raw_value)
        new_value = variable_factory.build_segment_with_type(variable.value_type, raw_value)
        draft_var_srv.update_variable(variable, name=new_name, value=new_value)
        db.session.commit()
        return variable
@@ -476,6 +494,34 @@ class ConversationVariableCollectionApi(Resource):
        db.session.commit()
        return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)

    @console_ns.expect(console_ns.models[ConversationVariableUpdatePayload.__name__])
    @console_ns.doc("update_conversation_variables")
    @console_ns.doc(description="Update conversation variables for workflow draft")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Conversation variables updated successfully")
    @setup_required
    @login_required
    @account_initialization_required
    @edit_permission_required
    @get_app_model(mode=AppMode.ADVANCED_CHAT)
    def post(self, app_model: App):
        payload = ConversationVariableUpdatePayload.model_validate(console_ns.payload or {})

        workflow_service = WorkflowService()

        conversation_variables_list = payload.conversation_variables
        conversation_variables = [
            variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
        ]

        workflow_service.update_draft_workflow_conversation_variables(
            app_model=app_model,
            account=current_user,
            conversation_variables=conversation_variables,
        )

        return {"result": "success"}


@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
@@ -527,3 +573,31 @@ class EnvironmentVariableCollectionApi(Resource):
        )

        return {"items": env_vars_list}

    @console_ns.expect(console_ns.models[EnvironmentVariableUpdatePayload.__name__])
    @console_ns.doc("update_environment_variables")
    @console_ns.doc(description="Update environment variables for workflow draft")
    @console_ns.doc(params={"app_id": "Application ID"})
    @console_ns.response(200, "Environment variables updated successfully")
    @setup_required
    @login_required
    @account_initialization_required
    @edit_permission_required
    @get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
    def post(self, app_model: App):
        payload = EnvironmentVariableUpdatePayload.model_validate(console_ns.payload or {})

        workflow_service = WorkflowService()

        environment_variables_list = payload.environment_variables
        environment_variables = [
            variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
        ]

        workflow_service.update_draft_workflow_environment_variables(
            app_model=app_model,
            account=current_user,
            environment_variables=environment_variables,
        )

        return {"result": "success"}

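Both new POST handlers accept the full replacement list for the draft workflow's conversation or environment variables. The per-item keys are defined by variable_factory's mapping builders, not by this diff, so the inner fields in the sketch below are assumptions; only the top-level payload shape is taken from the payload models above.

# Hypothetical request body for the environment-variable update endpoint above.
payload = {
    "environment_variables": [
        # Each item is whatever build_environment_variable_from_mapping expects;
        # these keys are illustrative, not taken from this diff.
        {"name": "API_BASE", "value": "https://example.com", "value_type": "string"},
    ]
}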
api/controllers/console/socketio/__init__.py (1, new file)

@@ -0,0 +1 @@

api/controllers/console/socketio/workflow.py (108, new file)

@@ -0,0 +1,108 @@
import logging
from collections.abc import Callable
from typing import cast

from flask import Request as FlaskRequest

from extensions.ext_socketio import sio
from libs.passport import PassportService
from libs.token import extract_access_token
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
from services.account_service import AccountService
from services.workflow_collaboration_service import WorkflowCollaborationService

repository = WorkflowCollaborationRepository()
collaboration_service = WorkflowCollaborationService(repository, sio)


def _sio_on(event: str) -> Callable[[Callable[..., object]], Callable[..., object]]:
    return cast(Callable[[Callable[..., object]], Callable[..., object]], sio.on(event))


@_sio_on("connect")
def socket_connect(sid, environ, auth):
    """
    WebSocket connect event, do authentication here.
    """
    try:
        request_environ = FlaskRequest(environ)
        token = extract_access_token(request_environ)
    except Exception:
        logging.exception("Failed to extract token")
        token = None

    if not token:
        logging.warning("Socket connect rejected: missing token (sid=%s)", sid)
        return False

    try:
        decoded = PassportService().verify(token)
        user_id = decoded.get("user_id")
        if not user_id:
            logging.warning("Socket connect rejected: missing user_id (sid=%s)", sid)
            return False

        with sio.app.app_context():
            user = AccountService.load_logged_in_account(account_id=user_id)
            if not user:
                logging.warning("Socket connect rejected: user not found (user_id=%s, sid=%s)", user_id, sid)
                return False
            if not user.has_edit_permission:
                logging.warning("Socket connect rejected: no edit permission (user_id=%s, sid=%s)", user_id, sid)
                return False

            collaboration_service.save_session(sid, user)
            return True

    except Exception:
        logging.exception("Socket authentication failed")
        return False


@_sio_on("user_connect")
def handle_user_connect(sid, data):
    """
    Handle user connect event. Each session (tab) is treated as an independent collaborator.
    """
    workflow_id = data.get("workflow_id")
    if not workflow_id:
        return {"msg": "workflow_id is required"}, 400

    result = collaboration_service.register_session(workflow_id, sid)
    if not result:
        return {"msg": "unauthorized"}, 401

    user_id, is_leader = result
    return {"msg": "connected", "user_id": user_id, "sid": sid, "isLeader": is_leader}


@_sio_on("disconnect")
def handle_disconnect(sid):
    """
    Handle session disconnect event. Remove the specific session from online users.
    """
    collaboration_service.disconnect_session(sid)


@_sio_on("collaboration_event")
def handle_collaboration_event(sid, data):
    """
    Handle general collaboration events, include:
    1. mouse_move
    2. vars_and_features_update
    3. sync_request (ask leader to update graph)
    4. app_state_update
    5. mcp_server_update
    6. workflow_update
    7. comments_update
    8. node_panel_presence
    """
    return collaboration_service.relay_collaboration_event(sid, data)


@_sio_on("graph_event")
def handle_graph_event(sid, data):
    """
    Handle graph events - simple broadcast relay.
    """
    return collaboration_service.relay_graph_event(sid, data)

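To see how these handlers are driven, here is a rough client-side sketch using the python-socketio client. How the access token is transported (query string, header, or auth payload) depends on extract_access_token, so the token placement below is an assumption; the event names and ack shapes come from the handlers above.

import socketio  # python-socketio client package

sio = socketio.Client()

@sio.on("collaboration_event")
def on_collaboration_event(data):
    # Mouse moves, feature updates, comment updates, etc. relayed by the server.
    print("collaboration event:", data)

# Token placement is an assumption; the server pulls it out of the WSGI environ.
sio.connect(
    "http://localhost:5001",
    headers={"Authorization": "Bearer <console-access-token>"},
    transports=["websocket"],
)

# Register this tab as a collaborator on a specific workflow and wait for the ack.
ack = sio.call("user_connect", {"workflow_id": "<workflow-id>"})
print(ack)  # on success: {"msg": "connected", "user_id": ..., "sid": ..., "isLeader": ...}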
@@ -36,6 +36,7 @@ from controllers.console.wraps import (
    only_edition_cloud,
    setup_required,
)
from core.file import helpers as file_helpers
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.datetime_utils import naive_utc_now
@@ -73,6 +74,10 @@ class AccountAvatarPayload(BaseModel):
    avatar: str


class AccountAvatarQuery(BaseModel):
    avatar: str = Field(..., description="Avatar file ID")


class AccountInterfaceLanguagePayload(BaseModel):
    interface_language: str

@@ -158,6 +163,7 @@ def reg(cls: type[BaseModel]):
reg(AccountInitPayload)
reg(AccountNamePayload)
reg(AccountAvatarPayload)
reg(AccountAvatarQuery)
reg(AccountInterfaceLanguagePayload)
reg(AccountInterfaceThemePayload)
reg(AccountTimezonePayload)
@@ -248,6 +254,18 @@ class AccountNameApi(Resource):

@console_ns.route("/account/avatar")
class AccountAvatarApi(Resource):
    @console_ns.expect(console_ns.models[AccountAvatarQuery.__name__])
    @console_ns.doc("get_account_avatar")
    @console_ns.doc(description="Get account avatar url")
    @setup_required
    @login_required
    @account_initialization_required
    def get(self):
        args = AccountAvatarQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore

        avatar_url = file_helpers.get_signed_file_url(args.avatar)
        return {"avatar_url": avatar_url}

    @console_ns.expect(console_ns.models[AccountAvatarPayload.__name__])
    @setup_required
    @login_required

@@ -1,11 +1,9 @@
from __future__ import annotations

import contextvars
import logging
import threading
import uuid
from collections.abc import Generator, Mapping
from typing import TYPE_CHECKING, Any, Literal, Union, overload
from typing import Any, Literal, Union, overload

from flask import Flask, current_app
from pydantic import ValidationError
@@ -15,9 +13,6 @@ from sqlalchemy.orm import Session, sessionmaker
import contexts
from configs import dify_config
from constants import UUID_NIL

if TYPE_CHECKING:
    from controllers.console.app.workflow import LoopNodeRunPayload
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.apps.advanced_chat.app_config_manager import AdvancedChatAppConfigManager
from core.app.apps.advanced_chat.app_runner import AdvancedChatAppRunner
@@ -309,7 +304,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        workflow: Workflow,
        node_id: str,
        user: Account | EndUser,
        args: LoopNodeRunPayload,
        args: Mapping,
        streaming: bool = True,
    ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], Any, None]:
        """
@@ -325,7 +320,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
        if not node_id:
            raise ValueError("node_id is required")

        if args.inputs is None:
        if args.get("inputs") is None:
            raise ValueError("inputs is required")

        # convert to app config
@@ -343,7 +338,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator):
            stream=streaming,
            invoke_from=InvokeFrom.DEBUGGER,
            extras={"auto_generate_conversation_name": False},
            single_loop_run=AdvancedChatAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args.inputs),
            single_loop_run=AdvancedChatAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
        )
        contexts.plugin_tool_providers.set({})
        contexts.plugin_tool_providers_lock.set(threading.Lock())

@@ -1,11 +1,9 @@
from __future__ import annotations

import contextvars
import logging
import threading
import uuid
from collections.abc import Generator, Mapping, Sequence
from typing import TYPE_CHECKING, Any, Literal, Union, overload
from typing import Any, Literal, Union, overload

from flask import Flask, current_app
from pydantic import ValidationError
@@ -42,9 +40,6 @@ from models import Account, App, EndUser, Workflow, WorkflowNodeExecutionTrigger
from models.enums import WorkflowRunTriggeredFrom
from services.workflow_draft_variable_service import DraftVarLoader, WorkflowDraftVariableService

if TYPE_CHECKING:
    from controllers.console.app.workflow import LoopNodeRunPayload

SKIP_PREPARE_USER_INPUTS_KEY = "_skip_prepare_user_inputs"

logger = logging.getLogger(__name__)
@@ -386,7 +381,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
        workflow: Workflow,
        node_id: str,
        user: Account | EndUser,
        args: LoopNodeRunPayload,
        args: Mapping[str, Any],
        streaming: bool = True,
    ) -> Mapping[str, Any] | Generator[str | Mapping[str, Any], None, None]:
        """
@@ -402,7 +397,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
        if not node_id:
            raise ValueError("node_id is required")

        if args.inputs is None:
        if args.get("inputs") is None:
            raise ValueError("inputs is required")

        # convert to app config
@@ -418,7 +413,7 @@ class WorkflowAppGenerator(BaseAppGenerator):
            stream=streaming,
            invoke_from=InvokeFrom.DEBUGGER,
            extras={"auto_generate_conversation_name": False},
            single_loop_run=WorkflowAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args.inputs or {}),
            single_loop_run=WorkflowAppGenerateEntity.SingleLoopRunEntity(node_id=node_id, inputs=args["inputs"]),
            workflow_execution_id=str(uuid.uuid4()),
        )
        contexts.plugin_tool_providers.set({})

@@ -119,14 +119,16 @@ elif [[ "${MODE}" == "job" ]]; then

else
  if [[ "${DEBUG}" == "true" ]]; then
    exec flask run --host=${DIFY_BIND_ADDRESS:-0.0.0.0} --port=${DIFY_PORT:-5001} --debug
    export HOST=${DIFY_BIND_ADDRESS:-0.0.0.0}
    export PORT=${DIFY_PORT:-5001}
    exec python -m app
  else
    exec gunicorn \
      --bind "${DIFY_BIND_ADDRESS:-0.0.0.0}:${DIFY_PORT:-5001}" \
      --workers ${SERVER_WORKER_AMOUNT:-1} \
      --worker-class ${SERVER_WORKER_CLASS:-gevent} \
      --worker-class ${SERVER_WORKER_CLASS:-geventwebsocket.gunicorn.workers.GeventWebSocketWorker} \
      --worker-connections ${SERVER_WORKER_CONNECTIONS:-10} \
      --timeout ${GUNICORN_TIMEOUT:-200} \
      app:app
      app:socketio_app
  fi
fi

@@ -1,21 +0,0 @@
from enum import StrEnum


class HostedTrialProvider(StrEnum):
    """
    Enum representing hosted model provider names for trial access.
    """

    OPENAI = "langgenius/openai/openai"
    ANTHROPIC = "langgenius/anthropic/anthropic"
    GEMINI = "langgenius/gemini/google"
    X = "langgenius/x/x"
    DEEPSEEK = "langgenius/deepseek/deepseek"
    TONGYI = "langgenius/tongyi/tongyi"

    @property
    def config_key(self) -> str:
        """Return the config key used in dify_config (e.g., HOSTED_{config_key}_PAID_ENABLED)."""
        if self == HostedTrialProvider.X:
            return "XAI"
        return self.name
api/extensions/ext_socketio.py (5, new file)

@@ -0,0 +1,5 @@
import socketio  # type: ignore[reportMissingTypeStubs]

from configs import dify_config

sio = socketio.Server(async_mode="gevent", cors_allowed_origins=dify_config.CONSOLE_CORS_ALLOW_ORIGINS)
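This shared Server instance is what both the WSGIApp wrapper in the app factory and the handlers under controllers/console/socketio import. A minimal sketch of attaching a further handler to it, with a made-up event name for illustration:

from extensions.ext_socketio import sio

@sio.on("ping")  # hypothetical event, not part of this change
def handle_ping(sid, data):
    # python-socketio passes the session id plus the client payload;
    # the return value is delivered to the client as the ack.
    return {"msg": "pong"}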
api/fields/online_user_fields.py (17, new file)

@@ -0,0 +1,17 @@
from flask_restx import fields

online_user_partial_fields = {
    "user_id": fields.String,
    "username": fields.String,
    "avatar": fields.String,
    "sid": fields.String,
}

workflow_online_users_fields = {
    "workflow_id": fields.String,
    "users": fields.List(fields.Nested(online_user_partial_fields)),
}

online_user_list_fields = {
    "data": fields.List(fields.Nested(workflow_online_users_fields)),
}
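These field definitions marshal the WorkflowOnlineUsersApi response into roughly the following shape (values are illustrative placeholders):

# Example of a marshaled online-users response body.
{
    "data": [
        {
            "workflow_id": "<workflow-id>",
            "users": [
                {"user_id": "<account-id>", "username": "alice", "avatar": "<file-id>", "sid": "<socket-sid>"},
            ],
        },
    ]
}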
api/fields/workflow_comment_fields.py (96, new file)
@ -0,0 +1,96 @@
|
||||
from flask_restx import fields
|
||||
|
||||
from libs.helper import AvatarUrlField, TimestampField
|
||||
|
||||
# basic account fields for comments
|
||||
account_fields = {
|
||||
"id": fields.String,
|
||||
"name": fields.String,
|
||||
"email": fields.String,
|
||||
"avatar_url": AvatarUrlField,
|
||||
}
|
||||
|
||||
# Comment mention fields
|
||||
workflow_comment_mention_fields = {
|
||||
"mentioned_user_id": fields.String,
|
||||
"mentioned_user_account": fields.Nested(account_fields, allow_null=True),
|
||||
"reply_id": fields.String,
|
||||
}
|
||||
|
||||
# Comment reply fields
|
||||
workflow_comment_reply_fields = {
|
||||
"id": fields.String,
|
||||
"content": fields.String,
|
||||
"created_by": fields.String,
|
||||
"created_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
# Basic comment fields (for list views)
|
||||
workflow_comment_basic_fields = {
|
||||
"id": fields.String,
|
||||
"position_x": fields.Float,
|
||||
"position_y": fields.Float,
|
||||
"content": fields.String,
|
||||
"created_by": fields.String,
|
||||
"created_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
"updated_at": TimestampField,
|
||||
"resolved": fields.Boolean,
|
||||
"resolved_at": TimestampField,
|
||||
"resolved_by": fields.String,
|
||||
"resolved_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"reply_count": fields.Integer,
|
||||
"mention_count": fields.Integer,
|
||||
"participants": fields.List(fields.Nested(account_fields)),
|
||||
}
|
||||
|
||||
# Detailed comment fields (for single comment view)
|
||||
workflow_comment_detail_fields = {
|
||||
"id": fields.String,
|
||||
"position_x": fields.Float,
|
||||
"position_y": fields.Float,
|
||||
"content": fields.String,
|
||||
"created_by": fields.String,
|
||||
"created_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"created_at": TimestampField,
|
||||
"updated_at": TimestampField,
|
||||
"resolved": fields.Boolean,
|
||||
"resolved_at": TimestampField,
|
||||
"resolved_by": fields.String,
|
||||
"resolved_by_account": fields.Nested(account_fields, allow_null=True),
|
||||
"replies": fields.List(fields.Nested(workflow_comment_reply_fields)),
|
||||
"mentions": fields.List(fields.Nested(workflow_comment_mention_fields)),
|
||||
}
|
||||
|
||||
# Comment creation response fields (simplified)
|
||||
workflow_comment_create_fields = {
|
||||
"id": fields.String,
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
# Comment update response fields (simplified)
|
||||
workflow_comment_update_fields = {
|
||||
"id": fields.String,
|
||||
"updated_at": TimestampField,
|
||||
}
|
||||
|
||||
# Comment resolve response fields
|
||||
workflow_comment_resolve_fields = {
|
||||
"id": fields.String,
|
||||
"resolved": fields.Boolean,
|
||||
"resolved_at": TimestampField,
|
||||
"resolved_by": fields.String,
|
||||
}
|
||||
|
||||
# Reply creation response fields (simplified)
|
||||
workflow_comment_reply_create_fields = {
|
||||
"id": fields.String,
|
||||
"created_at": TimestampField,
|
||||
}
|
||||
|
||||
# Reply update response fields
|
||||
workflow_comment_reply_update_fields = {
|
||||
"id": fields.String,
|
||||
"updated_at": TimestampField,
|
||||
}
|
||||
@@ -0,0 +1,90 @@
"""Add workflow comments table

Revision ID: 227822d22895
Revises: 9d77545f524e
Create Date: 2025-08-22 17:26:15.255980

"""
from alembic import op
import models as models
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = '227822d22895'
down_revision = '9d77545f524e'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('workflow_comments',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
    sa.Column('app_id', models.types.StringUUID(), nullable=False),
    sa.Column('position_x', sa.Float(), nullable=False),
    sa.Column('position_y', sa.Float(), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('resolved', sa.Boolean(), server_default=sa.text('false'), nullable=False),
    sa.Column('resolved_at', sa.DateTime(), nullable=True),
    sa.Column('resolved_by', models.types.StringUUID(), nullable=True),
    sa.PrimaryKeyConstraint('id', name='workflow_comments_pkey')
    )
    with op.batch_alter_table('workflow_comments', schema=None) as batch_op:
        batch_op.create_index('workflow_comments_app_idx', ['tenant_id', 'app_id'], unique=False)
        batch_op.create_index('workflow_comments_created_at_idx', ['created_at'], unique=False)

    op.create_table('workflow_comment_replies',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('comment_id', models.types.StringUUID(), nullable=False),
    sa.Column('content', sa.Text(), nullable=False),
    sa.Column('created_by', models.types.StringUUID(), nullable=False),
    sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
    sa.ForeignKeyConstraint(['comment_id'], ['workflow_comments.id'], name=op.f('workflow_comment_replies_comment_id_fkey'), ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name='workflow_comment_replies_pkey')
    )
    with op.batch_alter_table('workflow_comment_replies', schema=None) as batch_op:
        batch_op.create_index('comment_replies_comment_idx', ['comment_id'], unique=False)
        batch_op.create_index('comment_replies_created_at_idx', ['created_at'], unique=False)

    op.create_table('workflow_comment_mentions',
    sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
    sa.Column('comment_id', models.types.StringUUID(), nullable=False),
    sa.Column('reply_id', models.types.StringUUID(), nullable=True),
    sa.Column('mentioned_user_id', models.types.StringUUID(), nullable=False),
    sa.ForeignKeyConstraint(['comment_id'], ['workflow_comments.id'], name=op.f('workflow_comment_mentions_comment_id_fkey'), ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['reply_id'], ['workflow_comment_replies.id'], name=op.f('workflow_comment_mentions_reply_id_fkey'), ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name='workflow_comment_mentions_pkey')
    )
    with op.batch_alter_table('workflow_comment_mentions', schema=None) as batch_op:
        batch_op.create_index('comment_mentions_comment_idx', ['comment_id'], unique=False)
        batch_op.create_index('comment_mentions_reply_idx', ['reply_id'], unique=False)
        batch_op.create_index('comment_mentions_user_idx', ['mentioned_user_id'], unique=False)

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('workflow_comment_mentions', schema=None) as batch_op:
        batch_op.drop_index('comment_mentions_user_idx')
        batch_op.drop_index('comment_mentions_reply_idx')
        batch_op.drop_index('comment_mentions_comment_idx')

    op.drop_table('workflow_comment_mentions')
    with op.batch_alter_table('workflow_comment_replies', schema=None) as batch_op:
        batch_op.drop_index('comment_replies_created_at_idx')
        batch_op.drop_index('comment_replies_comment_idx')

    op.drop_table('workflow_comment_replies')
    with op.batch_alter_table('workflow_comments', schema=None) as batch_op:
        batch_op.drop_index('workflow_comments_created_at_idx')
        batch_op.drop_index('workflow_comments_app_idx')

    op.drop_table('workflow_comments')
    # ### end Alembic commands ###
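A hedged sketch of applying this revision with Alembic's Python command API; the `alembic.ini` path is an assumption, and the project's own migration entry point may differ:

```python
# Hypothetical sketch: upgrade to revision 227822d22895, or roll back to its parent.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")            # assumed config location
command.upgrade(cfg, "227822d22895")   # or "head"
# command.downgrade(cfg, "9d77545f524e")  # reverts the three comment tables
```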
@@ -9,6 +9,11 @@ from .account import (
    TenantStatus,
)
from .api_based_extension import APIBasedExtension, APIBasedExtensionPoint
from .comment import (
    WorkflowComment,
    WorkflowCommentMention,
    WorkflowCommentReply,
)
from .dataset import (
    AppDatasetJoin,
    Dataset,
@@ -205,6 +210,9 @@ __all__ = [
    "WorkflowAppLog",
    "WorkflowAppLogCreatedFrom",
    "WorkflowArchiveLog",
    "WorkflowComment",
    "WorkflowCommentMention",
    "WorkflowCommentReply",
    "WorkflowNodeExecutionModel",
    "WorkflowNodeExecutionOffload",
    "WorkflowNodeExecutionTriggeredFrom",
api/models/comment.py (Normal file, 210 lines)
@@ -0,0 +1,210 @@
"""Workflow comment models."""

from datetime import datetime
from typing import Optional

from sqlalchemy import Index, func
from sqlalchemy.orm import Mapped, mapped_column, relationship

from .account import Account
from .base import Base
from .engine import db
from .types import StringUUID


class WorkflowComment(Base):
    """Workflow comment model for canvas commenting functionality.

    Comments are associated with apps rather than specific workflow versions,
    since an app has only one draft workflow at a time and comments should persist
    across workflow version changes.

    Attributes:
        id: Comment ID
        tenant_id: Workspace ID
        app_id: App ID (primary association, comments belong to apps)
        position_x: X coordinate on canvas
        position_y: Y coordinate on canvas
        content: Comment content
        created_by: Creator account ID
        created_at: Creation time
        updated_at: Last update time
        resolved: Whether comment is resolved
        resolved_at: Resolution time
        resolved_by: Resolver account ID
    """

    __tablename__ = "workflow_comments"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="workflow_comments_pkey"),
        Index("workflow_comments_app_idx", "tenant_id", "app_id"),
        Index("workflow_comments_created_at_idx", "created_at"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
    tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
    position_x: Mapped[float] = mapped_column(db.Float)
    position_y: Mapped[float] = mapped_column(db.Float)
    content: Mapped[str] = mapped_column(db.Text, nullable=False)
    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    updated_at: Mapped[datetime] = mapped_column(
        db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
    )
    resolved: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
    resolved_at: Mapped[datetime | None] = mapped_column(db.DateTime)
    resolved_by: Mapped[str | None] = mapped_column(StringUUID)

    # Relationships
    replies: Mapped[list["WorkflowCommentReply"]] = relationship(
        "WorkflowCommentReply", back_populates="comment", cascade="all, delete-orphan"
    )
    mentions: Mapped[list["WorkflowCommentMention"]] = relationship(
        "WorkflowCommentMention", back_populates="comment", cascade="all, delete-orphan"
    )

    @property
    def created_by_account(self):
        """Get creator account."""
        if hasattr(self, "_created_by_account_cache"):
            return self._created_by_account_cache
        return db.session.get(Account, self.created_by)

    def cache_created_by_account(self, account: Account | None) -> None:
        """Cache creator account to avoid extra queries."""
        self._created_by_account_cache = account

    @property
    def resolved_by_account(self):
        """Get resolver account."""
        if hasattr(self, "_resolved_by_account_cache"):
            return self._resolved_by_account_cache
        if self.resolved_by:
            return db.session.get(Account, self.resolved_by)
        return None

    def cache_resolved_by_account(self, account: Account | None) -> None:
        """Cache resolver account to avoid extra queries."""
        self._resolved_by_account_cache = account

    @property
    def reply_count(self):
        """Get reply count."""
        return len(self.replies)

    @property
    def mention_count(self):
        """Get mention count."""
        return len(self.mentions)

    @property
    def participants(self):
        """Get all participants (creator + repliers + mentioned users)."""
        participant_ids = set()

        # Add comment creator
        participant_ids.add(self.created_by)

        # Add reply creators
        participant_ids.update(reply.created_by for reply in self.replies)

        # Add mentioned users
        participant_ids.update(mention.mentioned_user_id for mention in self.mentions)

        # Get account objects
        participants = []
        for user_id in participant_ids:
            account = db.session.get(Account, user_id)
            if account:
                participants.append(account)

        return participants


class WorkflowCommentReply(Base):
    """Workflow comment reply model.

    Attributes:
        id: Reply ID
        comment_id: Parent comment ID
        content: Reply content
        created_by: Creator account ID
        created_at: Creation time
    """

    __tablename__ = "workflow_comment_replies"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="workflow_comment_replies_pkey"),
        Index("comment_replies_comment_idx", "comment_id"),
        Index("comment_replies_created_at_idx", "created_at"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
    comment_id: Mapped[str] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comments.id", ondelete="CASCADE"), nullable=False
    )
    content: Mapped[str] = mapped_column(db.Text, nullable=False)
    created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
    created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
    updated_at: Mapped[datetime] = mapped_column(
        db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
    )
    # Relationships
    comment: Mapped["WorkflowComment"] = relationship("WorkflowComment", back_populates="replies")

    @property
    def created_by_account(self):
        """Get creator account."""
        if hasattr(self, "_created_by_account_cache"):
            return self._created_by_account_cache
        return db.session.get(Account, self.created_by)

    def cache_created_by_account(self, account: Account | None) -> None:
        """Cache creator account to avoid extra queries."""
        self._created_by_account_cache = account


class WorkflowCommentMention(Base):
    """Workflow comment mention model.

    Mentions are only for internal accounts since end users
    cannot access workflow canvas and commenting features.

    Attributes:
        id: Mention ID
        comment_id: Parent comment ID
        mentioned_user_id: Mentioned account ID
    """

    __tablename__ = "workflow_comment_mentions"
    __table_args__ = (
        db.PrimaryKeyConstraint("id", name="workflow_comment_mentions_pkey"),
        Index("comment_mentions_comment_idx", "comment_id"),
        Index("comment_mentions_reply_idx", "reply_id"),
        Index("comment_mentions_user_idx", "mentioned_user_id"),
    )

    id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
    comment_id: Mapped[str] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comments.id", ondelete="CASCADE"), nullable=False
    )
    reply_id: Mapped[str | None] = mapped_column(
        StringUUID, db.ForeignKey("workflow_comment_replies.id", ondelete="CASCADE"), nullable=True
    )
    mentioned_user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)

    # Relationships
    comment: Mapped["WorkflowComment"] = relationship("WorkflowComment", back_populates="mentions")
    reply: Mapped[Optional["WorkflowCommentReply"]] = relationship("WorkflowCommentReply")

    @property
    def mentioned_user_account(self):
        """Get mentioned account."""
        if hasattr(self, "_mentioned_user_account_cache"):
            return self._mentioned_user_account_cache
        return db.session.get(Account, self.mentioned_user_id)

    def cache_mentioned_user_account(self, account: Account | None) -> None:
        """Cache mentioned account to avoid extra queries."""
        self._mentioned_user_account_cache = account
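The `cache_*_account` helpers exist so callers can attach preloaded `Account` rows and skip the per-property `db.session.get` lookup. A minimal sketch of the intended pattern; `comments` and `session` here are assumed caller-side variables, not part of this diff:

```python
# Hypothetical sketch: preload creators once, then read the cached property without extra queries.
from sqlalchemy import select

account_ids = {c.created_by for c in comments}
account_map = {a.id: a for a in session.scalars(select(Account).where(Account.id.in_(account_ids)))}

for c in comments:
    c.cache_created_by_account(account_map.get(c.created_by))
    creator = c.created_by_account  # served from the cache, no query issued
```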
@@ -400,7 +400,7 @@ class Workflow(Base): # bug

        :return: hash
        """
        entity = {"graph": self.graph_dict, "features": self.features_dict}
        entity = {"graph": self.graph_dict}

        return helper.generate_text_hash(json.dumps(entity, sort_keys=True))
@@ -21,6 +21,7 @@ dependencies = [
    "flask-orjson~=2.0.0",
    "flask-sqlalchemy~=3.1.1",
    "gevent~=25.9.1",
    "gevent-websocket~=0.10.1",
    "gmpy2~=2.2.1",
    "google-api-core==2.18.0",
    "google-api-python-client==2.90.0",
@@ -72,6 +73,7 @@ dependencies = [
    "pypdfium2==5.2.0",
    "python-docx~=1.1.0",
    "python-dotenv==1.0.1",
    "python-socketio~=5.13.0",
    "pyyaml~=6.0.1",
    "readabilipy~=0.3.0",
    "redis[hiredis]~=6.1.0",
api/repositories/workflow_collaboration_repository.py (Normal file, 147 lines)
@@ -0,0 +1,147 @@
from __future__ import annotations

import json
from typing import TypedDict

from extensions.ext_redis import redis_client

SESSION_STATE_TTL_SECONDS = 3600
WORKFLOW_ONLINE_USERS_PREFIX = "workflow_online_users:"
WORKFLOW_LEADER_PREFIX = "workflow_leader:"
WS_SID_MAP_PREFIX = "ws_sid_map:"


class WorkflowSessionInfo(TypedDict):
    user_id: str
    username: str
    avatar: str | None
    sid: str
    connected_at: int


class SidMapping(TypedDict):
    workflow_id: str
    user_id: str


class WorkflowCollaborationRepository:
    def __init__(self) -> None:
        self._redis = redis_client

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(redis_client={self._redis})"

    @staticmethod
    def workflow_key(workflow_id: str) -> str:
        return f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}"

    @staticmethod
    def leader_key(workflow_id: str) -> str:
        return f"{WORKFLOW_LEADER_PREFIX}{workflow_id}"

    @staticmethod
    def sid_key(sid: str) -> str:
        return f"{WS_SID_MAP_PREFIX}{sid}"

    @staticmethod
    def _decode(value: str | bytes | None) -> str | None:
        if value is None:
            return None
        if isinstance(value, bytes):
            return value.decode("utf-8")
        return value

    def refresh_session_state(self, workflow_id: str, sid: str) -> None:
        workflow_key = self.workflow_key(workflow_id)
        sid_key = self.sid_key(sid)
        if self._redis.exists(workflow_key):
            self._redis.expire(workflow_key, SESSION_STATE_TTL_SECONDS)
        if self._redis.exists(sid_key):
            self._redis.expire(sid_key, SESSION_STATE_TTL_SECONDS)

    def set_session_info(self, workflow_id: str, session_info: WorkflowSessionInfo) -> None:
        workflow_key = self.workflow_key(workflow_id)
        self._redis.hset(workflow_key, session_info["sid"], json.dumps(session_info))
        self._redis.set(
            self.sid_key(session_info["sid"]),
            json.dumps({"workflow_id": workflow_id, "user_id": session_info["user_id"]}),
            ex=SESSION_STATE_TTL_SECONDS,
        )
        self.refresh_session_state(workflow_id, session_info["sid"])

    def get_sid_mapping(self, sid: str) -> SidMapping | None:
        raw = self._redis.get(self.sid_key(sid))
        if not raw:
            return None
        value = self._decode(raw)
        if not value:
            return None
        try:
            return json.loads(value)
        except (TypeError, json.JSONDecodeError):
            return None

    def delete_session(self, workflow_id: str, sid: str) -> None:
        self._redis.hdel(self.workflow_key(workflow_id), sid)
        self._redis.delete(self.sid_key(sid))

    def session_exists(self, workflow_id: str, sid: str) -> bool:
        return bool(self._redis.hexists(self.workflow_key(workflow_id), sid))

    def sid_mapping_exists(self, sid: str) -> bool:
        return bool(self._redis.exists(self.sid_key(sid)))

    def get_session_sids(self, workflow_id: str) -> list[str]:
        raw_sids = self._redis.hkeys(self.workflow_key(workflow_id))
        decoded_sids: list[str] = []
        for sid in raw_sids:
            decoded = self._decode(sid)
            if decoded:
                decoded_sids.append(decoded)
        return decoded_sids

    def list_sessions(self, workflow_id: str) -> list[WorkflowSessionInfo]:
        sessions_json = self._redis.hgetall(self.workflow_key(workflow_id))
        users: list[WorkflowSessionInfo] = []

        for session_info_json in sessions_json.values():
            value = self._decode(session_info_json)
            if not value:
                continue
            try:
                session_info = json.loads(value)
            except (TypeError, json.JSONDecodeError):
                continue

            if not isinstance(session_info, dict):
                continue
            if "user_id" not in session_info or "username" not in session_info or "sid" not in session_info:
                continue

            users.append(
                {
                    "user_id": str(session_info["user_id"]),
                    "username": str(session_info["username"]),
                    "avatar": session_info.get("avatar"),
                    "sid": str(session_info["sid"]),
                    "connected_at": int(session_info.get("connected_at") or 0),
                }
            )

        return users

    def get_current_leader(self, workflow_id: str) -> str | None:
        raw = self._redis.get(self.leader_key(workflow_id))
        return self._decode(raw)

    def set_leader_if_absent(self, workflow_id: str, sid: str) -> bool:
        return bool(self._redis.set(self.leader_key(workflow_id), sid, nx=True, ex=SESSION_STATE_TTL_SECONDS))

    def set_leader(self, workflow_id: str, sid: str) -> None:
        self._redis.set(self.leader_key(workflow_id), sid, ex=SESSION_STATE_TTL_SECONDS)

    def delete_leader(self, workflow_id: str) -> None:
        self._redis.delete(self.leader_key(workflow_id))

    def expire_leader(self, workflow_id: str) -> None:
        self._redis.expire(self.leader_key(workflow_id), SESSION_STATE_TTL_SECONDS)
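A minimal sketch of the Redis key layout this repository maintains for one connected editor; the ids below are made up for illustration and are not taken from the diff:

```python
# Hypothetical sketch: what one registered session looks like in Redis.
repo = WorkflowCollaborationRepository()
repo.set_session_info(
    "wf-123",
    {"user_id": "u-1", "username": "Jane", "avatar": None, "sid": "sid-abc", "connected_at": 1724300000},
)
# Keys written (both refreshed to SESSION_STATE_TTL_SECONDS):
#   workflow_online_users:wf-123  -> hash  {"sid-abc": "<session json>"}
#   ws_sid_map:sid-abc            -> '{"workflow_id": "wf-123", "user_id": "u-1"}'
repo.set_leader_if_absent("wf-123", "sid-abc")  # workflow_leader:wf-123 -> "sid-abc" (SET NX EX)
```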
@@ -1,8 +1,6 @@
from __future__ import annotations

import uuid
from collections.abc import Generator, Mapping
from typing import TYPE_CHECKING, Any, Union
from typing import Any, Union

from configs import dify_config
from core.app.apps.advanced_chat.app_generator import AdvancedChatAppGenerator
@@ -20,9 +18,6 @@ from services.errors.app import QuotaExceededError, WorkflowIdFormatError, Workf
from services.errors.llm import InvokeRateLimitError
from services.workflow_service import WorkflowService

if TYPE_CHECKING:
    from controllers.console.app.workflow import LoopNodeRunPayload


class AppGenerateService:
    @classmethod
@@ -170,9 +165,7 @@ class AppGenerateService:
            raise ValueError(f"Invalid app mode {app_model.mode}")

    @classmethod
    def generate_single_loop(
        cls, app_model: App, user: Account, node_id: str, args: LoopNodeRunPayload, streaming: bool = True
    ):
    def generate_single_loop(cls, app_model: App, user: Account, node_id: str, args: Any, streaming: bool = True):
        if app_model.mode == AppMode.ADVANCED_CHAT:
            workflow = cls._get_workflow(app_model, InvokeFrom.DEBUGGER)
            return AdvancedChatAppGenerator.convert_to_event_stream(
@@ -143,14 +143,6 @@ class BillingService:
            raise ValueError("Invalid arguments.")
        if method == "POST" and response.status_code != httpx.codes.OK:
            raise ValueError(f"Unable to send request to {url}. Please try again later or contact support.")
        if method == "DELETE" and response.status_code != httpx.codes.OK:
            logger.error(
                "billing_service: %s _send_request: response: %s %s", method, response.status_code, response.text
            )
            raise ValueError(f"Unable to process delete request {url}. Please try again later or contact support.")
        logger.info(
            "billing_service: %s _send_request: response: %s %s", method, response.status_code, response.text
        )
        return response.json()

    @staticmethod
@@ -173,7 +165,7 @@ class BillingService:
    def delete_account(cls, account_id: str):
        """Delete account."""
        params = {"account_id": account_id}
        return cls._send_request("DELETE", "/account", params=params)
        return cls._send_request("DELETE", "/account/", params=params)

    @classmethod
    def is_email_in_freeze(cls, email: str) -> bool:
|
||||
|
||||
from configs import dify_config
|
||||
from enums.cloud_plan import CloudPlan
|
||||
from enums.hosted_provider import HostedTrialProvider
|
||||
from services.billing_service import BillingService
|
||||
from services.enterprise.enterprise_service import EnterpriseService
|
||||
|
||||
@ -162,6 +161,7 @@ class SystemFeatureModel(BaseModel):
|
||||
enable_email_code_login: bool = False
|
||||
enable_email_password_login: bool = True
|
||||
enable_social_oauth_login: bool = False
|
||||
enable_collaboration_mode: bool = False
|
||||
is_allow_register: bool = False
|
||||
is_allow_create_workspace: bool = False
|
||||
is_email_setup: bool = False
|
||||
@ -171,7 +171,6 @@ class SystemFeatureModel(BaseModel):
|
||||
plugin_installation_permission: PluginInstallationPermissionModel = PluginInstallationPermissionModel()
|
||||
enable_change_email: bool = True
|
||||
plugin_manager: PluginManagerModel = PluginManagerModel()
|
||||
trial_models: list[str] = []
|
||||
enable_trial_app: bool = False
|
||||
enable_explore_banner: bool = False
|
||||
|
||||
@ -226,24 +225,13 @@ class FeatureService:
|
||||
system_features.enable_email_code_login = dify_config.ENABLE_EMAIL_CODE_LOGIN
|
||||
system_features.enable_email_password_login = dify_config.ENABLE_EMAIL_PASSWORD_LOGIN
|
||||
system_features.enable_social_oauth_login = dify_config.ENABLE_SOCIAL_OAUTH_LOGIN
|
||||
system_features.enable_collaboration_mode = dify_config.ENABLE_COLLABORATION_MODE
|
||||
system_features.is_allow_register = dify_config.ALLOW_REGISTER
|
||||
system_features.is_allow_create_workspace = dify_config.ALLOW_CREATE_WORKSPACE
|
||||
system_features.is_email_setup = dify_config.MAIL_TYPE is not None and dify_config.MAIL_TYPE != ""
|
||||
system_features.trial_models = cls._fulfill_trial_models_from_env()
|
||||
system_features.enable_trial_app = dify_config.ENABLE_TRIAL_APP
|
||||
system_features.enable_explore_banner = dify_config.ENABLE_EXPLORE_BANNER
|
||||
|
||||
@classmethod
|
||||
def _fulfill_trial_models_from_env(cls) -> list[str]:
|
||||
return [
|
||||
provider.value
|
||||
for provider in HostedTrialProvider
|
||||
if (
|
||||
getattr(dify_config, f"HOSTED_{provider.config_key}_PAID_ENABLED", False)
|
||||
and getattr(dify_config, f"HOSTED_{provider.config_key}_TRIAL_ENABLED", False)
|
||||
)
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def _fulfill_params_from_env(cls, features: FeatureModel):
|
||||
features.can_replace_logo = dify_config.CAN_REPLACE_LOGO
|
||||
|
||||
api/services/workflow_collaboration_service.py (Normal file, 196 lines)
@@ -0,0 +1,196 @@
from __future__ import annotations

import logging
import time
from collections.abc import Mapping

from models.account import Account
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository, WorkflowSessionInfo


class WorkflowCollaborationService:
    def __init__(self, repository: WorkflowCollaborationRepository, socketio) -> None:
        self._repository = repository
        self._socketio = socketio

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(repository={self._repository})"

    def save_session(self, sid: str, user: Account) -> None:
        self._socketio.save_session(
            sid,
            {
                "user_id": user.id,
                "username": user.name,
                "avatar": user.avatar,
            },
        )

    def register_session(self, workflow_id: str, sid: str) -> tuple[str, bool] | None:
        session = self._socketio.get_session(sid)
        user_id = session.get("user_id")
        if not user_id:
            return None

        session_info: WorkflowSessionInfo = {
            "user_id": str(user_id),
            "username": str(session.get("username", "Unknown")),
            "avatar": session.get("avatar"),
            "sid": sid,
            "connected_at": int(time.time()),
        }

        self._repository.set_session_info(workflow_id, session_info)

        leader_sid = self.get_or_set_leader(workflow_id, sid)
        is_leader = leader_sid == sid

        self._socketio.enter_room(sid, workflow_id)
        self.broadcast_online_users(workflow_id)

        self._socketio.emit("status", {"isLeader": is_leader}, room=sid)

        return str(user_id), is_leader

    def disconnect_session(self, sid: str) -> None:
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return

        workflow_id = mapping["workflow_id"]
        self._repository.delete_session(workflow_id, sid)

        self.handle_leader_disconnect(workflow_id, sid)
        self.broadcast_online_users(workflow_id)

    def relay_collaboration_event(self, sid: str, data: Mapping[str, object]) -> tuple[dict[str, str], int]:
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return {"msg": "unauthorized"}, 401

        workflow_id = mapping["workflow_id"]
        user_id = mapping["user_id"]
        self.refresh_session_state(workflow_id, sid)

        event_type = data.get("type")
        event_data = data.get("data")
        timestamp = data.get("timestamp", int(time.time()))

        if not event_type:
            return {"msg": "invalid event type"}, 400

        self._socketio.emit(
            "collaboration_update",
            {"type": event_type, "userId": user_id, "data": event_data, "timestamp": timestamp},
            room=workflow_id,
            skip_sid=sid,
        )

        return {"msg": "event_broadcasted"}, 200

    def relay_graph_event(self, sid: str, data: object) -> tuple[dict[str, str], int]:
        mapping = self._repository.get_sid_mapping(sid)
        if not mapping:
            return {"msg": "unauthorized"}, 401

        workflow_id = mapping["workflow_id"]
        self.refresh_session_state(workflow_id, sid)

        self._socketio.emit("graph_update", data, room=workflow_id, skip_sid=sid)

        return {"msg": "graph_update_broadcasted"}, 200

    def get_or_set_leader(self, workflow_id: str, sid: str) -> str:
        current_leader = self._repository.get_current_leader(workflow_id)

        if current_leader:
            if self.is_session_active(workflow_id, current_leader):
                return current_leader
            self._repository.delete_session(workflow_id, current_leader)
            self._repository.delete_leader(workflow_id)

        was_set = self._repository.set_leader_if_absent(workflow_id, sid)

        if was_set:
            if current_leader:
                self.broadcast_leader_change(workflow_id, sid)
            return sid

        current_leader = self._repository.get_current_leader(workflow_id)
        if current_leader:
            return current_leader

        return sid

    def handle_leader_disconnect(self, workflow_id: str, disconnected_sid: str) -> None:
        current_leader = self._repository.get_current_leader(workflow_id)
        if not current_leader:
            return

        if current_leader != disconnected_sid:
            return

        session_sids = self._repository.get_session_sids(workflow_id)
        if session_sids:
            new_leader_sid = session_sids[0]
            self._repository.set_leader(workflow_id, new_leader_sid)
            self.broadcast_leader_change(workflow_id, new_leader_sid)
        else:
            self._repository.delete_leader(workflow_id)

    def broadcast_leader_change(self, workflow_id: str, new_leader_sid: str) -> None:
        for sid in self._repository.get_session_sids(workflow_id):
            try:
                is_leader = sid == new_leader_sid
                self._socketio.emit("status", {"isLeader": is_leader}, room=sid)
            except Exception:
                logging.exception("Failed to emit leader status to session %s", sid)

    def get_current_leader(self, workflow_id: str) -> str | None:
        return self._repository.get_current_leader(workflow_id)

    def broadcast_online_users(self, workflow_id: str) -> None:
        users = self._repository.list_sessions(workflow_id)
        users.sort(key=lambda x: x.get("connected_at") or 0)

        leader_sid = self.get_current_leader(workflow_id)

        self._socketio.emit(
            "online_users",
            {"workflow_id": workflow_id, "users": users, "leader": leader_sid},
            room=workflow_id,
        )

    def refresh_session_state(self, workflow_id: str, sid: str) -> None:
        self._repository.refresh_session_state(workflow_id, sid)
        self._ensure_leader(workflow_id, sid)

    def _ensure_leader(self, workflow_id: str, sid: str) -> None:
        current_leader = self._repository.get_current_leader(workflow_id)
        if current_leader and self.is_session_active(workflow_id, current_leader):
            self._repository.expire_leader(workflow_id)
            return

        if current_leader:
            self._repository.delete_leader(workflow_id)

        self._repository.set_leader(workflow_id, sid)
        self.broadcast_leader_change(workflow_id, sid)

    def is_session_active(self, workflow_id: str, sid: str) -> bool:
        if not sid:
            return False

        try:
            if not self._socketio.manager.is_connected(sid, "/"):
                return False
        except AttributeError:
            return False

        if not self._repository.session_exists(workflow_id, sid):
            return False

        if not self._repository.sid_mapping_exists(sid):
            return False

        return True
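A hedged sketch of how this service might be wired to python-socketio handlers. The server construction, the client-side event names other than the ones emitted above, and the authentication step are assumptions, not taken from this diff:

```python
# Hypothetical wiring sketch using python-socketio's Server API.
import socketio

sio = socketio.Server(async_mode="gevent")  # assumed async mode
service = WorkflowCollaborationService(WorkflowCollaborationRepository(), sio)

@sio.on("join")  # "join" is an assumed client event name
def on_join(sid, data):
    # In the real app the account would be saved via save_session() during auth.
    service.register_session(data["workflow_id"], sid)

@sio.on("collaboration_event")  # assumed client event name
def on_collaboration_event(sid, data):
    return service.relay_collaboration_event(sid, data)

@sio.event
def disconnect(sid):
    service.disconnect_session(sid)
```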
api/services/workflow_comment_service.py (Normal file, 345 lines)
@@ -0,0 +1,345 @@
import logging
from collections.abc import Sequence

from sqlalchemy import desc, select
from sqlalchemy.orm import Session, selectinload
from werkzeug.exceptions import Forbidden, NotFound

from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import uuid_value
from models import WorkflowComment, WorkflowCommentMention, WorkflowCommentReply
from models.account import Account

logger = logging.getLogger(__name__)


class WorkflowCommentService:
    """Service for managing workflow comments."""

    @staticmethod
    def _validate_content(content: str) -> None:
        if len(content.strip()) == 0:
            raise ValueError("Comment content cannot be empty")

        if len(content) > 1000:
            raise ValueError("Comment content cannot exceed 1000 characters")

    @staticmethod
    def get_comments(tenant_id: str, app_id: str) -> Sequence[WorkflowComment]:
        """Get all comments for a workflow."""
        with Session(db.engine) as session:
            # Get all comments with eager loading
            stmt = (
                select(WorkflowComment)
                .options(selectinload(WorkflowComment.replies), selectinload(WorkflowComment.mentions))
                .where(WorkflowComment.tenant_id == tenant_id, WorkflowComment.app_id == app_id)
                .order_by(desc(WorkflowComment.created_at))
            )

            comments = session.scalars(stmt).all()

            # Batch preload all Account objects to avoid N+1 queries
            WorkflowCommentService._preload_accounts(session, comments)

            return comments

    @staticmethod
    def _preload_accounts(session: Session, comments: Sequence[WorkflowComment]) -> None:
        """Batch preload Account objects for comments, replies, and mentions."""
        # Collect all user IDs
        user_ids: set[str] = set()
        for comment in comments:
            user_ids.add(comment.created_by)
            if comment.resolved_by:
                user_ids.add(comment.resolved_by)
            user_ids.update(reply.created_by for reply in comment.replies)
            user_ids.update(mention.mentioned_user_id for mention in comment.mentions)

        if not user_ids:
            return

        # Batch query all accounts
        accounts = session.scalars(select(Account).where(Account.id.in_(user_ids))).all()
        account_map = {str(account.id): account for account in accounts}

        # Cache accounts on objects
        for comment in comments:
            comment.cache_created_by_account(account_map.get(comment.created_by))
            comment.cache_resolved_by_account(account_map.get(comment.resolved_by) if comment.resolved_by else None)
            for reply in comment.replies:
                reply.cache_created_by_account(account_map.get(reply.created_by))
            for mention in comment.mentions:
                mention.cache_mentioned_user_account(account_map.get(mention.mentioned_user_id))

    @staticmethod
    def get_comment(tenant_id: str, app_id: str, comment_id: str, session: Session | None = None) -> WorkflowComment:
        """Get a specific comment."""

        def _get_comment(session: Session) -> WorkflowComment:
            stmt = (
                select(WorkflowComment)
                .options(selectinload(WorkflowComment.replies), selectinload(WorkflowComment.mentions))
                .where(
                    WorkflowComment.id == comment_id,
                    WorkflowComment.tenant_id == tenant_id,
                    WorkflowComment.app_id == app_id,
                )
            )
            comment = session.scalar(stmt)

            if not comment:
                raise NotFound("Comment not found")

            # Preload accounts to avoid N+1 queries
            WorkflowCommentService._preload_accounts(session, [comment])

            return comment

        if session is not None:
            return _get_comment(session)
        else:
            with Session(db.engine, expire_on_commit=False) as session:
                return _get_comment(session)

    @staticmethod
    def create_comment(
        tenant_id: str,
        app_id: str,
        created_by: str,
        content: str,
        position_x: float,
        position_y: float,
        mentioned_user_ids: list[str] | None = None,
    ) -> dict:
        """Create a new workflow comment."""
        WorkflowCommentService._validate_content(content)

        with Session(db.engine) as session:
            comment = WorkflowComment(
                tenant_id=tenant_id,
                app_id=app_id,
                position_x=position_x,
                position_y=position_y,
                content=content,
                created_by=created_by,
            )

            session.add(comment)
            session.flush()  # Get the comment ID for mentions

            # Create mentions if specified
            mentioned_user_ids = mentioned_user_ids or []
            for user_id in mentioned_user_ids:
                if isinstance(user_id, str) and uuid_value(user_id):
                    mention = WorkflowCommentMention(
                        comment_id=comment.id,
                        reply_id=None,  # This is a comment mention, not reply mention
                        mentioned_user_id=user_id,
                    )
                    session.add(mention)

            session.commit()

            # Return only what we need - id and created_at
            return {"id": comment.id, "created_at": comment.created_at}

    @staticmethod
    def update_comment(
        tenant_id: str,
        app_id: str,
        comment_id: str,
        user_id: str,
        content: str,
        position_x: float | None = None,
        position_y: float | None = None,
        mentioned_user_ids: list[str] | None = None,
    ) -> dict:
        """Update a workflow comment."""
        WorkflowCommentService._validate_content(content)

        with Session(db.engine, expire_on_commit=False) as session:
            # Get comment with validation
            stmt = select(WorkflowComment).where(
                WorkflowComment.id == comment_id,
                WorkflowComment.tenant_id == tenant_id,
                WorkflowComment.app_id == app_id,
            )
            comment = session.scalar(stmt)

            if not comment:
                raise NotFound("Comment not found")

            # Only the creator can update the comment
            if comment.created_by != user_id:
                raise Forbidden("Only the comment creator can update it")

            # Update comment fields
            comment.content = content
            if position_x is not None:
                comment.position_x = position_x
            if position_y is not None:
                comment.position_y = position_y

            # Update mentions - first remove existing mentions for this comment only (not replies)
            existing_mentions = session.scalars(
                select(WorkflowCommentMention).where(
                    WorkflowCommentMention.comment_id == comment.id,
                    WorkflowCommentMention.reply_id.is_(None),  # Only comment mentions, not reply mentions
                )
            ).all()
            for mention in existing_mentions:
                session.delete(mention)

            # Add new mentions
            mentioned_user_ids = mentioned_user_ids or []
            for user_id_str in mentioned_user_ids:
                if isinstance(user_id_str, str) and uuid_value(user_id_str):
                    mention = WorkflowCommentMention(
                        comment_id=comment.id,
                        reply_id=None,  # This is a comment mention
                        mentioned_user_id=user_id_str,
                    )
                    session.add(mention)

            session.commit()

            return {"id": comment.id, "updated_at": comment.updated_at}

    @staticmethod
    def delete_comment(tenant_id: str, app_id: str, comment_id: str, user_id: str) -> None:
        """Delete a workflow comment."""
        with Session(db.engine, expire_on_commit=False) as session:
            comment = WorkflowCommentService.get_comment(tenant_id, app_id, comment_id, session)

            # Only the creator can delete the comment
            if comment.created_by != user_id:
                raise Forbidden("Only the comment creator can delete it")

            # Delete associated mentions (both comment and reply mentions)
            mentions = session.scalars(
                select(WorkflowCommentMention).where(WorkflowCommentMention.comment_id == comment_id)
            ).all()
            for mention in mentions:
                session.delete(mention)

            # Delete associated replies
            replies = session.scalars(
                select(WorkflowCommentReply).where(WorkflowCommentReply.comment_id == comment_id)
            ).all()
            for reply in replies:
                session.delete(reply)

            session.delete(comment)
            session.commit()

    @staticmethod
    def resolve_comment(tenant_id: str, app_id: str, comment_id: str, user_id: str) -> WorkflowComment:
        """Resolve a workflow comment."""
        with Session(db.engine, expire_on_commit=False) as session:
            comment = WorkflowCommentService.get_comment(tenant_id, app_id, comment_id, session)
            if comment.resolved:
                return comment

            comment.resolved = True
            comment.resolved_at = naive_utc_now()
            comment.resolved_by = user_id
            session.commit()

            return comment

    @staticmethod
    def create_reply(
        comment_id: str, content: str, created_by: str, mentioned_user_ids: list[str] | None = None
    ) -> dict:
        """Add a reply to a workflow comment."""
        WorkflowCommentService._validate_content(content)

        with Session(db.engine, expire_on_commit=False) as session:
            # Check if comment exists
            comment = session.get(WorkflowComment, comment_id)
            if not comment:
                raise NotFound("Comment not found")

            reply = WorkflowCommentReply(comment_id=comment_id, content=content, created_by=created_by)

            session.add(reply)
            session.flush()  # Get the reply ID for mentions

            # Create mentions if specified
            mentioned_user_ids = mentioned_user_ids or []
            for user_id in mentioned_user_ids:
                if isinstance(user_id, str) and uuid_value(user_id):
                    # Create mention linking to specific reply
                    mention = WorkflowCommentMention(
                        comment_id=comment_id, reply_id=reply.id, mentioned_user_id=user_id
                    )
                    session.add(mention)

            session.commit()

            return {"id": reply.id, "created_at": reply.created_at}

    @staticmethod
    def update_reply(reply_id: str, user_id: str, content: str, mentioned_user_ids: list[str] | None = None) -> dict:
        """Update a comment reply."""
        WorkflowCommentService._validate_content(content)

        with Session(db.engine, expire_on_commit=False) as session:
            reply = session.get(WorkflowCommentReply, reply_id)
            if not reply:
                raise NotFound("Reply not found")

            # Only the creator can update the reply
            if reply.created_by != user_id:
                raise Forbidden("Only the reply creator can update it")

            reply.content = content

            # Update mentions - first remove existing mentions for this reply
            existing_mentions = session.scalars(
                select(WorkflowCommentMention).where(WorkflowCommentMention.reply_id == reply.id)
            ).all()
            for mention in existing_mentions:
                session.delete(mention)

            # Add mentions
            mentioned_user_ids = mentioned_user_ids or []
            for user_id_str in mentioned_user_ids:
                if isinstance(user_id_str, str) and uuid_value(user_id_str):
                    mention = WorkflowCommentMention(
                        comment_id=reply.comment_id, reply_id=reply.id, mentioned_user_id=user_id_str
                    )
                    session.add(mention)

            session.commit()
            session.refresh(reply)  # Refresh to get updated timestamp

            return {"id": reply.id, "updated_at": reply.updated_at}

    @staticmethod
    def delete_reply(reply_id: str, user_id: str) -> None:
        """Delete a comment reply."""
        with Session(db.engine, expire_on_commit=False) as session:
            reply = session.get(WorkflowCommentReply, reply_id)
            if not reply:
                raise NotFound("Reply not found")

            # Only the creator can delete the reply
            if reply.created_by != user_id:
                raise Forbidden("Only the reply creator can delete it")

            # Delete associated mentions first
            mentions = session.scalars(
                select(WorkflowCommentMention).where(WorkflowCommentMention.reply_id == reply_id)
            ).all()
            for mention in mentions:
                session.delete(mention)

            session.delete(reply)
            session.commit()

    @staticmethod
    def validate_comment_access(comment_id: str, tenant_id: str, app_id: str) -> WorkflowComment:
        """Validate that a comment belongs to the specified tenant and app."""
        return WorkflowCommentService.get_comment(tenant_id, app_id, comment_id)
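A brief usage sketch of the create/reply/resolve flow exposed by this service; the ids are illustrative and the call site (a console API controller) is an assumption:

```python
# Hypothetical call sequence, e.g. from a console API controller.
created = WorkflowCommentService.create_comment(
    tenant_id="tenant-1",
    app_id="app-1",
    created_by="account-1",
    content="Should this branch handle empty input?",
    position_x=120.0,
    position_y=80.0,
    mentioned_user_ids=["account-2"],
)
WorkflowCommentService.create_reply(
    comment_id=created["id"],
    content="Good catch, adding a guard node.",
    created_by="account-2",
)
WorkflowCommentService.resolve_comment("tenant-1", "app-1", created["id"], user_id="account-1")
```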
@@ -249,6 +249,78 @@ class WorkflowService:
        # return draft workflow
        return workflow

    def update_draft_workflow_environment_variables(
        self,
        *,
        app_model: App,
        environment_variables: Sequence[VariableBase],
        account: Account,
    ):
        """
        Update draft workflow environment variables
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if not workflow:
            raise ValueError("No draft workflow found.")

        workflow.environment_variables = environment_variables
        workflow.updated_by = account.id
        workflow.updated_at = naive_utc_now()

        # commit db session changes
        db.session.commit()

    def update_draft_workflow_conversation_variables(
        self,
        *,
        app_model: App,
        conversation_variables: Sequence[VariableBase],
        account: Account,
    ):
        """
        Update draft workflow conversation variables
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if not workflow:
            raise ValueError("No draft workflow found.")

        workflow.conversation_variables = conversation_variables
        workflow.updated_by = account.id
        workflow.updated_at = naive_utc_now()

        # commit db session changes
        db.session.commit()

    def update_draft_workflow_features(
        self,
        *,
        app_model: App,
        features: dict,
        account: Account,
    ):
        """
        Update draft workflow features
        """
        # fetch draft workflow by app_model
        workflow = self.get_draft_workflow(app_model=app_model)

        if not workflow:
            raise ValueError("No draft workflow found.")

        # validate features structure
        self.validate_features_structure(app_model=app_model, features=features)

        workflow.features = json.dumps(features)
        workflow.updated_by = account.id
        workflow.updated_at = naive_utc_now()

        # commit db session changes
        db.session.commit()

    def publish_workflow(
        self,
        *,
@@ -38,7 +38,7 @@ os.environ["OPENDAL_FS_ROOT"] = "/tmp/dify-storage"
os.environ.setdefault("STORAGE_TYPE", "opendal")
os.environ.setdefault("OPENDAL_SCHEME", "fs")

_CACHED_APP = create_app()
_SIO_APP, _CACHED_APP = create_app()


@pytest.fixture

@@ -364,7 +364,7 @@ def _create_app_with_containers() -> Flask:

    # Create and configure the Flask application
    logger.info("Initializing Flask application...")
    app = create_app()
    sio_app, app = create_app()
    logger.info("Flask application created successfully")

    # Initialize database schema
@@ -274,6 +274,7 @@ class TestFeatureService:
        mock_config.ENABLE_EMAIL_CODE_LOGIN = True
        mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
        mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
        mock_config.ENABLE_COLLABORATION_MODE = True
        mock_config.ALLOW_REGISTER = False
        mock_config.ALLOW_CREATE_WORKSPACE = False
        mock_config.MAIL_TYPE = "smtp"
@@ -298,6 +299,7 @@ class TestFeatureService:
        # Verify authentication settings
        assert result.enable_email_code_login is True
        assert result.enable_email_password_login is False
        assert result.enable_collaboration_mode is True
        assert result.is_allow_register is False
        assert result.is_allow_create_workspace is False

@@ -402,6 +404,7 @@ class TestFeatureService:
        mock_config.ENABLE_EMAIL_CODE_LOGIN = True
        mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
        mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
        mock_config.ENABLE_COLLABORATION_MODE = False
        mock_config.ALLOW_REGISTER = True
        mock_config.ALLOW_CREATE_WORKSPACE = True
        mock_config.MAIL_TYPE = "smtp"
@@ -423,6 +426,7 @@ class TestFeatureService:
        assert result.enable_email_code_login is True
        assert result.enable_email_password_login is True
        assert result.enable_social_oauth_login is False
        assert result.enable_collaboration_mode is False
        assert result.is_allow_register is True
        assert result.is_allow_create_workspace is True
        assert result.is_email_setup is True
@@ -0,0 +1,121 @@
import json
from unittest.mock import Mock

import pytest

from repositories import workflow_collaboration_repository as repo_module
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository


class TestWorkflowCollaborationRepository:
    @pytest.fixture
    def mock_redis(self, monkeypatch: pytest.MonkeyPatch) -> Mock:
        mock_redis = Mock()
        monkeypatch.setattr(repo_module, "redis_client", mock_redis)
        return mock_redis

    def test_get_sid_mapping_returns_mapping(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.get.return_value = b'{"workflow_id":"wf-1","user_id":"u-1"}'
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.get_sid_mapping("sid-1")

        # Assert
        assert result == {"workflow_id": "wf-1", "user_id": "u-1"}

    def test_list_sessions_filters_invalid_entries(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.hgetall.return_value = {
            b"sid-1": b'{"user_id":"u-1","username":"Jane","sid":"sid-1","connected_at":2}',
            b"sid-2": b'{"username":"Missing","sid":"sid-2"}',
            b"sid-3": b"not-json",
        }
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.list_sessions("wf-1")

        # Assert
        assert result == [
            {
                "user_id": "u-1",
                "username": "Jane",
                "avatar": None,
                "sid": "sid-1",
                "connected_at": 2,
            }
        ]

    def test_set_session_info_persists_payload(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.exists.return_value = True
        repository = WorkflowCollaborationRepository()
        payload = {
            "user_id": "u-1",
            "username": "Jane",
            "avatar": None,
            "sid": "sid-1",
            "connected_at": 1,
        }

        # Act
        repository.set_session_info("wf-1", payload)

        # Assert
        assert mock_redis.hset.called
        workflow_key, sid, session_json = mock_redis.hset.call_args.args
        assert workflow_key == "workflow_online_users:wf-1"
        assert sid == "sid-1"
        assert json.loads(session_json)["user_id"] == "u-1"
        assert mock_redis.set.called

    def test_refresh_session_state_expires_keys(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.exists.return_value = True
        repository = WorkflowCollaborationRepository()

        # Act
        repository.refresh_session_state("wf-1", "sid-1")

        # Assert
        assert mock_redis.expire.call_count == 2

    def test_get_current_leader_decodes_bytes(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.get.return_value = b"sid-1"
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.get_current_leader("wf-1")

        # Assert
        assert result == "sid-1"

    def test_set_leader_if_absent_uses_nx(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.set.return_value = True
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.set_leader_if_absent("wf-1", "sid-1")

        # Assert
        assert result is True
        _key, _value = mock_redis.set.call_args.args
        assert _key == "workflow_leader:wf-1"
        assert _value == "sid-1"
        assert mock_redis.set.call_args.kwargs["nx"] is True
        assert "ex" in mock_redis.set.call_args.kwargs

    def test_get_session_sids_decodes(self, mock_redis: Mock) -> None:
        # Arrange
        mock_redis.hkeys.return_value = [b"sid-1", "sid-2"]
        repository = WorkflowCollaborationRepository()

        # Act
        result = repository.get_session_sids("wf-1")

        # Assert
        assert result == ["sid-1", "sid-2"]
@@ -0,0 +1,271 @@
from unittest.mock import Mock, patch

import pytest

from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
from services.workflow_collaboration_service import WorkflowCollaborationService


class TestWorkflowCollaborationService:
    @pytest.fixture
    def service(self) -> tuple[WorkflowCollaborationService, Mock, Mock]:
        repository = Mock(spec=WorkflowCollaborationRepository)
        socketio = Mock()
        return WorkflowCollaborationService(repository, socketio), repository, socketio

    def test_register_session_returns_leader_status(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, socketio = service
        socketio.get_session.return_value = {"user_id": "u-1", "username": "Jane", "avatar": None}

        with (
            patch.object(collaboration_service, "get_or_set_leader", return_value="sid-1"),
            patch.object(collaboration_service, "broadcast_online_users"),
        ):
            # Act
            result = collaboration_service.register_session("wf-1", "sid-1")

            # Assert
            assert result == ("u-1", True)
            repository.set_session_info.assert_called_once()
            socketio.enter_room.assert_called_once_with("sid-1", "wf-1")
            socketio.emit.assert_called_once_with("status", {"isLeader": True}, room="sid-1")

    def test_register_session_returns_none_when_missing_user(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, _repository, socketio = service
        socketio.get_session.return_value = {}

        # Act
        result = collaboration_service.register_session("wf-1", "sid-1")

        # Assert
        assert result is None

    def test_relay_collaboration_event_unauthorized(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_sid_mapping.return_value = None

        # Act
        result = collaboration_service.relay_collaboration_event("sid-1", {})

        # Assert
        assert result == ({"msg": "unauthorized"}, 401)

    def test_relay_collaboration_event_emits_update(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, socketio = service
        repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
        payload = {"type": "mouse_move", "data": {"x": 1}, "timestamp": 123}

        # Act
        result = collaboration_service.relay_collaboration_event("sid-1", payload)

        # Assert
        assert result == ({"msg": "event_broadcasted"}, 200)
        socketio.emit.assert_called_once_with(
            "collaboration_update",
            {"type": "mouse_move", "userId": "u-1", "data": {"x": 1}, "timestamp": 123},
            room="wf-1",
            skip_sid="sid-1",
        )

    def test_relay_graph_event_unauthorized(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_sid_mapping.return_value = None

        # Act
        result = collaboration_service.relay_graph_event("sid-1", {"nodes": []})

        # Assert
        assert result == ({"msg": "unauthorized"}, 401)

    def test_disconnect_session_no_mapping(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_sid_mapping.return_value = None

        # Act
        collaboration_service.disconnect_session("sid-1")

        # Assert
        repository.delete_session.assert_not_called()

    def test_disconnect_session_cleans_up(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}

        with (
            patch.object(collaboration_service, "handle_leader_disconnect") as handle_leader_disconnect,
            patch.object(collaboration_service, "broadcast_online_users") as broadcast_online_users,
        ):
            # Act
            collaboration_service.disconnect_session("sid-1")

            # Assert
            repository.delete_session.assert_called_once_with("wf-1", "sid-1")
            handle_leader_disconnect.assert_called_once_with("wf-1", "sid-1")
            broadcast_online_users.assert_called_once_with("wf-1")

    def test_get_or_set_leader_returns_active_leader(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"

        with patch.object(collaboration_service, "is_session_active", return_value=True):
            # Act
            result = collaboration_service.get_or_set_leader("wf-1", "sid-2")

            # Assert
            assert result == "sid-1"
            repository.set_leader_if_absent.assert_not_called()

    def test_get_or_set_leader_replaces_dead_leader(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"
        repository.set_leader_if_absent.return_value = True

        with (
            patch.object(collaboration_service, "is_session_active", return_value=False),
            patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
        ):
            # Act
            result = collaboration_service.get_or_set_leader("wf-1", "sid-2")

            # Assert
            assert result == "sid-2"
            repository.delete_session.assert_called_once_with("wf-1", "sid-1")
            repository.delete_leader.assert_called_once_with("wf-1")
            broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")

    def test_get_or_set_leader_falls_back_to_existing(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.side_effect = [None, "sid-3"]
        repository.set_leader_if_absent.return_value = False

        # Act
        result = collaboration_service.get_or_set_leader("wf-1", "sid-2")

        # Assert
        assert result == "sid-3"

    def test_handle_leader_disconnect_elects_new(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"
        repository.get_session_sids.return_value = ["sid-2"]

        with patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change:
            # Act
            collaboration_service.handle_leader_disconnect("wf-1", "sid-1")

            # Assert
            repository.set_leader.assert_called_once_with("wf-1", "sid-2")
            broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")

    def test_handle_leader_disconnect_clears_when_empty(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, _socketio = service
        repository.get_current_leader.return_value = "sid-1"
        repository.get_session_sids.return_value = []

        # Act
        collaboration_service.handle_leader_disconnect("wf-1", "sid-1")

        # Assert
        repository.delete_leader.assert_called_once_with("wf-1")

    def test_broadcast_online_users_sorts_and_emits(
        self, service: tuple[WorkflowCollaborationService, Mock, Mock]
    ) -> None:
        # Arrange
        collaboration_service, repository, socketio = service
        repository.list_sessions.return_value = [
            {"user_id": "u-1", "username": "A", "avatar": None, "sid": "sid-1", "connected_at": 3},
            {"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1},
        ]
        repository.get_current_leader.return_value = "sid-1"

        # Act
        collaboration_service.broadcast_online_users("wf-1")

        # Assert
        socketio.emit.assert_called_once_with(
            "online_users",
            {
                "workflow_id": "wf-1",
                "users": [
                    {"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1},
                    {"user_id": "u-1", "username": "A", "avatar": None, "sid": "sid-1", "connected_at": 3},
                ],
                "leader": "sid-1",
            },
            room="wf-1",
        )
|
||||
def test_refresh_session_state_expires_active_leader(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = "sid-1"
|
||||
|
||||
with patch.object(collaboration_service, "is_session_active", return_value=True):
|
||||
# Act
|
||||
collaboration_service.refresh_session_state("wf-1", "sid-1")
|
||||
|
||||
# Assert
|
||||
repository.refresh_session_state.assert_called_once_with("wf-1", "sid-1")
|
||||
repository.expire_leader.assert_called_once_with("wf-1")
|
||||
repository.set_leader.assert_not_called()
|
||||
|
||||
def test_refresh_session_state_sets_leader_when_missing(
|
||||
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
|
||||
) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, _socketio = service
|
||||
repository.get_current_leader.return_value = None
|
||||
|
||||
with patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change:
|
||||
# Act
|
||||
collaboration_service.refresh_session_state("wf-1", "sid-2")
|
||||
|
||||
# Assert
|
||||
repository.set_leader.assert_called_once_with("wf-1", "sid-2")
|
||||
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
|
||||
|
||||
def test_relay_graph_event_emits_update(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
|
||||
# Arrange
|
||||
collaboration_service, repository, socketio = service
|
||||
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
|
||||
|
||||
# Act
|
||||
result = collaboration_service.relay_graph_event("sid-1", {"nodes": []})
|
||||
|
||||
# Assert
|
||||
assert result == ({"msg": "graph_update_broadcasted"}, 200)
|
||||
repository.refresh_session_state.assert_called_once_with("wf-1", "sid-1")
|
||||
socketio.emit.assert_called_once_with("graph_update", {"nodes": []}, room="wf-1", skip_sid="sid-1")
|
||||
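The leader-election tests above pin down the expected control flow precisely, so a compact reference sketch may help when reading them. This is only an illustration reconstructed from the test assertions (the repository method names come from the mocks; the attribute name _repository and the is_session_active signature are assumptions), not the branch's actual implementation:

    def get_or_set_leader(self, workflow_id: str, sid: str) -> str:
        # Keep the current leader while its session is still alive.
        current = self._repository.get_current_leader(workflow_id)
        if current and self.is_session_active(workflow_id, current):
            return current

        # Evict a stale leader before running a new election.
        if current:
            self._repository.delete_session(workflow_id, current)
            self._repository.delete_leader(workflow_id)

        # Atomically claim leadership; otherwise defer to whoever won the race.
        if self._repository.set_leader_if_absent(workflow_id, sid):
            self.broadcast_leader_change(workflow_id, sid)
            return sid
        return self._repository.get_current_leader(workflow_id)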
api/tests/unit_tests/services/test_workflow_comment_service.py  (new file, 245 lines)
@@ -0,0 +1,245 @@
from unittest.mock import MagicMock, Mock, patch

import pytest
from werkzeug.exceptions import Forbidden, NotFound

from services import workflow_comment_service as service_module
from services.workflow_comment_service import WorkflowCommentService


@pytest.fixture
def mock_session(monkeypatch: pytest.MonkeyPatch) -> Mock:
    session = Mock()
    context_manager = MagicMock()
    context_manager.__enter__.return_value = session
    context_manager.__exit__.return_value = False
    mock_db = MagicMock()
    mock_db.engine = Mock()
    monkeypatch.setattr(service_module, "Session", Mock(return_value=context_manager))
    monkeypatch.setattr(service_module, "db", mock_db)
    return session


def _mock_scalars(result_list: list[object]) -> Mock:
    scalars = Mock()
    scalars.all.return_value = result_list
    return scalars


class TestWorkflowCommentService:
    def test_validate_content_rejects_empty(self) -> None:
        with pytest.raises(ValueError):
            WorkflowCommentService._validate_content(" ")

    def test_validate_content_rejects_too_long(self) -> None:
        with pytest.raises(ValueError):
            WorkflowCommentService._validate_content("a" * 1001)

    def test_create_comment_creates_mentions(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.id = "comment-1"
        comment.created_at = "ts"

        with (
            patch.object(service_module, "WorkflowComment", return_value=comment),
            patch.object(service_module, "WorkflowCommentMention", return_value=Mock()),
            patch.object(service_module, "uuid_value", side_effect=[True, False]),
        ):
            result = WorkflowCommentService.create_comment(
                tenant_id="tenant-1",
                app_id="app-1",
                created_by="user-1",
                content="hello",
                position_x=1.0,
                position_y=2.0,
                mentioned_user_ids=["user-2", "bad-id"],
            )

        assert result == {"id": "comment-1", "created_at": "ts"}
        assert mock_session.add.call_args_list[0].args[0] is comment
        assert mock_session.add.call_count == 2
        mock_session.commit.assert_called_once()

    def test_update_comment_raises_not_found(self, mock_session: Mock) -> None:
        mock_session.scalar.return_value = None

        with pytest.raises(NotFound):
            WorkflowCommentService.update_comment(
                tenant_id="tenant-1",
                app_id="app-1",
                comment_id="comment-1",
                user_id="user-1",
                content="hello",
            )

    def test_update_comment_raises_forbidden(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.created_by = "owner"
        mock_session.scalar.return_value = comment

        with pytest.raises(Forbidden):
            WorkflowCommentService.update_comment(
                tenant_id="tenant-1",
                app_id="app-1",
                comment_id="comment-1",
                user_id="intruder",
                content="hello",
            )

    def test_update_comment_replaces_mentions(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.id = "comment-1"
        comment.created_by = "owner"
        mock_session.scalar.return_value = comment

        existing_mentions = [Mock(), Mock()]
        mock_session.scalars.return_value = _mock_scalars(existing_mentions)

        with patch.object(service_module, "uuid_value", side_effect=[True, False]):
            result = WorkflowCommentService.update_comment(
                tenant_id="tenant-1",
                app_id="app-1",
                comment_id="comment-1",
                user_id="owner",
                content="updated",
                mentioned_user_ids=["user-2", "bad-id"],
            )

        assert result == {"id": "comment-1", "updated_at": comment.updated_at}
        assert mock_session.delete.call_count == 2
        assert mock_session.add.call_count == 1
        mock_session.commit.assert_called_once()

    def test_delete_comment_raises_forbidden(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.created_by = "owner"

        with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
            with pytest.raises(Forbidden):
                WorkflowCommentService.delete_comment("tenant-1", "app-1", "comment-1", "intruder")

    def test_delete_comment_removes_related_entities(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.created_by = "owner"

        mentions = [Mock(), Mock()]
        replies = [Mock()]
        mock_session.scalars.side_effect = [_mock_scalars(mentions), _mock_scalars(replies)]

        with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
            WorkflowCommentService.delete_comment("tenant-1", "app-1", "comment-1", "owner")

        assert mock_session.delete.call_count == 4
        mock_session.commit.assert_called_once()

    def test_resolve_comment_sets_fields(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.resolved = False
        comment.resolved_at = None
        comment.resolved_by = None

        with (
            patch.object(WorkflowCommentService, "get_comment", return_value=comment),
            patch.object(service_module, "naive_utc_now", return_value="now"),
        ):
            result = WorkflowCommentService.resolve_comment("tenant-1", "app-1", "comment-1", "user-1")

        assert result is comment
        assert comment.resolved is True
        assert comment.resolved_at == "now"
        assert comment.resolved_by == "user-1"
        mock_session.commit.assert_called_once()

    def test_resolve_comment_noop_when_already_resolved(self, mock_session: Mock) -> None:
        comment = Mock()
        comment.resolved = True

        with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
            result = WorkflowCommentService.resolve_comment("tenant-1", "app-1", "comment-1", "user-1")

        assert result is comment
        mock_session.commit.assert_not_called()

    def test_create_reply_requires_comment(self, mock_session: Mock) -> None:
        mock_session.get.return_value = None

        with pytest.raises(NotFound):
            WorkflowCommentService.create_reply("comment-1", "hello", "user-1")

    def test_create_reply_creates_mentions(self, mock_session: Mock) -> None:
        mock_session.get.return_value = Mock()
        reply = Mock()
        reply.id = "reply-1"
        reply.created_at = "ts"

        with (
            patch.object(service_module, "WorkflowCommentReply", return_value=reply),
            patch.object(service_module, "WorkflowCommentMention", return_value=Mock()),
            patch.object(service_module, "uuid_value", side_effect=[True, False]),
        ):
            result = WorkflowCommentService.create_reply(
                comment_id="comment-1",
                content="hello",
                created_by="user-1",
                mentioned_user_ids=["user-2", "bad-id"],
            )

        assert result == {"id": "reply-1", "created_at": "ts"}
        assert mock_session.add.call_count == 2
        mock_session.commit.assert_called_once()

    def test_update_reply_raises_not_found(self, mock_session: Mock) -> None:
        mock_session.get.return_value = None

        with pytest.raises(NotFound):
            WorkflowCommentService.update_reply("reply-1", "user-1", "hello")

    def test_update_reply_raises_forbidden(self, mock_session: Mock) -> None:
        reply = Mock()
        reply.created_by = "owner"
        mock_session.get.return_value = reply

        with pytest.raises(Forbidden):
            WorkflowCommentService.update_reply("reply-1", "intruder", "hello")

    def test_update_reply_replaces_mentions(self, mock_session: Mock) -> None:
        reply = Mock()
        reply.id = "reply-1"
        reply.comment_id = "comment-1"
        reply.created_by = "owner"
        reply.updated_at = "updated"
        mock_session.get.return_value = reply
        mock_session.scalars.return_value = _mock_scalars([Mock()])

        with patch.object(service_module, "uuid_value", side_effect=[True, False]):
            result = WorkflowCommentService.update_reply(
                reply_id="reply-1",
                user_id="owner",
                content="new",
                mentioned_user_ids=["user-2", "bad-id"],
            )

        assert result == {"id": "reply-1", "updated_at": "updated"}
        assert mock_session.delete.call_count == 1
        assert mock_session.add.call_count == 1
        mock_session.commit.assert_called_once()
        mock_session.refresh.assert_called_once_with(reply)

    def test_delete_reply_raises_forbidden(self, mock_session: Mock) -> None:
        reply = Mock()
        reply.created_by = "owner"
        mock_session.get.return_value = reply

        with pytest.raises(Forbidden):
            WorkflowCommentService.delete_reply("reply-1", "intruder")

    def test_delete_reply_removes_mentions(self, mock_session: Mock) -> None:
        reply = Mock()
        reply.created_by = "owner"
        mock_session.get.return_value = reply
        mock_session.scalars.return_value = _mock_scalars([Mock(), Mock()])

        WorkflowCommentService.delete_reply("reply-1", "owner")

        assert mock_session.delete.call_count == 3
        mock_session.commit.assert_called_once()
@@ -10,7 +10,7 @@ This test suite covers:
"""

import json
from unittest.mock import MagicMock, patch
from unittest.mock import MagicMock, Mock, patch

import pytest

@@ -630,6 +630,79 @@ class TestWorkflowService:
        with pytest.raises(ValueError, match="Invalid app mode"):
            workflow_service.validate_features_structure(app, features)

    # ==================== Draft Workflow Variable Update Tests ====================
    # These tests verify updating draft workflow environment/conversation variables

    def test_update_draft_workflow_environment_variables_updates_workflow(self, workflow_service, mock_db_session):
        """Test update_draft_workflow_environment_variables updates draft fields."""
        app = TestWorkflowAssociatedDataFactory.create_app_mock()
        account = TestWorkflowAssociatedDataFactory.create_account_mock()
        workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
        variables = [Mock()]

        with (
            patch.object(workflow_service, "get_draft_workflow", return_value=workflow),
            patch("services.workflow_service.naive_utc_now", return_value="now"),
        ):
            workflow_service.update_draft_workflow_environment_variables(
                app_model=app,
                environment_variables=variables,
                account=account,
            )

        assert workflow.environment_variables == variables
        assert workflow.updated_by == account.id
        assert workflow.updated_at == "now"
        mock_db_session.session.commit.assert_called_once()

    def test_update_draft_workflow_environment_variables_raises_when_missing(self, workflow_service):
        """Test update_draft_workflow_environment_variables raises when draft missing."""
        app = TestWorkflowAssociatedDataFactory.create_app_mock()
        account = TestWorkflowAssociatedDataFactory.create_account_mock()

        with patch.object(workflow_service, "get_draft_workflow", return_value=None):
            with pytest.raises(ValueError, match="No draft workflow found."):
                workflow_service.update_draft_workflow_environment_variables(
                    app_model=app,
                    environment_variables=[],
                    account=account,
                )

    def test_update_draft_workflow_conversation_variables_updates_workflow(self, workflow_service, mock_db_session):
        """Test update_draft_workflow_conversation_variables updates draft fields."""
        app = TestWorkflowAssociatedDataFactory.create_app_mock()
        account = TestWorkflowAssociatedDataFactory.create_account_mock()
        workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
        variables = [Mock()]

        with (
            patch.object(workflow_service, "get_draft_workflow", return_value=workflow),
            patch("services.workflow_service.naive_utc_now", return_value="now"),
        ):
            workflow_service.update_draft_workflow_conversation_variables(
                app_model=app,
                conversation_variables=variables,
                account=account,
            )

        assert workflow.conversation_variables == variables
        assert workflow.updated_by == account.id
        assert workflow.updated_at == "now"
        mock_db_session.session.commit.assert_called_once()

    def test_update_draft_workflow_conversation_variables_raises_when_missing(self, workflow_service):
        """Test update_draft_workflow_conversation_variables raises when draft missing."""
        app = TestWorkflowAssociatedDataFactory.create_app_mock()
        account = TestWorkflowAssociatedDataFactory.create_account_mock()

        with patch.object(workflow_service, "get_draft_workflow", return_value=None):
            with pytest.raises(ValueError, match="No draft workflow found."):
                workflow_service.update_draft_workflow_conversation_variables(
                    app_model=app,
                    conversation_variables=[],
                    account=account,
                )

    # ==================== Publish Workflow Tests ====================
    # These tests verify creating published versions from draft workflows

api/uv.lock  (generated, 74 changed lines)
@@ -583,6 +583,15 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/57/f4/a69c20ee4f660081a7dedb1ac57f29be9378e04edfcb90c526b923d4bebc/beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a", size = 142979, upload-time = "2023-04-07T15:02:50.77Z" },
]

[[package]]
name = "bidict"
version = "0.23.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093, upload-time = "2024-02-18T19:09:05.748Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764, upload-time = "2024-02-18T19:09:04.156Z" },
]

[[package]]
name = "billiard"
version = "4.2.3"
@@ -1392,6 +1401,7 @@ dependencies = [
    { name = "flask-restx" },
    { name = "flask-sqlalchemy" },
    { name = "gevent" },
    { name = "gevent-websocket" },
    { name = "gmpy2" },
    { name = "google-api-core" },
    { name = "google-api-python-client" },
@@ -1442,6 +1452,7 @@ dependencies = [
    { name = "pypdfium2" },
    { name = "python-docx" },
    { name = "python-dotenv" },
    { name = "python-socketio" },
    { name = "pyyaml" },
    { name = "readabilipy" },
    { name = "redis", extra = ["hiredis"] },
@@ -1591,6 +1602,7 @@ requires-dist = [
    { name = "flask-restx", specifier = "~=1.3.0" },
    { name = "flask-sqlalchemy", specifier = "~=3.1.1" },
    { name = "gevent", specifier = "~=25.9.1" },
    { name = "gevent-websocket", specifier = "~=0.10.1" },
    { name = "gmpy2", specifier = "~=2.2.1" },
    { name = "google-api-core", specifier = "==2.18.0" },
    { name = "google-api-python-client", specifier = "==2.90.0" },
@@ -1641,6 +1653,7 @@ requires-dist = [
    { name = "pypdfium2", specifier = "==5.2.0" },
    { name = "python-docx", specifier = "~=1.1.0" },
    { name = "python-dotenv", specifier = "==1.0.1" },
    { name = "python-socketio", specifier = "~=5.13.0" },
    { name = "pyyaml", specifier = "~=6.0.1" },
    { name = "readabilipy", specifier = "~=0.3.0" },
    { name = "redis", extras = ["hiredis"], specifier = "~=6.1.0" },
@@ -2213,6 +2226,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/d5/98/caf06d5d22a7c129c1fb2fc1477306902a2c8ddfd399cd26bbbd4caf2141/gevent-25.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acd6bcd5feabf22c7c5174bd3b9535ee9f088d2bbce789f740ad8d6554b18f3", size = 1682837, upload-time = "2025-09-17T19:48:47.318Z" },
]

[[package]]
name = "gevent-websocket"
version = "0.10.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "gevent" },
]
sdist = { url = "https://files.pythonhosted.org/packages/98/d2/6fa19239ff1ab072af40ebf339acd91fb97f34617c2ee625b8e34bf42393/gevent-websocket-0.10.1.tar.gz", hash = "sha256:7eaef32968290c9121f7c35b973e2cc302ffb076d018c9068d2f5ca8b2d85fb0", size = 18366, upload-time = "2017-03-12T22:46:05.68Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/7b/84/2dc373eb6493e00c884cc11e6c059ec97abae2678d42f06bf780570b0193/gevent_websocket-0.10.1-py3-none-any.whl", hash = "sha256:17b67d91282f8f4c973eba0551183fc84f56f1c90c8f6b6b30256f31f66f5242", size = 22987, upload-time = "2017-03-12T22:46:03.611Z" },
]

[[package]]
name = "gitdb"
version = "4.0.12"
@@ -5218,6 +5243,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" },
]

[[package]]
name = "python-engineio"
version = "4.12.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "simple-websocket" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/d8/63e5535ab21dc4998ba1cfe13690ccf122883a38f025dca24d6e56c05eba/python_engineio-4.12.3.tar.gz", hash = "sha256:35633e55ec30915e7fc8f7e34ca8d73ee0c080cec8a8cd04faf2d7396f0a7a7a", size = 91910, upload-time = "2025-09-28T06:31:36.765Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/d8/f0/c5aa0a69fd9326f013110653543f36ece4913c17921f3e1dbd78e1b423ee/python_engineio-4.12.3-py3-none-any.whl", hash = "sha256:7c099abb2a27ea7ab429c04da86ab2d82698cdd6c52406cb73766fe454feb7e1", size = 59637, upload-time = "2025-09-28T06:31:35.354Z" },
]

[[package]]
name = "python-http-client"
version = "3.3.7"
@@ -5274,6 +5311,19 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788, upload-time = "2024-08-07T17:33:28.192Z" },
]

[[package]]
name = "python-socketio"
version = "5.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "bidict" },
    { name = "python-engineio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/21/1a/396d50ccf06ee539fa758ce5623b59a9cb27637fc4b2dc07ed08bf495e77/python_socketio-5.13.0.tar.gz", hash = "sha256:ac4e19a0302ae812e23b712ec8b6427ca0521f7c582d6abb096e36e24a263029", size = 121125, upload-time = "2025-04-12T15:46:59.933Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/3c/32/b4fb8585d1be0f68bde7e110dffbcf354915f77ad8c778563f0ad9655c02/python_socketio-5.13.0-py3-none-any.whl", hash = "sha256:51f68d6499f2df8524668c24bcec13ba1414117cfb3a90115c559b601ab10caf", size = 77800, upload-time = "2025-04-12T15:46:58.412Z" },
]

[[package]]
name = "pytz"
version = "2025.2"
@@ -5773,6 +5823,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
]

[[package]]
name = "simple-websocket"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "wsproto" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b0/d4/bfa032f961103eba93de583b161f0e6a5b63cebb8f2c7d0c6e6efe1e3d2e/simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4", size = 17300, upload-time = "2024-10-10T22:39:31.412Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/52/59/0782e51887ac6b07ffd1570e0364cf901ebc36345fea669969d2084baebb/simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c", size = 13842, upload-time = "2024-10-10T22:39:29.645Z" },
]

[[package]]
name = "six"
version = "1.17.0"
@@ -7193,6 +7255,18 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362, upload-time = "2023-11-09T06:33:28.271Z" },
]

[[package]]
name = "wsproto"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425, upload-time = "2022-08-23T19:58:21.447Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226, upload-time = "2022-08-23T19:58:19.96Z" },
]

[[package]]
name = "xinference-client"
version = "1.2.2"
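The lock-file additions above (python-socketio, python-engineio, gevent-websocket, simple-websocket, bidict, wsproto) are the Socket.IO/WebSocket stack behind collaboration mode. As a rough orientation only, a python-socketio server running in a gevent worker is typically wired like the sketch below; the event and room names ("collaboration_event", "collaboration_update", room per workflow) mirror the tests earlier in this diff, but the actual Dify wiring lives elsewhere in this branch and may differ.

    import socketio
    from flask import Flask

    flask_app = Flask(__name__)
    # async_mode="gevent" matches the gevent/gevent-websocket worker class.
    sio = socketio.Server(async_mode="gevent", cors_allowed_origins="*")
    # Serve Socket.IO at /socket.io/ (the path proxied by nginx below) while
    # keeping the regular Flask routes available.
    app = socketio.WSGIApp(sio, flask_app)

    @sio.event
    def connect(sid, environ):
        # A real service would authenticate here and join the workflow's room.
        sio.enter_room(sid, "wf-1")

    @sio.on("collaboration_event")
    def collaboration_event(sid, payload):
        # Relay to everyone else in the workflow room, excluding the sender.
        sio.emit("collaboration_update", payload, room="wf-1", skip_sid=sid)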
@@ -129,6 +129,10 @@ MIGRATION_ENABLED=true
# The default value is 300 seconds.
FILES_ACCESS_TIMEOUT=300

# Collaboration mode toggle
# To enable collaboration features, you also need to set SERVER_WORKER_CLASS=geventwebsocket.gunicorn.workers.GeventWebSocketWorker
ENABLE_COLLABORATION_MODE=false

# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60

@@ -164,6 +168,7 @@ SERVER_WORKER_AMOUNT=1
# Modifying it may also decrease throughput.
#
# It is strongly discouraged to change this parameter.
# If collaboration mode is enabled, it must be set to geventwebsocket.gunicorn.workers.GeventWebSocketWorker
SERVER_WORKER_CLASS=gevent

# Default number of worker connections, the default is 10.
@@ -401,6 +406,8 @@ CONSOLE_CORS_ALLOW_ORIGINS=*
COOKIE_DOMAIN=
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost
NEXT_PUBLIC_BATCH_CONCURRENCY=5

# ------------------------------

@@ -139,6 +139,7 @@ services:
      APP_API_URL: ${APP_API_URL:-}
      AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
      NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
      NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
      SENTRY_DSN: ${WEB_SENTRY_DSN:-}
      NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
      TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}

@@ -33,6 +33,7 @@ x-shared-env: &shared-api-worker-env
  OPENAI_API_BASE: ${OPENAI_API_BASE:-https://api.openai.com/v1}
  MIGRATION_ENABLED: ${MIGRATION_ENABLED:-true}
  FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300}
  ENABLE_COLLABORATION_MODE: ${ENABLE_COLLABORATION_MODE:-false}
  ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60}
  REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS:-30}
  APP_DEFAULT_ACTIVE_REQUESTS: ${APP_DEFAULT_ACTIVE_REQUESTS:-0}
@@ -109,6 +110,7 @@ x-shared-env: &shared-api-worker-env
  CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
  COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
  NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
  NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
  NEXT_PUBLIC_BATCH_CONCURRENCY: ${NEXT_PUBLIC_BATCH_CONCURRENCY:-5}
  STORAGE_TYPE: ${STORAGE_TYPE:-opendal}
  OPENDAL_SCHEME: ${OPENDAL_SCHEME:-fs}
@@ -824,6 +826,7 @@ services:
      APP_API_URL: ${APP_API_URL:-}
      AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
      NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
      NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
      SENTRY_DSN: ${WEB_SENTRY_DSN:-}
      NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
      TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}

@@ -14,6 +14,14 @@ server {
        include proxy.conf;
    }

    location /socket.io/ {
        proxy_pass http://api:5001;
        include proxy.conf;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_cache_bypass $http_upgrade;
    }

    location /v1 {
        proxy_pass http://api:5001;
        include proxy.conf;

@@ -5,7 +5,7 @@ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Port $server_port;
proxy_http_version 1.1;
proxy_set_header Connection "";
# proxy_set_header Connection "";
proxy_buffering off;
proxy_read_timeout ${NGINX_PROXY_READ_TIMEOUT};
proxy_send_timeout ${NGINX_PROXY_SEND_TIMEOUT};

@@ -14,6 +14,8 @@ NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost:5001

# The API PREFIX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1

@@ -43,6 +43,8 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost:5001
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api

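The nginx block above forwards /socket.io/ to the API container and passes the Upgrade/Connection headers through so the HTTP connection can be upgraded to a WebSocket, while NEXT_PUBLIC_SOCKET_URL tells the web client where that endpoint lives. A quick way to sanity-check the proxied path is a throwaway python-socketio client like the sketch below; the URL is the local default from the env examples and real deployments will additionally require authentication, so treat this only as an illustration of the transport path.

    import socketio

    sio = socketio.Client()

    @sio.on("collaboration_update")
    def on_update(data):
        print("update from another client:", data)

    # Connects via HTTP polling first, then upgrades to a WebSocket through
    # the /socket.io/ location that nginx proxies to the API service.
    sio.connect("http://localhost", socketio_path="/socket.io")
    sio.wait()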
@ -5,7 +5,8 @@ import type { BlockEnum } from '@/app/components/workflow/types'
|
||||
import type { UpdateAppSiteCodeResponse } from '@/models/app'
|
||||
import type { App } from '@/types/app'
|
||||
import type { I18nKeysByPrefix } from '@/types/i18n'
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import * as React from 'react'
|
||||
import { useCallback, useEffect, useMemo } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import AppCard from '@/app/components/app/overview/app-card'
|
||||
@ -14,6 +15,8 @@ import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import Loading from '@/app/components/base/loading'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import MCPServiceCard from '@/app/components/tools/mcp/mcp-service-card'
|
||||
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
|
||||
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
|
||||
import { isTriggerNode } from '@/app/components/workflow/types'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import {
|
||||
@ -74,28 +77,59 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
|
||||
? buildTriggerModeMessage(t('mcp.server.title', { ns: 'tools' }))
|
||||
: null
|
||||
|
||||
const updateAppDetail = async () => {
|
||||
const updateAppDetail = useCallback(async () => {
|
||||
try {
|
||||
const res = await fetchAppDetail({ url: '/apps', id: appId })
|
||||
setAppDetail({ ...res })
|
||||
}
|
||||
catch (error) { console.error(error) }
|
||||
}
|
||||
catch (error) {
|
||||
console.error(error)
|
||||
}
|
||||
}, [appId, setAppDetail])
|
||||
|
||||
const handleCallbackResult = (err: Error | null, message?: I18nKeysByPrefix<'common', 'actionMsg.'>) => {
|
||||
const type = err ? 'error' : 'success'
|
||||
|
||||
message ||= (type === 'success' ? 'modifiedSuccessfully' : 'modifiedUnsuccessfully')
|
||||
|
||||
if (type === 'success')
|
||||
if (type === 'success') {
|
||||
updateAppDetail()
|
||||
|
||||
// Emit collaboration event to notify other clients of app state changes
|
||||
const socket = webSocketClient.getSocket(appId)
|
||||
if (socket) {
|
||||
socket.emit('collaboration_event', {
|
||||
type: 'app_state_update',
|
||||
data: { timestamp: Date.now() },
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
notify({
|
||||
type,
|
||||
message: t(`actionMsg.${message}`, { ns: 'common' }) as string,
|
||||
})
|
||||
}
|
||||
|
||||
// Listen for collaborative app state updates from other clients
|
||||
useEffect(() => {
|
||||
if (!appId)
|
||||
return
|
||||
|
||||
const unsubscribe = collaborationManager.onAppStateUpdate(async () => {
|
||||
try {
|
||||
// Update app detail when other clients modify app state
|
||||
await updateAppDetail()
|
||||
}
|
||||
catch (error) {
|
||||
console.error('app state update failed:', error)
|
||||
}
|
||||
})
|
||||
|
||||
return unsubscribe
|
||||
}, [appId, updateAppDetail])
|
||||
|
||||
const onChangeSiteStatus = async (value: boolean) => {
|
||||
const [err] = await asyncRunSafe<App>(
|
||||
updateAppSiteStatus({
|
||||
|
||||
@ -14,7 +14,7 @@ import {
|
||||
import dynamic from 'next/dynamic'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import * as React from 'react'
|
||||
import { useCallback, useState } from 'react'
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useContext } from 'use-context-selector'
|
||||
import CardView from '@/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/card-view'
|
||||
@ -22,10 +22,12 @@ import { useStore as useAppStore } from '@/app/components/app/store'
|
||||
import Button from '@/app/components/base/button'
|
||||
import ContentDialog from '@/app/components/base/content-dialog'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
|
||||
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useProviderContext } from '@/context/provider-context'
|
||||
import { copyApp, deleteApp, exportAppConfig, updateAppInfo } from '@/service/apps'
|
||||
import { copyApp, deleteApp, exportAppConfig, fetchAppDetail, updateAppInfo } from '@/service/apps'
|
||||
import { useInvalidateAppList } from '@/service/use-apps'
|
||||
import { fetchWorkflowDraft } from '@/service/workflow'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
@ -77,6 +79,19 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
|
||||
const [secretEnvList, setSecretEnvList] = useState<EnvironmentVariable[]>([])
|
||||
const [showExportWarning, setShowExportWarning] = useState(false)
|
||||
|
||||
const emitAppMetaUpdate = useCallback(() => {
|
||||
if (!appDetail?.id)
|
||||
return
|
||||
const socket = webSocketClient.getSocket(appDetail.id)
|
||||
if (socket) {
|
||||
socket.emit('collaboration_event', {
|
||||
type: 'app_meta_update',
|
||||
data: { timestamp: Date.now() },
|
||||
timestamp: Date.now(),
|
||||
})
|
||||
}
|
||||
}, [appDetail])
|
||||
|
||||
const onEdit: CreateAppModalProps['onConfirm'] = useCallback(async ({
|
||||
name,
|
||||
icon_type,
|
||||
@ -105,11 +120,12 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
|
||||
message: t('editDone', { ns: 'app' }),
|
||||
})
|
||||
setAppDetail(app)
|
||||
emitAppMetaUpdate()
|
||||
}
|
||||
catch {
|
||||
notify({ type: 'error', message: t('editFailed', { ns: 'app' }) })
|
||||
}
|
||||
}, [appDetail, notify, setAppDetail, t])
|
||||
}, [appDetail, notify, setAppDetail, t, emitAppMetaUpdate])
|
||||
|
||||
const onCopy: DuplicateAppModalProps['onConfirm'] = async ({ name, icon_type, icon, icon_background }) => {
|
||||
if (!appDetail)
|
||||
@ -207,6 +223,23 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
|
||||
setShowConfirmDelete(false)
|
||||
}, [appDetail, invalidateAppList, notify, onPlanInfoChanged, replace, setAppDetail, t])
|
||||
|
||||
useEffect(() => {
|
||||
if (!appDetail?.id)
|
||||
return
|
||||
|
||||
const unsubscribe = collaborationManager.onAppMetaUpdate(async () => {
|
||||
try {
|
||||
const res = await fetchAppDetail({ url: '/apps', id: appDetail.id })
|
||||
setAppDetail({ ...res })
|
||||
}
|
||||
catch (error) {
|
||||
console.error('failed to refresh app detail from collaboration update:', error)
|
||||
}
|
||||
})
|
||||
|
||||
return unsubscribe
|
||||
}, [appDetail?.id, setAppDetail])
|
||||
|
||||
const { isCurrentWorkspaceEditor } = useAppContext()
|
||||
|
||||
if (!appDetail)
|
||||
|
||||
@ -1,6 +1,7 @@
|
||||
import type { AppPublisherProps } from '@/app/components/app/app-publisher'
|
||||
import type { ModelAndParameter } from '@/app/components/app/configuration/debug/types'
|
||||
import type { FileUpload } from '@/app/components/base/features/types'
|
||||
import type { PublishWorkflowParams } from '@/types/workflow'
|
||||
import { produce } from 'immer'
|
||||
import * as React from 'react'
|
||||
import { useCallback, useState } from 'react'
|
||||
@ -13,7 +14,7 @@ import { SupportUploadFileTypes } from '@/app/components/workflow/types'
|
||||
import { Resolution } from '@/types/app'
|
||||
|
||||
type Props = Omit<AppPublisherProps, 'onPublish'> & {
|
||||
onPublish?: (modelAndParameter?: ModelAndParameter, features?: any) => Promise<any> | any
|
||||
onPublish?: (params?: ModelAndParameter | PublishWorkflowParams, features?: any) => Promise<any> | any
|
||||
publishedConfig?: any
|
||||
resetAppConfig?: () => void
|
||||
}
|
||||
@ -62,8 +63,8 @@ const FeaturesWrappedAppPublisher = (props: Props) => {
|
||||
setRestoreConfirmOpen(false)
|
||||
}, [featuresStore, props])
|
||||
|
||||
const handlePublish = useCallback((modelAndParameter?: ModelAndParameter) => {
|
||||
return props.onPublish?.(modelAndParameter, features)
|
||||
const handlePublish = useCallback((params?: ModelAndParameter | PublishWorkflowParams) => {
|
||||
return props.onPublish?.(params, features)
|
||||
}, [features, props])
|
||||
|
||||
return (
|
||||
|
||||
@ -1,5 +1,7 @@
|
||||
import type { ModelAndParameter } from '../configuration/debug/types'
|
||||
import type { CollaborationUpdate } from '@/app/components/workflow/collaboration/types/collaboration'
|
||||
import type { InputVar, Variable } from '@/app/components/workflow/types'
|
||||
import type { InstalledApp } from '@/models/explore'
|
||||
import type { I18nKeysByPrefix } from '@/types/i18n'
|
||||
import type { PublishWorkflowParams } from '@/types/workflow'
|
||||
import {
|
||||
@ -18,6 +20,7 @@ import { useKeyPress } from 'ahooks'
|
||||
import {
|
||||
memo,
|
||||
useCallback,
|
||||
useContext,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useState,
|
||||
@ -35,6 +38,9 @@ import {
|
||||
} from '@/app/components/base/portal-to-follow-elem'
|
||||
import UpgradeBtn from '@/app/components/billing/upgrade-btn'
|
||||
import WorkflowToolConfigureButton from '@/app/components/tools/workflow-tool/configure-button'
|
||||
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
|
||||
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
|
||||
import { WorkflowContext } from '@/app/components/workflow/context'
|
||||
import { appDefaultIconBackground } from '@/config'
|
||||
import { useGlobalPublicStore } from '@/context/global-public-context'
|
||||
import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'
|
||||
@ -43,6 +49,8 @@ import { AccessMode } from '@/models/access-control'
|
||||
import { useAppWhiteListSubjects, useGetUserCanAccessApp } from '@/service/access-control'
|
||||
import { fetchAppDetailDirect } from '@/service/apps'
|
||||
import { fetchInstalledAppList } from '@/service/explore'
|
||||
import { useInvalidateAppWorkflow } from '@/service/use-workflow'
|
||||
import { fetchPublishedWorkflow } from '@/service/workflow'
|
||||
import { AppModeEnum } from '@/types/app'
|
||||
import { basePath } from '@/utils/var'
|
||||
import Divider from '../../base/divider'
|
||||
@ -56,6 +64,10 @@ import SuggestedAction from './suggested-action'
|
||||
|
||||
type AccessModeLabel = I18nKeysByPrefix<'app', 'accessControlDialog.accessItems.'>
|
||||
|
||||
type InstalledAppsResponse = {
|
||||
installed_apps?: InstalledApp[]
|
||||
}
|
||||
|
||||
const ACCESS_MODE_MAP: Record<AccessMode, { label: AccessModeLabel, icon: React.ElementType }> = {
|
||||
[AccessMode.ORGANIZATION]: {
|
||||
label: 'organization',
|
||||
@ -102,8 +114,8 @@ export type AppPublisherProps = {
|
||||
debugWithMultipleModel?: boolean
|
||||
multipleModelConfigs?: ModelAndParameter[]
|
||||
/** modelAndParameter is passed when debugWithMultipleModel is true */
|
||||
onPublish?: (params?: any) => Promise<any> | any
|
||||
onRestore?: () => Promise<any> | any
|
||||
onPublish?: (params?: ModelAndParameter | PublishWorkflowParams) => Promise<void> | void
|
||||
onRestore?: () => Promise<void> | void
|
||||
onToggle?: (state: boolean) => void
|
||||
crossAxisOffset?: number
|
||||
toolPublished?: boolean
|
||||
@ -146,6 +158,7 @@ const AppPublisher = ({
|
||||
const [isAppAccessSet, setIsAppAccessSet] = useState(true)
|
||||
const [embeddingModalOpen, setEmbeddingModalOpen] = useState(false)
|
||||
|
||||
const workflowStore = useContext(WorkflowContext)
|
||||
const appDetail = useAppStore(state => state.appDetail)
|
||||
const setAppDetail = useAppStore(s => s.setAppDetail)
|
||||
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
|
||||
@ -158,6 +171,7 @@ const AppPublisher = ({
|
||||
|
||||
const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp, refetch } = useGetUserCanAccessApp({ appId: appDetail?.id, enabled: false })
|
||||
const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)
|
||||
const invalidateAppWorkflow = useInvalidateAppWorkflow()
|
||||
const openAsyncWindow = useAsyncWindowOpen()
|
||||
|
||||
const noAccessPermission = useMemo(() => systemFeatures.webapp_auth.enabled && appDetail && appDetail.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result, [systemFeatures, appDetail, userCanAccessApp])
|
||||
@ -193,12 +207,39 @@ const AppPublisher = ({
|
||||
try {
|
||||
await onPublish?.(params)
|
||||
setPublished(true)
|
||||
|
||||
const appId = appDetail?.id
|
||||
const socket = appId ? webSocketClient.getSocket(appId) : null
|
||||
console.warn('[app-publisher] publish success', {
|
||||
appId,
|
||||
hasSocket: Boolean(socket),
|
||||
})
|
||||
if (appId)
|
||||
invalidateAppWorkflow(appId)
|
||||
else
|
||||
console.warn('[app-publisher] missing appId, skip workflow invalidate and socket emit')
|
||||
if (socket) {
|
||||
const timestamp = Date.now()
|
||||
socket.emit('collaboration_event', {
|
||||
type: 'app_publish_update',
|
||||
data: {
|
||||
action: 'published',
|
||||
timestamp,
|
||||
},
|
||||
timestamp,
|
||||
})
|
||||
}
|
||||
else if (appId) {
|
||||
console.warn('[app-publisher] socket not ready, skip collaboration_event emit', { appId })
|
||||
}
|
||||
|
||||
trackEvent('app_published_time', { action_mode: 'app', app_id: appDetail?.id, app_name: appDetail?.name })
|
||||
}
|
||||
catch {
|
||||
catch (error) {
|
||||
console.warn('[app-publisher] publish failed', error)
|
||||
setPublished(false)
|
||||
}
|
||||
}, [appDetail, onPublish])
|
||||
}, [appDetail, onPublish, invalidateAppWorkflow])
|
||||
|
||||
const handleRestore = useCallback(async () => {
|
||||
try {
|
||||
@ -227,9 +268,10 @@ const AppPublisher = ({
|
||||
await openAsyncWindow(async () => {
|
||||
if (!appDetail?.id)
|
||||
throw new Error('App not found')
|
||||
const { installed_apps }: any = await fetchInstalledAppList(appDetail?.id) || {}
|
||||
if (installed_apps?.length > 0)
|
||||
return `${basePath}/explore/installed/${installed_apps[0].id}`
|
||||
const response = (await fetchInstalledAppList(appDetail?.id)) as InstalledAppsResponse
|
||||
const installedApps = response?.installed_apps
|
||||
if (installedApps?.length)
|
||||
return `${basePath}/explore/installed/${installedApps[0].id}`
|
||||
throw new Error('No app found in Explore')
|
||||
}, {
|
||||
onError: (err) => {
|
||||
@ -257,6 +299,29 @@ const AppPublisher = ({
|
||||
handlePublish()
|
||||
}, { exactMatch: true, useCapture: true })
|
||||
|
||||
useEffect(() => {
|
||||
const appId = appDetail?.id
|
||||
if (!appId)
|
||||
return
|
||||
|
||||
const unsubscribe = collaborationManager.onAppPublishUpdate((update: CollaborationUpdate) => {
|
||||
const action = typeof update.data.action === 'string' ? update.data.action : undefined
|
||||
if (action === 'published') {
|
||||
invalidateAppWorkflow(appId)
|
||||
fetchPublishedWorkflow(`/apps/${appId}/workflows/publish`)
|
||||
.then((publishedWorkflow) => {
|
||||
if (publishedWorkflow?.created_at)
|
||||
workflowStore?.getState().setPublishedAt(publishedWorkflow.created_at)
|
||||
})
|
||||
.catch((error) => {
|
||||
console.warn('[app-publisher] refresh published workflow failed', error)
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
return unsubscribe
|
||||
}, [appDetail?.id, invalidateAppWorkflow, workflowStore])
|
||||
|
||||
const hasPublishedVersion = !!publishedAt
|
||||
const workflowToolDisabled = !hasPublishedVersion || !workflowToolAvailable
|
||||
const workflowToolMessage = workflowToolDisabled ? t('common.workflowAsToolDisabledHint', { ns: 'workflow' }) : undefined
|
||||
|
||||
@ -18,6 +18,7 @@ import type {
|
||||
TextToSpeechConfig,
|
||||
} from '@/models/debug'
|
||||
import type { ModelConfig as BackendModelConfig, UserInputFormItem, VisionSettings } from '@/types/app'
|
||||
import type { PublishWorkflowParams } from '@/types/workflow'
|
||||
import { CodeBracketIcon } from '@heroicons/react/20/solid'
|
||||
import { useBoolean, useGetState } from 'ahooks'
|
||||
import { clone } from 'es-toolkit/object'
|
||||
@ -760,7 +761,8 @@ const Configuration: FC = () => {
|
||||
else { return promptEmpty }
|
||||
})()
|
||||
const contextVarEmpty = mode === AppModeEnum.COMPLETION && dataSets.length > 0 && !hasSetContextVar
|
||||
const onPublish = async (modelAndParameter?: ModelAndParameter, features?: FeaturesData) => {
|
||||
const onPublish = async (params?: ModelAndParameter | PublishWorkflowParams, features?: FeaturesData) => {
|
||||
const modelAndParameter = params && 'model' in params ? params : undefined
|
||||
const modelId = modelAndParameter?.model || modelConfig.model_id
|
||||
const promptTemplate = modelConfig.configs.prompt_template
|
||||
const promptVariables = modelConfig.configs.prompt_variables
|
||||
|
||||
@ -1,6 +1,3 @@
|
||||
/**
|
||||
* @vitest-environment jsdom
|
||||
*/
|
||||
import type { ReactNode } from 'react'
|
||||
import type { ModalContextState } from '@/context/modal-context'
|
||||
import type { ProviderContextState } from '@/context/provider-context'
|
||||
|
||||
@ -5,6 +5,7 @@ import type { HtmlContentProps } from '@/app/components/base/popover'
|
||||
import type { Tag } from '@/app/components/base/tag-management/constant'
|
||||
import type { CreateAppModalProps } from '@/app/components/explore/create-app-modal'
|
||||
import type { EnvironmentVariable } from '@/app/components/workflow/types'
|
||||
import type { WorkflowOnlineUser } from '@/models/app'
|
||||
import type { App } from '@/types/app'
|
||||
import { RiBuildingLine, RiGlobalLine, RiLockLine, RiMoreFill, RiVerifiedBadgeLine } from '@remixicon/react'
|
||||
import dynamic from 'next/dynamic'
|
||||
@ -20,6 +21,7 @@ import CustomPopover from '@/app/components/base/popover'
|
||||
import TagSelector from '@/app/components/base/tag-management/selector'
|
||||
import Toast, { ToastContext } from '@/app/components/base/toast'
|
||||
import Tooltip from '@/app/components/base/tooltip'
|
||||
import { UserAvatarList } from '@/app/components/base/user-avatar-list'
|
||||
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
|
||||
import { useAppContext } from '@/context/app-context'
|
||||
import { useGlobalPublicStore } from '@/context/global-public-context'
|
||||
@ -58,9 +60,10 @@ const AccessControl = dynamic(() => import('@/app/components/app/app-access-cont
|
||||
export type AppCardProps = {
|
||||
app: App
|
||||
onRefresh?: () => void
|
||||
onlineUsers?: WorkflowOnlineUser[]
|
||||
}
|
||||
|
||||
const AppCard = ({ app, onRefresh }: AppCardProps) => {
|
||||
const AppCard = ({ app, onRefresh, onlineUsers = [] }: AppCardProps) => {
|
||||
const { t } = useTranslation()
|
||||
const { notify } = useContext(ToastContext)
|
||||
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
|
||||
@ -348,6 +351,19 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
|
||||
return `${t('segment.editedAt', { ns: 'datasetDocuments' })} ${timeText}`
|
||||
}, [app.updated_at, app.created_at])
|
||||
|
||||
const onlineUserAvatars = useMemo(() => {
|
||||
if (!onlineUsers.length)
|
||||
return []
|
||||
|
||||
return onlineUsers
|
||||
.map(user => ({
|
||||
id: user.user_id || user.sid || '',
|
||||
name: user.username || 'User',
|
||||
avatar_url: user.avatar || undefined,
|
||||
}))
|
||||
.filter(user => !!user.id)
|
||||
}, [onlineUsers])
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
@ -400,6 +416,11 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
|
||||
</Tooltip>
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
{onlineUserAvatars.length > 0 && (
|
||||
<UserAvatarList users={onlineUserAvatars} maxVisible={3} size={20} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
<div className="title-wrapper h-[90px] px-[14px] text-xs leading-normal text-text-tertiary">
|
||||
<div
|
||||
|
||||
@ -1,3 +1,4 @@
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import { act, fireEvent, render, screen } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
import { useStore as useTagStore } from '@/app/components/base/tag-management/store'
|
||||
@ -141,9 +142,13 @@ vi.mock('@/app/components/base/tag-management/filter', () => ({
|
||||
}))
|
||||
|
||||
// Mock config
|
||||
vi.mock('@/config', () => ({
|
||||
NEED_REFRESH_APP_LIST_KEY: 'needRefreshAppList',
|
||||
}))
|
||||
vi.mock('@/config', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof import('@/config')>()
|
||||
return {
|
||||
...actual,
|
||||
NEED_REFRESH_APP_LIST_KEY: 'needRefreshAppList',
|
||||
}
|
||||
})
|
||||
|
||||
// Mock pay hook
|
||||
vi.mock('@/hooks/use-pay', () => ({
|
||||
@ -234,6 +239,21 @@ beforeAll(() => {
|
||||
} as unknown as typeof IntersectionObserver
|
||||
})
|
||||
|
||||
const renderList = () => {
|
||||
const queryClient = new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: {
|
||||
retry: false,
|
||||
},
|
||||
},
|
||||
})
|
||||
return render(
|
||||
<QueryClientProvider client={queryClient}>
|
||||
<List />
|
||||
</QueryClientProvider>,
|
||||
)
|
||||
}
|
||||
|
||||
describe('List', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
@ -260,13 +280,13 @@ describe('List', () => {
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render without crashing', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
// Tab slider renders app type tabs
|
||||
expect(screen.getByText('app.types.all')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render tab slider with all app types', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByText('app.types.all')).toBeInTheDocument()
|
||||
expect(screen.getByText('app.types.workflow')).toBeInTheDocument()
|
||||
@ -277,48 +297,48 @@ describe('List', () => {
|
||||
})
|
||||
|
||||
it('should render search input', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
// Input component renders a searchbox
|
||||
expect(screen.getByRole('textbox')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render tag filter', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
// Tag filter renders with placeholder text
|
||||
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render created by me checkbox', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render app cards when apps exist', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByTestId('app-card-app-1')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('app-card-app-2')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render new app card for editors', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
expect(screen.getByTestId('new-app-card')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render footer when branding is disabled', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
expect(screen.getByTestId('footer')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render drop DSL hint for editors', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
expect(screen.getByText('app.newApp.dropDSLToCreateApp')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Tab Navigation', () => {
|
||||
it('should call setActiveTab when tab is clicked', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
fireEvent.click(screen.getByText('app.types.workflow'))
|
||||
|
||||
@ -326,7 +346,7 @@ describe('List', () => {
|
||||
})
|
||||
|
||||
it('should call setActiveTab for all tab', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
fireEvent.click(screen.getByText('app.types.all'))
|
||||
|
||||
@ -336,12 +356,12 @@ describe('List', () => {
|
||||
|
||||
describe('Search Functionality', () => {
|
||||
it('should render search input field', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
expect(screen.getByRole('textbox')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle search input change', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
const input = screen.getByRole('textbox')
|
||||
fireEvent.change(input, { target: { value: 'test search' } })
|
||||
@ -350,7 +370,7 @@ describe('List', () => {
|
||||
})
|
||||
|
||||
it('should handle search input interaction', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
const input = screen.getByRole('textbox')
|
||||
expect(input).toBeInTheDocument()
|
||||
@ -360,7 +380,7 @@ describe('List', () => {
|
||||
// Set initial keywords to make clear button visible
|
||||
mockQueryState.keywords = 'existing search'
|
||||
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Find and click clear button (Input component uses .group class for clear icon container)
|
||||
const clearButton = document.querySelector('.group')
|
||||
@ -375,12 +395,12 @@ describe('List', () => {
|
||||
|
||||
describe('Tag Filter', () => {
|
||||
it('should render tag filter component', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render tag filter with placeholder', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Tag filter is rendered
|
||||
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
|
||||
@ -389,12 +409,12 @@ describe('List', () => {
|
||||
|
||||
describe('Created By Me Filter', () => {
|
||||
it('should render checkbox with correct label', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle checkbox change', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Checkbox component uses data-testid="checkbox-{id}"
|
||||
// CheckboxWithLabel doesn't pass testId, so id is undefined
|
||||
@ -409,7 +429,7 @@ describe('List', () => {
|
||||
it('should not render new app card for non-editors', () => {
|
||||
mockIsCurrentWorkspaceEditor.mockReturnValue(false)
|
||||
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.queryByTestId('new-app-card')).not.toBeInTheDocument()
|
||||
})
|
||||
@ -417,7 +437,7 @@ describe('List', () => {
|
||||
it('should not render drop DSL hint for non-editors', () => {
|
||||
mockIsCurrentWorkspaceEditor.mockReturnValue(false)
|
||||
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.queryByText(/drop dsl file to create app/i)).not.toBeInTheDocument()
|
||||
})
|
||||
@ -427,7 +447,7 @@ describe('List', () => {
|
||||
it('should redirect dataset operators to datasets page', () => {
|
||||
mockIsCurrentWorkspaceDatasetOperator.mockReturnValue(true)
|
||||
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(mockReplace).toHaveBeenCalledWith('/datasets')
|
||||
})
|
||||
@ -437,7 +457,7 @@ describe('List', () => {
|
||||
it('should call refetch when refresh key is set in localStorage', () => {
|
||||
localStorage.setItem('needRefreshAppList', '1')
|
||||
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(mockRefetch).toHaveBeenCalled()
|
||||
expect(localStorage.getItem('needRefreshAppList')).toBeNull()
|
||||
@ -446,22 +466,23 @@ describe('List', () => {
|
||||
|
||||
describe('Edge Cases', () => {
|
||||
it('should handle multiple renders without issues', () => {
|
||||
const { rerender } = render(<List />)
|
||||
const { unmount } = renderList()
|
||||
expect(screen.getByText('app.types.all')).toBeInTheDocument()
|
||||
|
||||
rerender(<List />)
|
||||
unmount()
|
||||
renderList()
|
||||
expect(screen.getByText('app.types.all')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render app cards correctly', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByText('Test App 1')).toBeInTheDocument()
|
||||
expect(screen.getByText('Test App 2')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render with all filter options visible', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByRole('textbox')).toBeInTheDocument()
|
||||
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
|
||||
@ -471,14 +492,14 @@ describe('List', () => {
|
||||
|
||||
describe('Dragging State', () => {
|
||||
it('should show drop hint when DSL feature is enabled for editors', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
expect(screen.getByText('app.newApp.dropDSLToCreateApp')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('App Type Tabs', () => {
|
||||
it('should render all app type tabs', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByText('app.types.all')).toBeInTheDocument()
|
||||
expect(screen.getByText('app.types.workflow')).toBeInTheDocument()
|
||||
@ -489,7 +510,7 @@ describe('List', () => {
|
||||
})
|
||||
|
||||
it('should call setActiveTab for each app type', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
const appTypeTexts = [
|
||||
{ mode: AppModeEnum.WORKFLOW, text: 'app.types.workflow' },
|
||||
@ -508,7 +529,7 @@ describe('List', () => {
|
||||
|
||||
describe('Search and Filter Integration', () => {
|
||||
it('should display search input with correct attributes', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
const input = screen.getByRole('textbox')
|
||||
expect(input).toBeInTheDocument()
|
||||
@ -516,13 +537,13 @@ describe('List', () => {
|
||||
})
|
||||
|
||||
it('should have tag filter component', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display created by me label', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
|
||||
})
|
||||
@ -530,14 +551,14 @@ describe('List', () => {
|
||||
|
||||
describe('App List Display', () => {
|
||||
it('should display all app cards from data', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByTestId('app-card-app-1')).toBeInTheDocument()
|
||||
expect(screen.getByTestId('app-card-app-2')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should display app names correctly', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByText('Test App 1')).toBeInTheDocument()
|
||||
expect(screen.getByText('Test App 2')).toBeInTheDocument()
|
||||
@ -546,7 +567,7 @@ describe('List', () => {
|
||||
|
||||
describe('Footer Visibility', () => {
|
||||
it('should render footer when branding is disabled', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByTestId('footer')).toBeInTheDocument()
|
||||
})
|
||||
@ -558,14 +579,14 @@ describe('List', () => {
|
||||
describe('Additional Coverage', () => {
|
||||
it('should render dragging state overlay when dragging', () => {
|
||||
mockDragging = true
|
||||
const { container } = render(<List />)
|
||||
const { container } = renderList()
|
||||
|
||||
// Component should render successfully with dragging state
|
||||
expect(container).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should handle app mode filter in query params', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
const workflowTab = screen.getByText('app.types.workflow')
|
||||
fireEvent.click(workflowTab)
|
||||
@ -574,7 +595,7 @@ describe('List', () => {
|
||||
})
|
||||
|
||||
it('should render new app card for editors', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
expect(screen.getByTestId('new-app-card')).toBeInTheDocument()
|
||||
})
|
||||
@ -582,7 +603,7 @@ describe('List', () => {
|
||||
|
||||
describe('DSL File Drop', () => {
|
||||
it('should handle DSL file drop and show modal', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Simulate DSL file drop via the callback
|
||||
const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
|
||||
@ -596,7 +617,7 @@ describe('List', () => {
|
||||
})
|
||||
|
||||
it('should close DSL modal when onClose is called', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Open modal via DSL file drop
|
||||
const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
|
||||
@ -614,7 +635,7 @@ describe('List', () => {
|
||||
})
|
||||
|
||||
it('should close DSL modal and refetch when onSuccess is called', () => {
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Open modal via DSL file drop
|
||||
const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
|
||||
@ -637,7 +658,7 @@ describe('List', () => {
|
||||
describe('Tag Filter Change', () => {
|
||||
it('should handle tag filter value change', () => {
|
||||
vi.useFakeTimers()
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// TagFilter component is rendered
|
||||
expect(screen.getByTestId('tag-filter')).toBeInTheDocument()
|
||||
@ -661,7 +682,7 @@ describe('List', () => {
|
||||
|
||||
it('should handle empty tag filter selection', () => {
|
||||
vi.useFakeTimers()
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Trigger tag filter change with empty array
|
||||
act(() => {
|
||||
@ -683,7 +704,7 @@ describe('List', () => {
|
||||
describe('Infinite Scroll', () => {
|
||||
it('should call fetchNextPage when intersection observer triggers', () => {
|
||||
mockServiceState.hasNextPage = true
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Simulate intersection
|
||||
if (intersectionCallback) {
|
||||
@ -700,7 +721,7 @@ describe('List', () => {
|
||||
|
||||
it('should not call fetchNextPage when not intersecting', () => {
|
||||
mockServiceState.hasNextPage = true
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
// Simulate non-intersection
|
||||
if (intersectionCallback) {
|
||||
@ -718,7 +739,7 @@ describe('List', () => {
|
||||
it('should not call fetchNextPage when loading', () => {
|
||||
mockServiceState.hasNextPage = true
|
||||
mockServiceState.isLoading = true
|
||||
render(<List />)
|
||||
renderList()
|
||||
|
||||
if (intersectionCallback) {
|
||||
act(() => {
|
||||
@ -736,7 +757,7 @@ describe('List', () => {
|
||||
describe('Error State', () => {
|
||||
it('should handle error state in useEffect', () => {
|
||||
mockServiceState.error = new Error('Test error')
|
||||
const { container } = render(<List />)
|
||||
const { container } = renderList()
|
||||
|
||||
// Component should still render
|
||||
expect(container).toBeInTheDocument()
|
||||
|
||||
@@ -9,13 +9,14 @@ import {
  RiMessage3Line,
  RiRobot3Line,
} from '@remixicon/react'
import { useQuery } from '@tanstack/react-query'
import { useDebounceFn } from 'ahooks'
import dynamic from 'next/dynamic'
import {
  useRouter,
} from 'next/navigation'
import { parseAsString, useQueryState } from 'nuqs'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Input from '@/app/components/base/input'
import TabSliderNew from '@/app/components/base/tab-slider-new'
@@ -26,6 +27,7 @@ import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { CheckModal } from '@/hooks/use-pay'
import { fetchWorkflowOnlineUsers } from '@/service/apps'
import { useInfiniteAppList } from '@/service/use-apps'
import { AppModeEnum } from '@/types/app'
import { cn } from '@/utils/classnames'
@@ -116,6 +118,37 @@ const List: FC<Props> = ({
    refetch,
  } = useInfiniteAppList(appListQueryParams, { enabled: !isCurrentWorkspaceDatasetOperator })

  const apps = useMemo(() => data?.pages?.flatMap(page => page.data) ?? [], [data])

  const workflowIds = useMemo(() => {
    const ids = new Set<string>()
    apps.forEach((appItem) => {
      const workflowId = appItem.id
      if (!workflowId)
        return

      if (appItem.mode === 'workflow' || appItem.mode === 'advanced-chat')
        ids.add(workflowId)
    })
    return Array.from(ids)
  }, [apps])

  const { data: onlineUsersByWorkflow = {}, refetch: refreshOnlineUsers } = useQuery({
    queryKey: ['apps', 'workflow-online-users', workflowIds],
    queryFn: () => fetchWorkflowOnlineUsers({ workflowIds }),
    enabled: workflowIds.length > 0,
  })

  useEffect(() => {
    const timer = window.setInterval(() => {
      refetch()
      if (workflowIds.length)
        refreshOnlineUsers()
    }, 10000)

    return () => window.clearInterval(timer)
  }, [workflowIds, refetch, refreshOnlineUsers])

  useEffect(() => {
    if (controlRefreshList > 0) {
      refetch()
@@ -254,7 +287,7 @@ const List: FC<Props> = ({

    if (hasAnyApp) {
      return pages.flatMap(({ data: apps }) => apps).map(app => (
        <AppCard key={app.id} app={app} onRefresh={refetch} />
        <AppCard key={app.id} app={app} onRefresh={refetch} onlineUsers={onlineUsersByWorkflow?.[app.id] ?? []} />
      ))
    }

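For orientation, here is a minimal sketch of the data flow this hunk introduces; the response shape of fetchWorkflowOnlineUsers is an assumption made for illustration, not something this diff confirms:

// Assumed shape (illustrative): a map from app/workflow id to the users active on it.
type OnlineUser = { id: string, name: string, avatar_url?: string | null }
type OnlineUsersByWorkflow = Record<string, OnlineUser[]>

// With the 10-second interval above, every poll refetches the app list and, when any
// workflow or advanced-chat apps exist, refreshes this map; each AppCard then reads
// its own entry and falls back to an empty list when nobody is online.
const sample: OnlineUsersByWorkflow = {
  'app-1': [{ id: 'u-1', name: 'Alice', avatar_url: null }],
}
const onlineUsersForApp1 = sample['app-1'] ?? [] // [{ id: 'u-1', name: 'Alice', ... }]
const onlineUsersForApp2 = sample['app-2'] ?? [] // []
console.log(onlineUsersForApp1.length, onlineUsersForApp2.length) // 1 0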
@@ -35,12 +35,14 @@ describe('Avatar', () => {
  it.each([
    { size: undefined, expected: '30px', label: 'default (30px)' },
    { size: 50, expected: '50px', label: 'custom (50px)' },
  ])('should apply $label size to img element', ({ size, expected }) => {
  ])('should apply $label size to avatar container', ({ size, expected }) => {
    const props = { name: 'Test', avatar: 'https://example.com/avatar.jpg', size }

    render(<Avatar {...props} />)

    expect(screen.getByRole('img')).toHaveStyle({
    const img = screen.getByRole('img')
    const wrapper = img.parentElement as HTMLElement
    expect(wrapper).toHaveStyle({
      width: expected,
      height: expected,
      fontSize: expected,
@@ -60,7 +62,7 @@ describe('Avatar', () => {
  })

  describe('className prop', () => {
    it('should merge className with default avatar classes on img', () => {
    it('should merge className with default avatar classes on container', () => {
      const props = {
        name: 'Test',
        avatar: 'https://example.com/avatar.jpg',
@@ -70,8 +72,9 @@ describe('Avatar', () => {
      render(<Avatar {...props} />)

      const img = screen.getByRole('img')
      expect(img).toHaveClass('custom-class')
      expect(img).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600')
      const wrapper = img.parentElement as HTMLElement
      expect(wrapper).toHaveClass('custom-class')
      expect(wrapper).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600')
    })

    it('should merge className with default avatar classes on fallback div', () => {
@@ -277,10 +280,11 @@ describe('Avatar', () => {
      render(<Avatar {...props} />)

      const img = screen.getByRole('img')
      const wrapper = img.parentElement as HTMLElement
      expect(img).toHaveAttribute('alt', 'Test User')
      expect(img).toHaveAttribute('src', 'https://example.com/avatar.jpg')
      expect(img).toHaveStyle({ width: '64px', height: '64px' })
      expect(img).toHaveClass('custom-avatar')
      expect(wrapper).toHaveStyle({ width: '64px', height: '64px' })
      expect(wrapper).toHaveClass('custom-avatar')

      // Trigger load to verify onError callback
      fireEvent.load(img)

@@ -9,6 +9,7 @@ export type AvatarProps = {
  className?: string
  textClassName?: string
  onError?: (x: boolean) => void
  backgroundColor?: string
}
const Avatar = ({
  name,
@@ -17,9 +18,18 @@ const Avatar = ({
  className,
  textClassName,
  onError,
  backgroundColor,
}: AvatarProps) => {
  const avatarClassName = 'shrink-0 flex items-center rounded-full bg-primary-600'
  const style = { width: `${size}px`, height: `${size}px`, fontSize: `${size}px`, lineHeight: `${size}px` }
  const avatarClassName = backgroundColor
    ? 'shrink-0 flex items-center rounded-full'
    : 'shrink-0 flex items-center rounded-full bg-primary-600'
  const style = {
    width: `${size}px`,
    height: `${size}px`,
    fontSize: `${size}px`,
    lineHeight: `${size}px`,
    ...(backgroundColor && !avatar ? { backgroundColor } : {}),
  }
  const [imgError, setImgError] = useState(false)

  const handleError = () => {
@@ -35,14 +45,18 @@ const Avatar = ({

  if (avatar && !imgError) {
    return (
      <img
      <span
        className={cn(avatarClassName, className)}
        style={style}
        alt={name}
        src={avatar}
        onError={handleError}
        onLoad={() => onError?.(false)}
      />
      >
        <img
          className="h-full w-full rounded-full object-cover"
          alt={name}
          src={avatar}
          onError={handleError}
          onLoad={() => onError?.(false)}
        />
      </span>
    )
  }

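A short usage sketch of the reworked Avatar, under the assumption that the letter fallback branch (not shown in this hunk) is otherwise unchanged; the colour and URL below are illustrative:

import Avatar from '@/app/components/base/avatar'

const AvatarExamples = () => (
  <div className="flex items-center -space-x-1">
    {/* No avatar URL: backgroundColor replaces bg-primary-600 behind the name initial. */}
    <Avatar name="Alice" size={20} backgroundColor="#12B76A" />
    {/* With an avatar URL: the outer <span> carries size and className, the <img> covers it. */}
    <Avatar name="Bob" avatar="https://example.com/bob.png" size={20} />
  </div>
)

export default AvatarExamples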
@@ -19,7 +19,7 @@ const ContentDialog = ({
    <Transition
      show={show}
      as="div"
      className="absolute left-0 top-0 z-30 box-border h-full w-full p-2"
      className="absolute left-0 top-0 z-[70] box-border h-full w-full p-2"
    >
      <TransitionChild>
        <div
@ -0,0 +1,4 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M0 4C0 1.79086 1.79086 0 4 0H12C14.2091 0 16 1.79086 16 4V12C16 14.2091 14.2091 16 12 16H4C1.79086 16 0 14.2091 0 12V4Z" fill="white" fill-opacity="0.12"/>
|
||||
<path d="M3.42756 8.7358V7.62784H10.8764C11.2003 7.62784 11.4957 7.5483 11.7628 7.3892C12.0298 7.23011 12.2415 7.01705 12.3977 6.75C12.5568 6.48295 12.6364 6.1875 12.6364 5.86364C12.6364 5.53977 12.5568 5.24574 12.3977 4.98153C12.2386 4.71449 12.0256 4.50142 11.7585 4.34233C11.4943 4.18324 11.2003 4.10369 10.8764 4.10369H10.3991V3H10.8764C11.4048 3 11.8849 3.12926 12.3168 3.38778C12.7486 3.64631 13.0938 3.99148 13.3523 4.4233C13.6108 4.85511 13.7401 5.33523 13.7401 5.86364C13.7401 6.25852 13.6648 6.62926 13.5142 6.97585C13.3665 7.32244 13.1619 7.62784 12.9006 7.89205C12.6392 8.15625 12.3352 8.36364 11.9886 8.5142C11.642 8.66193 11.2713 8.7358 10.8764 8.7358H3.42756ZM6.16761 12.0554L2.29403 8.18182L6.16761 4.30824L6.9304 5.07102L3.81534 8.18182L6.9304 11.2926L6.16761 12.0554Z" fill="white"/>
|
||||
</svg>
|
||||
@ -0,0 +1,3 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="12" viewBox="0 0 14 12" fill="none">
|
||||
<path d="M12.3334 4C12.3334 2.52725 11.1395 1.33333 9.66671 1.33333H4.33337C2.86062 1.33333 1.66671 2.52724 1.66671 4V10.6667H9.66671C11.1395 10.6667 12.3334 9.47274 12.3334 8V4ZM7.66671 6.66667V8H4.33337V6.66667H7.66671ZM9.66671 4V5.33333H4.33337V4H9.66671ZM13.6667 8C13.6667 10.2091 11.8758 12 9.66671 12H0.333374V4C0.333374 1.79086 2.12424 0 4.33337 0H9.66671C11.8758 0 13.6667 1.79086 13.6667 4V8Z" fill="currentColor"/>
|
||||
</svg>
|
||||
@ -0,0 +1,36 @@
|
||||
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"width": "16",
|
||||
"height": "16",
|
||||
"viewBox": "0 0 16 16",
|
||||
"fill": "none",
|
||||
"xmlns": "http://www.w3.org/2000/svg"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M0 4C0 1.79086 1.79086 0 4 0H12C14.2091 0 16 1.79086 16 4V12C16 14.2091 14.2091 16 12 16H4C1.79086 16 0 14.2091 0 12V4Z",
|
||||
"fill": "white",
|
||||
"fill-opacity": "0.12"
|
||||
},
|
||||
"children": []
|
||||
},
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M3.42756 8.7358V7.62784H10.8764C11.2003 7.62784 11.4957 7.5483 11.7628 7.3892C12.0298 7.23011 12.2415 7.01705 12.3977 6.75C12.5568 6.48295 12.6364 6.1875 12.6364 5.86364C12.6364 5.53977 12.5568 5.24574 12.3977 4.98153C12.2386 4.71449 12.0256 4.50142 11.7585 4.34233C11.4943 4.18324 11.2003 4.10369 10.8764 4.10369H10.3991V3H10.8764C11.4048 3 11.8849 3.12926 12.3168 3.38778C12.7486 3.64631 13.0938 3.99148 13.3523 4.4233C13.6108 4.85511 13.7401 5.33523 13.7401 5.86364C13.7401 6.25852 13.6648 6.62926 13.5142 6.97585C13.3665 7.32244 13.1619 7.62784 12.9006 7.89205C12.6392 8.15625 12.3352 8.36364 11.9886 8.5142C11.642 8.66193 11.2713 8.7358 10.8764 8.7358H3.42756ZM6.16761 12.0554L2.29403 8.18182L6.16761 4.30824L6.9304 5.07102L3.81534 8.18182L6.9304 11.2926L6.16761 12.0554Z",
|
||||
"fill": "white"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "EnterKey"
|
||||
}
|
||||
web/app/components/base/icons/src/public/common/EnterKey.tsx (new file, 20 lines)
@@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './EnterKey.json'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'EnterKey'

export default Icon
@@ -1,6 +1,7 @@
export { default as D } from './D'
export { default as DiagonalDividingLine } from './DiagonalDividingLine'
export { default as Dify } from './Dify'
export { default as EnterKey } from './EnterKey'
export { default as Gdpr } from './Gdpr'
export { default as Github } from './Github'
export { default as Highlight } from './Highlight'

web/app/components/base/icons/src/public/other/Comment.json (new file, 26 lines)
@ -0,0 +1,26 @@
|
||||
{
|
||||
"icon": {
|
||||
"type": "element",
|
||||
"isRootNode": true,
|
||||
"name": "svg",
|
||||
"attributes": {
|
||||
"xmlns": "http://www.w3.org/2000/svg",
|
||||
"width": "14",
|
||||
"height": "12",
|
||||
"viewBox": "0 0 14 12",
|
||||
"fill": "none"
|
||||
},
|
||||
"children": [
|
||||
{
|
||||
"type": "element",
|
||||
"name": "path",
|
||||
"attributes": {
|
||||
"d": "M12.3334 4C12.3334 2.52725 11.1395 1.33333 9.66671 1.33333H4.33337C2.86062 1.33333 1.66671 2.52724 1.66671 4V10.6667H9.66671C11.1395 10.6667 12.3334 9.47274 12.3334 8V4ZM7.66671 6.66667V8H4.33337V6.66667H7.66671ZM9.66671 4V5.33333H4.33337V4H9.66671ZM13.6667 8C13.6667 10.2091 11.8758 12 9.66671 12H0.333374V4C0.333374 1.79086 2.12424 0 4.33337 0H9.66671C11.8758 0 13.6667 1.79086 13.6667 4V8Z",
|
||||
"fill": "currentColor"
|
||||
},
|
||||
"children": []
|
||||
}
|
||||
]
|
||||
},
|
||||
"name": "Comment"
|
||||
}
|
||||
web/app/components/base/icons/src/public/other/Comment.tsx (new file, 20 lines)
@@ -0,0 +1,20 @@
// GENERATE BY script
// DON NOT EDIT IT MANUALLY

import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Comment.json'

const Icon = (
  {
    ref,
    ...props
  }: React.SVGProps<SVGSVGElement> & {
    ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
  },
) => <IconBase {...props} ref={ref} data={data as IconData} />

Icon.displayName = 'Comment'

export default Icon
@@ -1,3 +1,4 @@
export { default as Comment } from './Comment'
export { default as DefaultToolIcon } from './DefaultToolIcon'
export { default as Icon3Dots } from './Icon3Dots'
export { default as Message3Fill } from './Message3Fill'

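Both generated icons are re-exported through the barrel files above; a hedged usage sketch (the import paths mirror those index files, and the surrounding button is purely illustrative):

import { EnterKey } from '@/app/components/base/icons/src/public/common'
import { Comment } from '@/app/components/base/icons/src/public/other'

// Illustrative only: pair the comment icon with the enter-key hint in a toolbar button.
const CommentButton = () => (
  <button type="button" className="flex items-center gap-1">
    <Comment className="h-3 w-3" />
    <span>Comment</span>
    <EnterKey className="h-4 w-4" />
  </button>
)

export default CommentButton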
@@ -17,6 +17,7 @@ import type {
} from './types'
import { CodeNode } from '@lexical/code'
import { LexicalComposer } from '@lexical/react/LexicalComposer'
import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext'
import { ContentEditable } from '@lexical/react/LexicalContentEditable'
import { LexicalErrorBoundary } from '@lexical/react/LexicalErrorBoundary'
import { HistoryPlugin } from '@lexical/react/LexicalHistoryPlugin'
@@ -81,6 +82,29 @@ import {
} from './plugins/workflow-variable-block'
import { textToEditorState } from './utils'

const ValueSyncPlugin: FC<{ value?: string }> = ({ value }) => {
  const [editor] = useLexicalComposerContext()

  useEffect(() => {
    if (value === undefined)
      return

    const incomingValue = value ?? ''
    const shouldUpdate = editor.getEditorState().read(() => {
      const currentText = $getRoot().getChildren().map(node => node.getTextContent()).join('\n')
      return currentText !== incomingValue
    })

    if (!shouldUpdate)
      return

    const editorState = editor.parseEditorState(textToEditorState(incomingValue))
    editor.setEditorState(editorState)
  }, [editor, value])

  return null
}

export type PromptEditorProps = {
  instanceId?: string
  compact?: boolean
@@ -294,6 +318,7 @@ const PromptEditor: FC<PromptEditorProps> = ({
            <VariableValueBlock />
          )
        }
        <ValueSyncPlugin value={value} />
        <OnChangePlugin onChange={handleEditorChange} />
        <OnBlurBlock onBlur={onBlur} onFocus={onFocus} />
        <UpdateBlock instanceId={instanceId} />

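In effect, ValueSyncPlugin turns the editor's value prop into a controlled input: an externally changed value is re-parsed into the Lexical state, but only when the text actually differs, which avoids clobbering local edits on every render. A minimal controlled-usage sketch, assuming the component's existing value/onChange props and default export path:

import { useState } from 'react'
import PromptEditor from '@/app/components/base/prompt-editor'

// Hypothetical wrapper: replacing `text` from outside (for example a collaborative
// update) is now reflected in the editor, not just used as the initial content.
const ControlledPrompt = () => {
  const [text, setText] = useState('Hello {{name}}')
  return <PromptEditor value={text} onChange={setText} />
}

export default ControlledPrompt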
web/app/components/base/user-avatar-list/index.tsx (new file, 79 lines)
@@ -0,0 +1,79 @@
import type { FC } from 'react'
import { memo } from 'react'
import Avatar from '@/app/components/base/avatar'
import { getUserColor } from '@/app/components/workflow/collaboration/utils/user-color'
import { useAppContext } from '@/context/app-context'

type User = {
  id: string
  name: string
  avatar_url?: string | null
}

type UserAvatarListProps = {
  users: User[]
  maxVisible?: number
  size?: number
  className?: string
  showCount?: boolean
}

export const UserAvatarList: FC<UserAvatarListProps> = memo(({
  users,
  maxVisible = 3,
  size = 24,
  className = '',
  showCount = true,
}) => {
  const { userProfile } = useAppContext()
  if (!users.length)
    return null

  const shouldShowCount = showCount && users.length > maxVisible
  const actualMaxVisible = shouldShowCount ? Math.max(1, maxVisible - 1) : maxVisible
  const visibleUsers = users.slice(0, actualMaxVisible)
  const remainingCount = users.length - actualMaxVisible

  const currentUserId = userProfile?.id

  return (
    <div className={`flex items-center -space-x-1 ${className}`}>
      {visibleUsers.map((user, index) => {
        const isCurrentUser = user.id === currentUserId
        const userColor = isCurrentUser ? undefined : getUserColor(user.id)
        return (
          <div
            key={`${user.id}-${index}`}
            className="relative"
            style={{ zIndex: visibleUsers.length - index }}
          >
            <Avatar
              name={user.name}
              avatar={user.avatar_url || null}
              size={size}
              className="ring-2 ring-components-panel-bg"
              backgroundColor={userColor}
            />
          </div>
        )
      },
      )}
      {shouldShowCount && remainingCount > 0 && (
        <div
          className="flex items-center justify-center rounded-full bg-gray-500 text-[10px] leading-none text-white ring-2 ring-components-panel-bg"
          style={{
            zIndex: 0,
            width: size,
            height: size,
          }}
        >
          +
          {remainingCount}
        </div>
      )}
    </div>
  )
})

UserAvatarList.displayName = 'UserAvatarList'
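A worked example of the overflow arithmetic above, with hypothetical users, showing why the badge reads "+3" rather than "+2" when five users meet maxVisible = 3:

// One visible slot is reserved for the count badge once the list overflows.
const users = ['u-1', 'u-2', 'u-3', 'u-4', 'u-5']
const maxVisible = 3
const showCount = true

const shouldShowCount = showCount && users.length > maxVisible                      // true
const actualMaxVisible = shouldShowCount ? Math.max(1, maxVisible - 1) : maxVisible // 2
const visibleUsers = users.slice(0, actualMaxVisible)                               // ['u-1', 'u-2']
const remainingCount = users.length - actualMaxVisible                              // 3, rendered as "+3"
console.log(visibleUsers, remainingCount)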
@@ -1,6 +1,3 @@
/**
 * @vitest-environment jsdom
 */
import type { Mock } from 'vitest'
import type { CrawlOptions, CrawlResultItem } from '@/models/datasets'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'

@ -1,499 +0,0 @@
|
||||
import type { DocumentContextValue } from '@/app/components/datasets/documents/detail/context'
|
||||
import type { ChildChunkDetail, ChunkingMode, ParentMode } from '@/models/datasets'
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
import ChildSegmentList from './child-segment-list'
|
||||
|
||||
// ============================================================================
|
||||
// Hoisted Mocks
|
||||
// ============================================================================
|
||||
|
||||
const {
|
||||
mockParentMode,
|
||||
mockCurrChildChunk,
|
||||
} = vi.hoisted(() => ({
|
||||
mockParentMode: { current: 'paragraph' as ParentMode },
|
||||
mockCurrChildChunk: { current: { childChunkInfo: undefined, showModal: false } as { childChunkInfo?: ChildChunkDetail, showModal: boolean } },
|
||||
}))
|
||||
|
||||
// Mock react-i18next
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string, options?: { count?: number, ns?: string }) => {
|
||||
if (key === 'segment.childChunks')
|
||||
return options?.count === 1 ? 'child chunk' : 'child chunks'
|
||||
if (key === 'segment.searchResults')
|
||||
return 'search results'
|
||||
if (key === 'segment.edited')
|
||||
return 'edited'
|
||||
if (key === 'operation.add')
|
||||
return 'Add'
|
||||
const prefix = options?.ns ? `${options.ns}.` : ''
|
||||
return `${prefix}${key}`
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
// Mock document context
|
||||
vi.mock('../context', () => ({
|
||||
useDocumentContext: (selector: (value: DocumentContextValue) => unknown) => {
|
||||
const value: DocumentContextValue = {
|
||||
datasetId: 'test-dataset-id',
|
||||
documentId: 'test-document-id',
|
||||
docForm: 'text' as ChunkingMode,
|
||||
parentMode: mockParentMode.current,
|
||||
}
|
||||
return selector(value)
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock segment list context
|
||||
vi.mock('./index', () => ({
|
||||
useSegmentListContext: (selector: (value: { currChildChunk: { childChunkInfo?: ChildChunkDetail, showModal: boolean } }) => unknown) => {
|
||||
return selector({ currChildChunk: mockCurrChildChunk.current })
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock skeleton component
|
||||
vi.mock('./skeleton/full-doc-list-skeleton', () => ({
|
||||
default: () => <div data-testid="full-doc-list-skeleton">Loading...</div>,
|
||||
}))
|
||||
|
||||
// Mock Empty component
|
||||
vi.mock('./common/empty', () => ({
|
||||
default: ({ onClearFilter }: { onClearFilter: () => void }) => (
|
||||
<div data-testid="empty-component">
|
||||
<button onClick={onClearFilter}>Clear Filter</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
// Mock FormattedText and EditSlice
|
||||
vi.mock('../../../formatted-text/formatted', () => ({
|
||||
FormattedText: ({ children, className }: { children: React.ReactNode, className?: string }) => (
|
||||
<div data-testid="formatted-text" className={className}>{children}</div>
|
||||
),
|
||||
}))
|
||||
|
||||
vi.mock('../../../formatted-text/flavours/edit-slice', () => ({
|
||||
EditSlice: ({ label, text, onDelete, onClick, labelClassName, contentClassName }: {
|
||||
label: string
|
||||
text: string
|
||||
onDelete: () => void
|
||||
onClick: (e: React.MouseEvent) => void
|
||||
labelClassName?: string
|
||||
contentClassName?: string
|
||||
}) => (
|
||||
<div data-testid="edit-slice" onClick={onClick}>
|
||||
<span data-testid="edit-slice-label" className={labelClassName}>{label}</span>
|
||||
<span data-testid="edit-slice-content" className={contentClassName}>{text}</span>
|
||||
<button
|
||||
data-testid="delete-button"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onDelete()
|
||||
}}
|
||||
>
|
||||
Delete
|
||||
</button>
|
||||
</div>
|
||||
),
|
||||
}))
|
||||
|
||||
// ============================================================================
|
||||
// Test Data Factories
|
||||
// ============================================================================
|
||||
|
||||
const createMockChildChunk = (overrides: Partial<ChildChunkDetail> = {}): ChildChunkDetail => ({
|
||||
id: `child-${Math.random().toString(36).substr(2, 9)}`,
|
||||
position: 1,
|
||||
segment_id: 'segment-1',
|
||||
content: 'Child chunk content',
|
||||
word_count: 100,
|
||||
created_at: 1700000000,
|
||||
updated_at: 1700000000,
|
||||
type: 'automatic',
|
||||
...overrides,
|
||||
})
|
||||
|
||||
// ============================================================================
|
||||
// Tests
|
||||
// ============================================================================
|
||||
|
||||
describe('ChildSegmentList', () => {
|
||||
const defaultProps = {
|
||||
childChunks: [] as ChildChunkDetail[],
|
||||
parentChunkId: 'parent-1',
|
||||
enabled: true,
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockParentMode.current = 'paragraph'
|
||||
mockCurrChildChunk.current = { childChunkInfo: undefined, showModal: false }
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render with empty child chunks', () => {
|
||||
render(<ChildSegmentList {...defaultProps} />)
|
||||
|
||||
expect(screen.getByText(/child chunks/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render child chunks when provided', () => {
|
||||
const childChunks = [
|
||||
createMockChildChunk({ id: 'child-1', position: 1, content: 'First chunk' }),
|
||||
createMockChildChunk({ id: 'child-2', position: 2, content: 'Second chunk' }),
|
||||
]
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={childChunks} />)
|
||||
|
||||
// In paragraph mode, content is collapsed by default
|
||||
expect(screen.getByText(/2 child chunks/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render total count correctly with total prop in full-doc mode', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const childChunks = [createMockChildChunk()]
|
||||
|
||||
// Pass inputValue="" to ensure isSearching is false
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={childChunks} total={5} isLoading={false} inputValue="" />)
|
||||
|
||||
expect(screen.getByText(/5 child chunks/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render loading skeleton in full-doc mode when loading', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} isLoading={true} />)
|
||||
|
||||
expect(screen.getByTestId('full-doc-list-skeleton')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not render loading skeleton when not loading', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} isLoading={false} />)
|
||||
|
||||
expect(screen.queryByTestId('full-doc-list-skeleton')).not.toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Paragraph Mode', () => {
|
||||
beforeEach(() => {
|
||||
mockParentMode.current = 'paragraph'
|
||||
})
|
||||
|
||||
it('should show collapse icon in paragraph mode', () => {
|
||||
const childChunks = [createMockChildChunk()]
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={childChunks} />)
|
||||
|
||||
// Check for collapse/expand behavior
|
||||
const totalRow = screen.getByText(/1 child chunk/i).closest('div')
|
||||
expect(totalRow).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should toggle collapsed state when clicked', () => {
|
||||
const childChunks = [createMockChildChunk({ content: 'Test content' })]
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={childChunks} />)
|
||||
|
||||
// Initially collapsed in paragraph mode - content should not be visible
|
||||
expect(screen.queryByTestId('formatted-text')).not.toBeInTheDocument()
|
||||
|
||||
// Find and click the toggle area
|
||||
const toggleArea = screen.getByText(/1 child chunk/i).closest('div')
|
||||
|
||||
// Click to expand
|
||||
if (toggleArea)
|
||||
fireEvent.click(toggleArea)
|
||||
|
||||
// After expansion, content should be visible
|
||||
expect(screen.getByTestId('formatted-text')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should apply opacity when disabled', () => {
|
||||
const { container } = render(<ChildSegmentList {...defaultProps} enabled={false} />)
|
||||
|
||||
const wrapper = container.firstChild
|
||||
expect(wrapper).toHaveClass('opacity-50')
|
||||
})
|
||||
|
||||
it('should not apply opacity when enabled', () => {
|
||||
const { container } = render(<ChildSegmentList {...defaultProps} enabled={true} />)
|
||||
|
||||
const wrapper = container.firstChild
|
||||
expect(wrapper).not.toHaveClass('opacity-50')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Full-Doc Mode', () => {
|
||||
beforeEach(() => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
})
|
||||
|
||||
it('should show content by default in full-doc mode', () => {
|
||||
const childChunks = [createMockChildChunk({ content: 'Full doc content' })]
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={childChunks} isLoading={false} />)
|
||||
|
||||
expect(screen.getByTestId('formatted-text')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render search input in full-doc mode', () => {
|
||||
render(<ChildSegmentList {...defaultProps} inputValue="" handleInputChange={vi.fn()} />)
|
||||
|
||||
const input = document.querySelector('input')
|
||||
expect(input).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should call handleInputChange when input changes', () => {
|
||||
const handleInputChange = vi.fn()
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} inputValue="" handleInputChange={handleInputChange} />)
|
||||
|
||||
const input = document.querySelector('input')
|
||||
if (input) {
|
||||
fireEvent.change(input, { target: { value: 'test search' } })
|
||||
expect(handleInputChange).toHaveBeenCalledWith('test search')
|
||||
}
|
||||
})
|
||||
|
||||
it('should show search results text when searching', () => {
|
||||
render(<ChildSegmentList {...defaultProps} inputValue="search term" total={3} />)
|
||||
|
||||
expect(screen.getByText(/3 search results/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should show empty component when no results and searching', () => {
|
||||
render(
|
||||
<ChildSegmentList
|
||||
{...defaultProps}
|
||||
childChunks={[]}
|
||||
inputValue="search term"
|
||||
onClearFilter={vi.fn()}
|
||||
isLoading={false}
|
||||
/>,
|
||||
)
|
||||
|
||||
expect(screen.getByTestId('empty-component')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should call onClearFilter when clear button clicked in empty state', () => {
|
||||
const onClearFilter = vi.fn()
|
||||
|
||||
render(
|
||||
<ChildSegmentList
|
||||
{...defaultProps}
|
||||
childChunks={[]}
|
||||
inputValue="search term"
|
||||
onClearFilter={onClearFilter}
|
||||
isLoading={false}
|
||||
/>,
|
||||
)
|
||||
|
||||
const clearButton = screen.getByText('Clear Filter')
|
||||
fireEvent.click(clearButton)
|
||||
|
||||
expect(onClearFilter).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Child Chunk Items', () => {
|
||||
it('should render edited label when chunk is edited', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const editedChunk = createMockChildChunk({
|
||||
id: 'edited-chunk',
|
||||
position: 1,
|
||||
created_at: 1700000000,
|
||||
updated_at: 1700000001, // Different from created_at
|
||||
})
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={[editedChunk]} isLoading={false} />)
|
||||
|
||||
expect(screen.getByText(/C-1 · edited/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should not show edited label when chunk is not edited', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const normalChunk = createMockChildChunk({
|
||||
id: 'normal-chunk',
|
||||
position: 2,
|
||||
created_at: 1700000000,
|
||||
updated_at: 1700000000, // Same as created_at
|
||||
})
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={[normalChunk]} isLoading={false} />)
|
||||
|
||||
expect(screen.getByText('C-2')).toBeInTheDocument()
|
||||
expect(screen.queryByText(/edited/i)).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should call onClickSlice when chunk is clicked', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const onClickSlice = vi.fn()
|
||||
const chunk = createMockChildChunk({ id: 'clickable-chunk' })
|
||||
|
||||
render(
|
||||
<ChildSegmentList
|
||||
{...defaultProps}
|
||||
childChunks={[chunk]}
|
||||
onClickSlice={onClickSlice}
|
||||
isLoading={false}
|
||||
/>,
|
||||
)
|
||||
|
||||
const editSlice = screen.getByTestId('edit-slice')
|
||||
fireEvent.click(editSlice)
|
||||
|
||||
expect(onClickSlice).toHaveBeenCalledWith(chunk)
|
||||
})
|
||||
|
||||
it('should call onDelete when delete button is clicked', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const onDelete = vi.fn()
|
||||
const chunk = createMockChildChunk({ id: 'deletable-chunk', segment_id: 'seg-1' })
|
||||
|
||||
render(
|
||||
<ChildSegmentList
|
||||
{...defaultProps}
|
||||
childChunks={[chunk]}
|
||||
onDelete={onDelete}
|
||||
isLoading={false}
|
||||
/>,
|
||||
)
|
||||
|
||||
const deleteButton = screen.getByTestId('delete-button')
|
||||
fireEvent.click(deleteButton)
|
||||
|
||||
expect(onDelete).toHaveBeenCalledWith('seg-1', 'deletable-chunk')
|
||||
})
|
||||
|
||||
it('should apply focused styles when chunk is currently selected', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const chunk = createMockChildChunk({ id: 'focused-chunk' })
|
||||
mockCurrChildChunk.current = { childChunkInfo: chunk, showModal: true }
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={[chunk]} isLoading={false} />)
|
||||
|
||||
const label = screen.getByTestId('edit-slice-label')
|
||||
expect(label).toHaveClass('bg-state-accent-solid')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Add Button', () => {
|
||||
it('should call handleAddNewChildChunk when Add button is clicked', () => {
|
||||
const handleAddNewChildChunk = vi.fn()
|
||||
|
||||
render(
|
||||
<ChildSegmentList
|
||||
{...defaultProps}
|
||||
handleAddNewChildChunk={handleAddNewChildChunk}
|
||||
parentChunkId="parent-123"
|
||||
/>,
|
||||
)
|
||||
|
||||
const addButton = screen.getByText('Add')
|
||||
fireEvent.click(addButton)
|
||||
|
||||
expect(handleAddNewChildChunk).toHaveBeenCalledWith('parent-123')
|
||||
})
|
||||
|
||||
it('should disable Add button when loading in full-doc mode', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} isLoading={true} />)
|
||||
|
||||
const addButton = screen.getByText('Add')
|
||||
expect(addButton).toBeDisabled()
|
||||
})
|
||||
|
||||
it('should stop propagation when Add button is clicked', () => {
|
||||
const handleAddNewChildChunk = vi.fn()
|
||||
const parentClickHandler = vi.fn()
|
||||
|
||||
render(
|
||||
<div onClick={parentClickHandler}>
|
||||
<ChildSegmentList
|
||||
{...defaultProps}
|
||||
handleAddNewChildChunk={handleAddNewChildChunk}
|
||||
/>
|
||||
</div>,
|
||||
)
|
||||
|
||||
const addButton = screen.getByText('Add')
|
||||
fireEvent.click(addButton)
|
||||
|
||||
expect(handleAddNewChildChunk).toHaveBeenCalled()
|
||||
// Parent should not be called due to stopPropagation
|
||||
})
|
||||
})
|
||||
|
||||
describe('computeTotalInfo function', () => {
|
||||
it('should return search results when searching in full-doc mode', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} inputValue="search" total={10} />)
|
||||
|
||||
expect(screen.getByText(/10 search results/i)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should return "--" when total is 0 in full-doc mode', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} total={0} />)
|
||||
|
||||
// When total is 0, displayText is '--'
|
||||
expect(screen.getByText(/--/)).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should use childChunks length in paragraph mode', () => {
|
||||
mockParentMode.current = 'paragraph'
|
||||
const childChunks = [
|
||||
createMockChildChunk(),
|
||||
createMockChildChunk(),
|
||||
createMockChildChunk(),
|
||||
]
|
||||
|
||||
render(<ChildSegmentList {...defaultProps} childChunks={childChunks} />)
|
||||
|
||||
expect(screen.getByText(/3 child chunks/i)).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Focused State', () => {
|
||||
it('should not apply opacity when focused even if disabled', () => {
|
||||
const { container } = render(
|
||||
<ChildSegmentList {...defaultProps} enabled={false} focused={true} />,
|
||||
)
|
||||
|
||||
const wrapper = container.firstChild
|
||||
expect(wrapper).not.toHaveClass('opacity-50')
|
||||
})
|
||||
})
|
||||
|
||||
describe('Input clear button', () => {
|
||||
it('should call handleInputChange with empty string when clear is clicked', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const handleInputChange = vi.fn()
|
||||
|
||||
render(
|
||||
<ChildSegmentList
|
||||
{...defaultProps}
|
||||
inputValue="test"
|
||||
handleInputChange={handleInputChange}
|
||||
/>,
|
||||
)
|
||||
|
||||
// Find the clear button (it's the showClearIcon button in Input)
|
||||
const input = document.querySelector('input')
|
||||
if (input) {
|
||||
// Trigger clear by simulating the input's onClear
|
||||
const clearButton = document.querySelector('[class*="cursor-pointer"]')
|
||||
if (clearButton)
|
||||
fireEvent.click(clearButton)
|
||||
}
|
||||
})
|
||||
})
|
||||
})
|
||||
@ -1,7 +1,7 @@
|
||||
import type { FC } from 'react'
|
||||
import type { ChildChunkDetail } from '@/models/datasets'
|
||||
import { RiArrowDownSLine, RiArrowRightSLine } from '@remixicon/react'
|
||||
import { useState } from 'react'
|
||||
import { useMemo, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import Input from '@/app/components/base/input'
|
||||
@ -29,37 +29,6 @@ type IChildSegmentCardProps = {
|
||||
focused?: boolean
|
||||
}
|
||||
|
||||
function computeTotalInfo(
|
||||
isFullDocMode: boolean,
|
||||
isSearching: boolean,
|
||||
total: number | undefined,
|
||||
childChunksLength: number,
|
||||
): { displayText: string, count: number, translationKey: 'segment.searchResults' | 'segment.childChunks' } {
|
||||
if (isSearching) {
|
||||
const count = total ?? 0
|
||||
return {
|
||||
displayText: count === 0 ? '--' : String(formatNumber(count)),
|
||||
count,
|
||||
translationKey: 'segment.searchResults',
|
||||
}
|
||||
}
|
||||
|
||||
if (isFullDocMode) {
|
||||
const count = total ?? 0
|
||||
return {
|
||||
displayText: count === 0 ? '--' : String(formatNumber(count)),
|
||||
count,
|
||||
translationKey: 'segment.childChunks',
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
displayText: String(formatNumber(childChunksLength)),
|
||||
count: childChunksLength,
|
||||
translationKey: 'segment.childChunks',
|
||||
}
|
||||
}
|
||||
|
||||
const ChildSegmentList: FC<IChildSegmentCardProps> = ({
|
||||
childChunks,
|
||||
parentChunkId,
|
||||
@ -80,87 +49,59 @@ const ChildSegmentList: FC<IChildSegmentCardProps> = ({
|
||||
|
||||
const [collapsed, setCollapsed] = useState(true)
|
||||
|
||||
const isParagraphMode = parentMode === 'paragraph'
|
||||
const isFullDocMode = parentMode === 'full-doc'
|
||||
const isSearching = inputValue !== '' && isFullDocMode
|
||||
const contentOpacity = (enabled || focused) ? '' : 'opacity-50 group-hover/card:opacity-100'
|
||||
const { displayText, count, translationKey } = computeTotalInfo(isFullDocMode, isSearching, total, childChunks.length)
|
||||
const totalText = `${displayText} ${t(translationKey, { ns: 'datasetDocuments', count })}`
|
||||
|
||||
const toggleCollapse = () => setCollapsed(prev => !prev)
|
||||
const showContent = (isFullDocMode && !isLoading) || !collapsed
|
||||
const hoverVisibleClass = isParagraphMode ? 'hidden group-hover/card:inline-block' : ''
|
||||
|
||||
const renderCollapseIcon = () => {
|
||||
if (!isParagraphMode)
|
||||
return null
|
||||
const Icon = collapsed ? RiArrowRightSLine : RiArrowDownSLine
|
||||
return <Icon className={cn('mr-0.5 h-4 w-4 text-text-secondary', collapsed && 'opacity-50')} />
|
||||
const toggleCollapse = () => {
|
||||
setCollapsed(!collapsed)
|
||||
}
|
||||
|
||||
const renderChildChunkItem = (childChunk: ChildChunkDetail) => {
|
||||
const isEdited = childChunk.updated_at !== childChunk.created_at
|
||||
const isFocused = currChildChunk?.childChunkInfo?.id === childChunk.id
|
||||
const label = isEdited
|
||||
? `C-${childChunk.position} · ${t('segment.edited', { ns: 'datasetDocuments' })}`
|
||||
: `C-${childChunk.position}`
|
||||
const isParagraphMode = useMemo(() => {
|
||||
return parentMode === 'paragraph'
|
||||
}, [parentMode])
|
||||
|
||||
return (
|
||||
<EditSlice
|
||||
key={childChunk.id}
|
||||
label={label}
|
||||
text={childChunk.content}
|
||||
onDelete={() => onDelete?.(childChunk.segment_id, childChunk.id)}
|
||||
className="child-chunk"
|
||||
labelClassName={isFocused ? 'bg-state-accent-solid text-text-primary-on-surface' : ''}
|
||||
labelInnerClassName="text-[10px] font-semibold align-bottom leading-6"
|
||||
contentClassName={cn('!leading-6', isFocused ? 'bg-state-accent-hover-alt text-text-primary' : 'text-text-secondary')}
|
||||
showDivider={false}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onClickSlice?.(childChunk)
|
||||
}}
|
||||
offsetOptions={({ rects }) => ({
|
||||
mainAxis: isFullDocMode ? -rects.floating.width : 12 - rects.floating.width,
|
||||
crossAxis: (20 - rects.floating.height) / 2,
|
||||
})}
|
||||
/>
|
||||
)
|
||||
}
|
||||
const isFullDocMode = useMemo(() => {
|
||||
return parentMode === 'full-doc'
|
||||
}, [parentMode])
|
||||
|
||||
const renderContent = () => {
|
||||
if (childChunks.length > 0) {
|
||||
return (
|
||||
<FormattedText className={cn('flex w-full flex-col !leading-6', isParagraphMode ? 'gap-y-2' : 'gap-y-3')}>
|
||||
{childChunks.map(renderChildChunkItem)}
|
||||
</FormattedText>
|
||||
)
|
||||
const contentOpacity = useMemo(() => {
|
||||
return (enabled || focused) ? '' : 'opacity-50 group-hover/card:opacity-100'
|
||||
}, [enabled, focused])
|
||||
|
||||
const totalText = useMemo(() => {
|
||||
const isSearch = inputValue !== '' && isFullDocMode
|
||||
if (!isSearch) {
|
||||
const text = isFullDocMode
|
||||
? !total
|
||||
? '--'
|
||||
: formatNumber(total)
|
||||
: formatNumber(childChunks.length)
|
||||
const count = isFullDocMode
|
||||
? text === '--'
|
||||
? 0
|
||||
: total
|
||||
: childChunks.length
|
||||
return `${text} ${t('segment.childChunks', { ns: 'datasetDocuments', count })}`
|
||||
}
|
||||
if (inputValue !== '') {
|
||||
return (
|
||||
<div className="h-full w-full">
|
||||
<Empty onClearFilter={onClearFilter!} />
|
||||
</div>
|
||||
)
|
||||
else {
|
||||
const text = !total ? '--' : formatNumber(total)
|
||||
const count = text === '--' ? 0 : total
|
||||
return `${count} ${t('segment.searchResults', { ns: 'datasetDocuments', count })}`
|
||||
}
|
||||
return null
|
||||
}
|
||||
}, [isFullDocMode, total, childChunks.length, inputValue])
|
||||
|
||||
return (
|
||||
<div className={cn(
|
||||
'flex flex-col',
|
||||
contentOpacity,
|
||||
isParagraphMode ? 'pb-2 pt-1' : 'grow px-3',
|
||||
isFullDocMode && isLoading && 'overflow-y-hidden',
|
||||
(isFullDocMode && isLoading) && 'overflow-y-hidden',
|
||||
)}
|
||||
>
|
||||
{isFullDocMode && <Divider type="horizontal" className="my-1 h-px bg-divider-subtle" />}
|
||||
<div className={cn('flex items-center justify-between', isFullDocMode && 'sticky -top-2 left-0 bg-background-default pb-3 pt-2')}>
|
||||
{isFullDocMode ? <Divider type="horizontal" className="my-1 h-px bg-divider-subtle" /> : null}
|
||||
<div className={cn('flex items-center justify-between', isFullDocMode ? 'sticky -top-2 left-0 bg-background-default pb-3 pt-2' : '')}>
|
||||
<div
|
||||
className={cn(
|
||||
'flex h-7 items-center rounded-lg pl-1 pr-3',
|
||||
isParagraphMode && 'cursor-pointer',
|
||||
isParagraphMode && collapsed && 'bg-dataset-child-chunk-expand-btn-bg',
|
||||
(isParagraphMode && collapsed) && 'bg-dataset-child-chunk-expand-btn-bg',
|
||||
isFullDocMode && 'pl-0',
|
||||
)}
|
||||
onClick={(event) => {
|
||||
@ -168,15 +109,23 @@ const ChildSegmentList: FC<IChildSegmentCardProps> = ({
|
||||
toggleCollapse()
|
||||
}}
|
||||
>
|
||||
{renderCollapseIcon()}
|
||||
{
|
||||
isParagraphMode
|
||||
? collapsed
|
||||
? (
|
||||
<RiArrowRightSLine className="mr-0.5 h-4 w-4 text-text-secondary opacity-50" />
|
||||
)
|
||||
: (<RiArrowDownSLine className="mr-0.5 h-4 w-4 text-text-secondary" />)
|
||||
: null
|
||||
}
|
||||
<span className="system-sm-semibold-uppercase text-text-secondary">{totalText}</span>
|
||||
<span className={cn('pl-1.5 text-xs font-medium text-text-quaternary', hoverVisibleClass)}>·</span>
|
||||
<span className={cn('pl-1.5 text-xs font-medium text-text-quaternary', isParagraphMode ? 'hidden group-hover/card:inline-block' : '')}>·</span>
|
||||
<button
|
||||
type="button"
|
||||
className={cn(
|
||||
'system-xs-semibold-uppercase px-1.5 py-1 text-components-button-secondary-accent-text',
|
||||
hoverVisibleClass,
|
||||
isFullDocMode && isLoading && 'text-components-button-secondary-accent-text-disabled',
|
||||
isParagraphMode ? 'hidden group-hover/card:inline-block' : '',
|
||||
(isFullDocMode && isLoading) ? 'text-components-button-secondary-accent-text-disabled' : '',
|
||||
)}
|
||||
onClick={(event) => {
|
||||
event.stopPropagation()
|
||||
@ -187,28 +136,70 @@ const ChildSegmentList: FC<IChildSegmentCardProps> = ({
|
||||
{t('operation.add', { ns: 'common' })}
|
||||
</button>
|
||||
</div>
|
||||
{isFullDocMode && (
|
||||
<Input
|
||||
showLeftIcon
|
||||
showClearIcon
|
||||
wrapperClassName="!w-52"
|
||||
value={inputValue}
|
||||
onChange={e => handleInputChange?.(e.target.value)}
|
||||
onClear={() => handleInputChange?.('')}
|
||||
/>
|
||||
)}
|
||||
{isFullDocMode
|
||||
? (
|
||||
<Input
|
||||
showLeftIcon
|
||||
showClearIcon
|
||||
wrapperClassName="!w-52"
|
||||
value={inputValue}
|
||||
onChange={e => handleInputChange?.(e.target.value)}
|
||||
onClear={() => handleInputChange?.('')}
|
||||
/>
|
||||
)
|
||||
: null}
|
||||
</div>
|
||||
{isLoading && <FullDocListSkeleton />}
|
||||
{showContent && (
|
||||
<div className={cn('flex gap-x-0.5', isFullDocMode ? 'mb-6 grow' : 'items-center')}>
|
||||
{isParagraphMode && (
|
||||
<div className="self-stretch">
|
||||
<Divider type="vertical" className="mx-[7px] w-[2px] bg-text-accent-secondary" />
|
||||
{isLoading ? <FullDocListSkeleton /> : null}
|
||||
{((isFullDocMode && !isLoading) || !collapsed)
|
||||
? (
|
||||
<div className={cn('flex gap-x-0.5', isFullDocMode ? 'mb-6 grow' : 'items-center')}>
|
||||
{isParagraphMode && (
|
||||
<div className="self-stretch">
|
||||
<Divider type="vertical" className="mx-[7px] w-[2px] bg-text-accent-secondary" />
|
||||
</div>
|
||||
)}
|
||||
{childChunks.length > 0
|
||||
? (
|
||||
<FormattedText className={cn('flex w-full flex-col !leading-6', isParagraphMode ? 'gap-y-2' : 'gap-y-3')}>
|
||||
{childChunks.map((childChunk) => {
|
||||
const edited = childChunk.updated_at !== childChunk.created_at
|
||||
const focused = currChildChunk?.childChunkInfo?.id === childChunk.id
|
||||
return (
|
||||
<EditSlice
|
||||
key={childChunk.id}
|
||||
label={`C-${childChunk.position}${edited ? ` · ${t('segment.edited', { ns: 'datasetDocuments' })}` : ''}`}
|
||||
text={childChunk.content}
|
||||
onDelete={() => onDelete?.(childChunk.segment_id, childChunk.id)}
|
||||
className="child-chunk"
|
||||
labelClassName={focused ? 'bg-state-accent-solid text-text-primary-on-surface' : ''}
|
||||
labelInnerClassName="text-[10px] font-semibold align-bottom leading-6"
|
||||
contentClassName={cn('!leading-6', focused ? 'bg-state-accent-hover-alt text-text-primary' : 'text-text-secondary')}
|
||||
showDivider={false}
|
||||
onClick={(e) => {
|
||||
e.stopPropagation()
|
||||
onClickSlice?.(childChunk)
|
||||
}}
|
||||
offsetOptions={({ rects }) => {
|
||||
return {
|
||||
mainAxis: isFullDocMode ? -rects.floating.width : 12 - rects.floating.width,
|
||||
crossAxis: (20 - rects.floating.height) / 2,
|
||||
}
|
||||
}}
|
||||
/>
|
||||
)
|
||||
})}
|
||||
</FormattedText>
|
||||
)
|
||||
: inputValue !== ''
|
||||
? (
|
||||
<div className="h-full w-full">
|
||||
<Empty onClearFilter={onClearFilter!} />
|
||||
</div>
|
||||
)
|
||||
: null}
|
||||
</div>
|
||||
)}
|
||||
{renderContent()}
|
||||
</div>
|
||||
)}
|
||||
)
|
||||
: null}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -17,31 +17,6 @@ type DrawerProps = {
|
||||
needCheckChunks?: boolean
|
||||
}
|
||||
|
||||
const SIDE_POSITION_CLASS = {
  right: 'right-0',
  left: 'left-0',
  bottom: 'bottom-0',
  top: 'top-0',
} as const

function containsTarget(selector: string, target: Node | null): boolean {
  const elements = document.querySelectorAll(selector)
  return Array.from(elements).some(el => el?.contains(target))
}

function shouldReopenChunkDetail(
  isClickOnChunk: boolean,
  isClickOnChildChunk: boolean,
  segmentModalOpen: boolean,
  childChunkModalOpen: boolean,
): boolean {
  if (segmentModalOpen && isClickOnChildChunk)
    return true
  if (childChunkModalOpen && isClickOnChunk && !isClickOnChildChunk)
    return true
  return !isClickOnChunk && !isClickOnChildChunk
}
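The helpers above are pure, so their intended behavior is easy to pin down. The calls below are an illustrative sketch, not part of the change, with arguments in the order (isClickOnChunk, isClickOnChildChunk, segmentModalOpen, childChunkModalOpen):

shouldReopenChunkDetail(false, true, true, false) // true: a child chunk was clicked while the segment modal is open
shouldReopenChunkDetail(true, false, false, true) // true: a parent chunk was clicked while the child-chunk modal is open
shouldReopenChunkDetail(false, false, false, false) // true: plain outside click, the drawer should close
shouldReopenChunkDetail(true, false, true, false) // false: the open chunk itself was clicked, keep the drawer as is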
|
||||
|
||||
const Drawer = ({
|
||||
open,
|
||||
onClose,
|
||||
@@ -66,22 +41,22 @@ const Drawer = ({
|
||||
|
||||
const shouldCloseDrawer = useCallback((target: Node | null) => {
|
||||
const panelContent = panelContentRef.current
|
||||
if (!panelContent || !target)
|
||||
if (!panelContent)
|
||||
return false
|
||||
|
||||
if (panelContent.contains(target))
|
||||
return false
|
||||
|
||||
if (containsTarget('.image-previewer', target))
|
||||
return false
|
||||
|
||||
if (!needCheckChunks)
|
||||
return true
|
||||
|
||||
const isClickOnChunk = containsTarget('.chunk-card', target)
|
||||
const isClickOnChildChunk = containsTarget('.child-chunk', target)
|
||||
return shouldReopenChunkDetail(isClickOnChunk, isClickOnChildChunk, currSegment.showModal, currChildChunk.showModal)
|
||||
}, [currSegment.showModal, currChildChunk.showModal, needCheckChunks])
|
||||
const chunks = document.querySelectorAll('.chunk-card')
|
||||
const childChunks = document.querySelectorAll('.child-chunk')
|
||||
const imagePreviewer = document.querySelector('.image-previewer')
|
||||
const isClickOnChunk = Array.from(chunks).some((chunk) => {
|
||||
return chunk && chunk.contains(target)
|
||||
})
|
||||
const isClickOnChildChunk = Array.from(childChunks).some((chunk) => {
|
||||
return chunk && chunk.contains(target)
|
||||
})
|
||||
const reopenChunkDetail = (currSegment.showModal && isClickOnChildChunk)
|
||||
|| (currChildChunk.showModal && isClickOnChunk && !isClickOnChildChunk) || (!isClickOnChunk && !isClickOnChildChunk)
|
||||
const isClickOnImagePreviewer = imagePreviewer && imagePreviewer.contains(target)
|
||||
return target && !panelContent.contains(target) && (!needCheckChunks || reopenChunkDetail) && !isClickOnImagePreviewer
|
||||
}, [currSegment, currChildChunk, needCheckChunks])
|
||||
|
||||
const onDownCapture = useCallback((e: PointerEvent) => {
|
||||
if (!open || modal)
|
||||
@@ -102,27 +77,32 @@ const Drawer = ({
|
||||
|
||||
const isHorizontal = side === 'left' || side === 'right'
|
||||
|
||||
const overlayPointerEvents = modal && open ? 'pointer-events-auto' : 'pointer-events-none'
|
||||
|
||||
const content = (
|
||||
<div className="pointer-events-none fixed inset-0 z-[9999]">
|
||||
{showOverlay && (
|
||||
<div
|
||||
onClick={modal ? onClose : undefined}
|
||||
aria-hidden="true"
|
||||
className={cn(
|
||||
'fixed inset-0 bg-black/30 opacity-0 transition-opacity duration-200 ease-in',
|
||||
open && 'opacity-100',
|
||||
overlayPointerEvents,
|
||||
)}
|
||||
/>
|
||||
)}
|
||||
{showOverlay
|
||||
? (
|
||||
<div
|
||||
onClick={modal ? onClose : undefined}
|
||||
aria-hidden="true"
|
||||
className={cn(
|
||||
'fixed inset-0 bg-black/30 opacity-0 transition-opacity duration-200 ease-in',
|
||||
open && 'opacity-100',
|
||||
modal && open ? 'pointer-events-auto' : 'pointer-events-none',
|
||||
)}
|
||||
/>
|
||||
)
|
||||
: null}
|
||||
|
||||
{/* Drawer panel */}
|
||||
<div
|
||||
role="dialog"
|
||||
aria-modal={modal ? 'true' : 'false'}
|
||||
className={cn(
|
||||
'pointer-events-auto fixed flex flex-col',
|
||||
SIDE_POSITION_CLASS[side],
|
||||
side === 'right' && 'right-0',
|
||||
side === 'left' && 'left-0',
|
||||
side === 'bottom' && 'bottom-0',
|
||||
side === 'top' && 'top-0',
|
||||
isHorizontal ? 'h-screen' : 'w-screen',
|
||||
panelClassName,
|
||||
)}
|
||||
@@ -134,10 +114,7 @@ const Drawer = ({
|
||||
</div>
|
||||
)
|
||||
|
||||
if (!open)
|
||||
return null
|
||||
|
||||
return createPortal(content, document.body)
|
||||
return open && createPortal(content, document.body)
|
||||
}
|
||||
|
||||
export default Drawer
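A minimal usage sketch for the Drawer exported above, assuming only the props that appear in this file (open, onClose, side, modal, showOverlay, needCheckChunks, panelClassName); the wrapper component, import path and width class are hypothetical.

import type { ReactNode } from 'react'
import { useState } from 'react'
import Drawer from './drawer' // assumed path

const ChunkDetailDrawer = ({ children }: { children: ReactNode }) => {
  const [open, setOpen] = useState(true)
  return (
    <Drawer
      open={open}
      onClose={() => setOpen(false)}
      side="right"
      modal={false}
      showOverlay={false}
      needCheckChunks
      panelClassName="w-[480px]"
    >
      {children}
    </Drawer>
  )
}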
|
||||
|
||||
@@ -1,129 +0,0 @@
|
||||
import { fireEvent, render, screen } from '@testing-library/react'
|
||||
import Empty from './empty'
|
||||
|
||||
// Mock react-i18next
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => {
|
||||
if (key === 'segment.empty')
|
||||
return 'No results found'
|
||||
if (key === 'segment.clearFilter')
|
||||
return 'Clear Filter'
|
||||
return key
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
describe('Empty Component', () => {
|
||||
const defaultProps = {
|
||||
onClearFilter: vi.fn(),
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Rendering', () => {
|
||||
it('should render empty state message', () => {
|
||||
render(<Empty {...defaultProps} />)
|
||||
|
||||
expect(screen.getByText('No results found')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render clear filter button', () => {
|
||||
render(<Empty {...defaultProps} />)
|
||||
|
||||
expect(screen.getByText('Clear Filter')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render icon', () => {
|
||||
const { container } = render(<Empty {...defaultProps} />)
|
||||
|
||||
// Check for the icon container
|
||||
const iconContainer = container.querySelector('.shadow-lg')
|
||||
expect(iconContainer).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('should render decorative lines', () => {
|
||||
const { container } = render(<Empty {...defaultProps} />)
|
||||
|
||||
// Check for SVG lines
|
||||
const svgs = container.querySelectorAll('svg')
|
||||
expect(svgs.length).toBeGreaterThan(0)
|
||||
})
|
||||
|
||||
it('should render background cards', () => {
|
||||
const { container } = render(<Empty {...defaultProps} />)
|
||||
|
||||
// Check for background empty cards (10 of them)
|
||||
const backgroundCards = container.querySelectorAll('.rounded-xl.bg-background-section-burn')
|
||||
expect(backgroundCards.length).toBe(10)
|
||||
})
|
||||
|
||||
it('should render mask overlay', () => {
|
||||
const { container } = render(<Empty {...defaultProps} />)
|
||||
|
||||
const maskOverlay = container.querySelector('.bg-dataset-chunk-list-mask-bg')
|
||||
expect(maskOverlay).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Interactions', () => {
|
||||
it('should call onClearFilter when clear filter button is clicked', () => {
|
||||
const onClearFilter = vi.fn()
|
||||
|
||||
render(<Empty onClearFilter={onClearFilter} />)
|
||||
|
||||
const clearButton = screen.getByText('Clear Filter')
|
||||
fireEvent.click(clearButton)
|
||||
|
||||
expect(onClearFilter).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Memoization', () => {
|
||||
it('should be memoized', () => {
|
||||
// Empty is wrapped with React.memo
|
||||
const { rerender } = render(<Empty {...defaultProps} />)
|
||||
|
||||
// Same props should not cause re-render issues
|
||||
rerender(<Empty {...defaultProps} />)
|
||||
|
||||
expect(screen.getByText('No results found')).toBeInTheDocument()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('EmptyCard Component', () => {
|
||||
it('should render within Empty component', () => {
|
||||
const { container } = render(<Empty onClearFilter={vi.fn()} />)
|
||||
|
||||
// EmptyCard renders as background cards
|
||||
const emptyCards = container.querySelectorAll('.h-32.w-full')
|
||||
expect(emptyCards.length).toBe(10)
|
||||
})
|
||||
|
||||
it('should have correct opacity', () => {
|
||||
const { container } = render(<Empty onClearFilter={vi.fn()} />)
|
||||
|
||||
const emptyCards = container.querySelectorAll('.opacity-30')
|
||||
expect(emptyCards.length).toBe(10)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Line Component', () => {
|
||||
it('should render SVG lines within Empty component', () => {
|
||||
const { container } = render(<Empty onClearFilter={vi.fn()} />)
|
||||
|
||||
// Line components render as SVG elements (4 Line components + 1 icon SVG)
|
||||
const lines = container.querySelectorAll('svg')
|
||||
expect(lines.length).toBeGreaterThanOrEqual(4)
|
||||
})
|
||||
|
||||
it('should have gradient definition', () => {
|
||||
const { container } = render(<Empty onClearFilter={vi.fn()} />)
|
||||
|
||||
const gradients = container.querySelectorAll('linearGradient')
|
||||
expect(gradients.length).toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
@@ -1,151 +0,0 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import type { FileEntity } from '@/app/components/datasets/common/image-uploader/types'
|
||||
import type { ChildChunkDetail, ChunkingMode, SegmentDetailModel } from '@/models/datasets'
|
||||
import NewSegment from '@/app/components/datasets/documents/detail/new-segment'
|
||||
import ChildSegmentDetail from '../child-segment-detail'
|
||||
import FullScreenDrawer from '../common/full-screen-drawer'
|
||||
import NewChildSegment from '../new-child-segment'
|
||||
import SegmentDetail from '../segment-detail'
|
||||
|
||||
type DrawerGroupProps = {
|
||||
// Segment detail drawer
|
||||
currSegment: {
|
||||
segInfo?: SegmentDetailModel
|
||||
showModal: boolean
|
||||
isEditMode?: boolean
|
||||
}
|
||||
onCloseSegmentDetail: () => void
|
||||
onUpdateSegment: (
|
||||
segmentId: string,
|
||||
question: string,
|
||||
answer: string,
|
||||
keywords: string[],
|
||||
attachments: FileEntity[],
|
||||
needRegenerate?: boolean,
|
||||
) => Promise<void>
|
||||
isRegenerationModalOpen: boolean
|
||||
setIsRegenerationModalOpen: (open: boolean) => void
|
||||
// New segment drawer
|
||||
showNewSegmentModal: boolean
|
||||
onCloseNewSegmentModal: () => void
|
||||
onSaveNewSegment: () => void
|
||||
viewNewlyAddedChunk: () => void
|
||||
// Child segment detail drawer
|
||||
currChildChunk: {
|
||||
childChunkInfo?: ChildChunkDetail
|
||||
showModal: boolean
|
||||
}
|
||||
currChunkId: string
|
||||
onCloseChildSegmentDetail: () => void
|
||||
onUpdateChildChunk: (segmentId: string, childChunkId: string, content: string) => Promise<void>
|
||||
// New child segment drawer
|
||||
showNewChildSegmentModal: boolean
|
||||
onCloseNewChildChunkModal: () => void
|
||||
onSaveNewChildChunk: (newChildChunk?: ChildChunkDetail) => void
|
||||
viewNewlyAddedChildChunk: () => void
|
||||
// Common props
|
||||
fullScreen: boolean
|
||||
docForm: ChunkingMode
|
||||
}
|
||||
|
||||
const DrawerGroup: FC<DrawerGroupProps> = ({
|
||||
// Segment detail drawer
|
||||
currSegment,
|
||||
onCloseSegmentDetail,
|
||||
onUpdateSegment,
|
||||
isRegenerationModalOpen,
|
||||
setIsRegenerationModalOpen,
|
||||
// New segment drawer
|
||||
showNewSegmentModal,
|
||||
onCloseNewSegmentModal,
|
||||
onSaveNewSegment,
|
||||
viewNewlyAddedChunk,
|
||||
// Child segment detail drawer
|
||||
currChildChunk,
|
||||
currChunkId,
|
||||
onCloseChildSegmentDetail,
|
||||
onUpdateChildChunk,
|
||||
// New child segment drawer
|
||||
showNewChildSegmentModal,
|
||||
onCloseNewChildChunkModal,
|
||||
onSaveNewChildChunk,
|
||||
viewNewlyAddedChildChunk,
|
||||
// Common props
|
||||
fullScreen,
|
||||
docForm,
|
||||
}) => {
|
||||
return (
|
||||
<>
|
||||
{/* Edit or view segment detail */}
|
||||
<FullScreenDrawer
|
||||
isOpen={currSegment.showModal}
|
||||
fullScreen={fullScreen}
|
||||
onClose={onCloseSegmentDetail}
|
||||
showOverlay={false}
|
||||
needCheckChunks
|
||||
modal={isRegenerationModalOpen}
|
||||
>
|
||||
<SegmentDetail
|
||||
key={currSegment.segInfo?.id}
|
||||
segInfo={currSegment.segInfo ?? { id: '' }}
|
||||
docForm={docForm}
|
||||
isEditMode={currSegment.isEditMode}
|
||||
onUpdate={onUpdateSegment}
|
||||
onCancel={onCloseSegmentDetail}
|
||||
onModalStateChange={setIsRegenerationModalOpen}
|
||||
/>
|
||||
</FullScreenDrawer>
|
||||
|
||||
{/* Create New Segment */}
|
||||
<FullScreenDrawer
|
||||
isOpen={showNewSegmentModal}
|
||||
fullScreen={fullScreen}
|
||||
onClose={onCloseNewSegmentModal}
|
||||
modal
|
||||
>
|
||||
<NewSegment
|
||||
docForm={docForm}
|
||||
onCancel={onCloseNewSegmentModal}
|
||||
onSave={onSaveNewSegment}
|
||||
viewNewlyAddedChunk={viewNewlyAddedChunk}
|
||||
/>
|
||||
</FullScreenDrawer>
|
||||
|
||||
{/* Edit or view child segment detail */}
|
||||
<FullScreenDrawer
|
||||
isOpen={currChildChunk.showModal}
|
||||
fullScreen={fullScreen}
|
||||
onClose={onCloseChildSegmentDetail}
|
||||
showOverlay={false}
|
||||
needCheckChunks
|
||||
>
|
||||
<ChildSegmentDetail
|
||||
key={currChildChunk.childChunkInfo?.id}
|
||||
chunkId={currChunkId}
|
||||
childChunkInfo={currChildChunk.childChunkInfo ?? { id: '' }}
|
||||
docForm={docForm}
|
||||
onUpdate={onUpdateChildChunk}
|
||||
onCancel={onCloseChildSegmentDetail}
|
||||
/>
|
||||
</FullScreenDrawer>
|
||||
|
||||
{/* Create New Child Segment */}
|
||||
<FullScreenDrawer
|
||||
isOpen={showNewChildSegmentModal}
|
||||
fullScreen={fullScreen}
|
||||
onClose={onCloseNewChildChunkModal}
|
||||
modal
|
||||
>
|
||||
<NewChildSegment
|
||||
chunkId={currChunkId}
|
||||
onCancel={onCloseNewChildChunkModal}
|
||||
onSave={onSaveNewChildChunk}
|
||||
viewNewlyAddedChildChunk={viewNewlyAddedChildChunk}
|
||||
/>
|
||||
</FullScreenDrawer>
|
||||
</>
|
||||
)
|
||||
}
|
||||
|
||||
export default DrawerGroup
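A hedged sketch of a DrawerGroup call site: the prop names mirror the DrawerGroupProps type above, but the surrounding state and handlers are assumptions, not code from this change.

<DrawerGroup
  currSegment={currSegment}
  onCloseSegmentDetail={onCloseSegmentDetail}
  onUpdateSegment={handleUpdateSegment}
  isRegenerationModalOpen={isRegenerationModalOpen}
  setIsRegenerationModalOpen={setIsRegenerationModalOpen}
  showNewSegmentModal={showNewSegmentModal}
  onCloseNewSegmentModal={onCloseNewSegmentModal}
  onSaveNewSegment={onSaveNewSegment}
  viewNewlyAddedChunk={viewNewlyAddedChunk}
  currChildChunk={currChildChunk}
  currChunkId={currChunkId}
  onCloseChildSegmentDetail={onCloseChildSegmentDetail}
  onUpdateChildChunk={handleUpdateChildChunk}
  showNewChildSegmentModal={showNewChildSegmentModal}
  onCloseNewChildChunkModal={onCloseNewChildChunkModal}
  onSaveNewChildChunk={onSaveNewChildChunk}
  viewNewlyAddedChildChunk={viewNewlyAddedChildChunk}
  fullScreen={fullScreen}
  docForm={docForm}
/>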
|
||||
@@ -1,3 +0,0 @@
|
||||
export { default as DrawerGroup } from './drawer-group'
export { default as MenuBar } from './menu-bar'
export { FullDocModeContent, GeneralModeContent } from './segment-list-content'
|
||||
@@ -1,76 +0,0 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import type { Item } from '@/app/components/base/select'
|
||||
import Checkbox from '@/app/components/base/checkbox'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import Input from '@/app/components/base/input'
|
||||
import { SimpleSelect } from '@/app/components/base/select'
|
||||
import DisplayToggle from '../display-toggle'
|
||||
import StatusItem from '../status-item'
|
||||
import s from '../style.module.css'
|
||||
|
||||
type MenuBarProps = {
|
||||
isAllSelected: boolean
|
||||
isSomeSelected: boolean
|
||||
onSelectedAll: () => void
|
||||
isLoading: boolean
|
||||
totalText: string
|
||||
statusList: Item[]
|
||||
selectDefaultValue: 'all' | 0 | 1
|
||||
onChangeStatus: (item: Item) => void
|
||||
inputValue: string
|
||||
onInputChange: (value: string) => void
|
||||
isCollapsed: boolean
|
||||
toggleCollapsed: () => void
|
||||
}
|
||||
|
||||
const MenuBar: FC<MenuBarProps> = ({
|
||||
isAllSelected,
|
||||
isSomeSelected,
|
||||
onSelectedAll,
|
||||
isLoading,
|
||||
totalText,
|
||||
statusList,
|
||||
selectDefaultValue,
|
||||
onChangeStatus,
|
||||
inputValue,
|
||||
onInputChange,
|
||||
isCollapsed,
|
||||
toggleCollapsed,
|
||||
}) => {
|
||||
return (
|
||||
<div className={s.docSearchWrapper}>
|
||||
<Checkbox
|
||||
className="shrink-0"
|
||||
checked={isAllSelected}
|
||||
indeterminate={!isAllSelected && isSomeSelected}
|
||||
onCheck={onSelectedAll}
|
||||
disabled={isLoading}
|
||||
/>
|
||||
<div className="system-sm-semibold-uppercase flex-1 pl-5 text-text-secondary">{totalText}</div>
|
||||
<SimpleSelect
|
||||
onSelect={onChangeStatus}
|
||||
items={statusList}
|
||||
defaultValue={selectDefaultValue}
|
||||
className={s.select}
|
||||
wrapperClassName="h-fit mr-2"
|
||||
optionWrapClassName="w-[160px]"
|
||||
optionClassName="p-0"
|
||||
renderOption={({ item, selected }) => <StatusItem item={item} selected={selected} />}
|
||||
notClearable
|
||||
/>
|
||||
<Input
|
||||
showLeftIcon
|
||||
showClearIcon
|
||||
wrapperClassName="!w-52"
|
||||
value={inputValue}
|
||||
onChange={e => onInputChange(e.target.value)}
|
||||
onClear={() => onInputChange('')}
|
||||
/>
|
||||
<Divider type="vertical" className="mx-3 h-3.5" />
|
||||
<DisplayToggle isCollapsed={isCollapsed} toggleCollapsed={toggleCollapsed} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export default MenuBar
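A usage sketch for MenuBar, assuming the surrounding state comes from the selection and search hooks elsewhere in this change; the variable names are illustrative.

<MenuBar
  isAllSelected={isAllSelected}
  isSomeSelected={isSomeSelected}
  onSelectedAll={onSelectedAll}
  isLoading={isLoadingSegmentList}
  totalText={totalText}
  statusList={statusList}
  selectDefaultValue={selectDefaultValue}
  onChangeStatus={onChangeStatus}
  inputValue={inputValue}
  onInputChange={handleInputChange}
  isCollapsed={isCollapsed}
  toggleCollapsed={toggleCollapsed}
/>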
|
||||
@@ -1,127 +0,0 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import type { ChildChunkDetail, SegmentDetailModel } from '@/models/datasets'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import ChildSegmentList from '../child-segment-list'
|
||||
import SegmentCard from '../segment-card'
|
||||
import SegmentList from '../segment-list'
|
||||
|
||||
type FullDocModeContentProps = {
|
||||
segments: SegmentDetailModel[]
|
||||
childSegments: ChildChunkDetail[]
|
||||
isLoadingSegmentList: boolean
|
||||
isLoadingChildSegmentList: boolean
|
||||
currSegmentId?: string
|
||||
onClickCard: (detail: SegmentDetailModel, isEditMode?: boolean) => void
|
||||
onDeleteChildChunk: (segmentId: string, childChunkId: string) => Promise<void>
|
||||
handleInputChange: (value: string) => void
|
||||
handleAddNewChildChunk: (parentChunkId: string) => void
|
||||
onClickSlice: (detail: ChildChunkDetail) => void
|
||||
archived?: boolean
|
||||
childChunkTotal: number
|
||||
inputValue: string
|
||||
onClearFilter: () => void
|
||||
}
|
||||
|
||||
export const FullDocModeContent: FC<FullDocModeContentProps> = ({
|
||||
segments,
|
||||
childSegments,
|
||||
isLoadingSegmentList,
|
||||
isLoadingChildSegmentList,
|
||||
currSegmentId,
|
||||
onClickCard,
|
||||
onDeleteChildChunk,
|
||||
handleInputChange,
|
||||
handleAddNewChildChunk,
|
||||
onClickSlice,
|
||||
archived,
|
||||
childChunkTotal,
|
||||
inputValue,
|
||||
onClearFilter,
|
||||
}) => {
|
||||
const firstSegment = segments[0]
|
||||
|
||||
return (
|
||||
<div className={cn(
|
||||
'flex grow flex-col overflow-x-hidden',
|
||||
(isLoadingSegmentList || isLoadingChildSegmentList) ? 'overflow-y-hidden' : 'overflow-y-auto',
|
||||
)}
|
||||
>
|
||||
<SegmentCard
|
||||
detail={firstSegment}
|
||||
onClick={() => onClickCard(firstSegment)}
|
||||
loading={isLoadingSegmentList}
|
||||
focused={{
|
||||
segmentIndex: currSegmentId === firstSegment?.id,
|
||||
segmentContent: currSegmentId === firstSegment?.id,
|
||||
}}
|
||||
/>
|
||||
<ChildSegmentList
|
||||
parentChunkId={firstSegment?.id}
|
||||
onDelete={onDeleteChildChunk}
|
||||
childChunks={childSegments}
|
||||
handleInputChange={handleInputChange}
|
||||
handleAddNewChildChunk={handleAddNewChildChunk}
|
||||
onClickSlice={onClickSlice}
|
||||
enabled={!archived}
|
||||
total={childChunkTotal}
|
||||
inputValue={inputValue}
|
||||
onClearFilter={onClearFilter}
|
||||
isLoading={isLoadingSegmentList || isLoadingChildSegmentList}
|
||||
/>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
type GeneralModeContentProps = {
|
||||
segmentListRef: React.RefObject<HTMLDivElement | null>
|
||||
embeddingAvailable: boolean
|
||||
isLoadingSegmentList: boolean
|
||||
segments: SegmentDetailModel[]
|
||||
selectedSegmentIds: string[]
|
||||
onSelected: (segId: string) => void
|
||||
onChangeSwitch: (enable: boolean, segId?: string) => Promise<void>
|
||||
onDelete: (segId?: string) => Promise<void>
|
||||
onClickCard: (detail: SegmentDetailModel, isEditMode?: boolean) => void
|
||||
archived?: boolean
|
||||
onDeleteChildChunk: (segmentId: string, childChunkId: string) => Promise<void>
|
||||
handleAddNewChildChunk: (parentChunkId: string) => void
|
||||
onClickSlice: (detail: ChildChunkDetail) => void
|
||||
onClearFilter: () => void
|
||||
}
|
||||
|
||||
export const GeneralModeContent: FC<GeneralModeContentProps> = ({
|
||||
segmentListRef,
|
||||
embeddingAvailable,
|
||||
isLoadingSegmentList,
|
||||
segments,
|
||||
selectedSegmentIds,
|
||||
onSelected,
|
||||
onChangeSwitch,
|
||||
onDelete,
|
||||
onClickCard,
|
||||
archived,
|
||||
onDeleteChildChunk,
|
||||
handleAddNewChildChunk,
|
||||
onClickSlice,
|
||||
onClearFilter,
|
||||
}) => {
|
||||
return (
|
||||
<SegmentList
|
||||
ref={segmentListRef}
|
||||
embeddingAvailable={embeddingAvailable}
|
||||
isLoading={isLoadingSegmentList}
|
||||
items={segments}
|
||||
selectedSegmentIds={selectedSegmentIds}
|
||||
onSelected={onSelected}
|
||||
onChangeSwitch={onChangeSwitch}
|
||||
onDelete={onDelete}
|
||||
onClick={onClickCard}
|
||||
archived={archived}
|
||||
onDeleteChildChunk={onDeleteChildChunk}
|
||||
handleAddNewChildChunk={handleAddNewChildChunk}
|
||||
onClickSlice={onClickSlice}
|
||||
onClearFilter={onClearFilter}
|
||||
/>
|
||||
)
|
||||
}
|
||||
@@ -1,14 +0,0 @@
|
||||
export { useChildSegmentData } from './use-child-segment-data'
export type { UseChildSegmentDataReturn } from './use-child-segment-data'

export { useModalState } from './use-modal-state'
export type { CurrChildChunkType, CurrSegmentType, UseModalStateReturn } from './use-modal-state'

export { useSearchFilter } from './use-search-filter'
export type { UseSearchFilterReturn } from './use-search-filter'

export { useSegmentListData } from './use-segment-list-data'
export type { UseSegmentListDataReturn } from './use-segment-list-data'

export { useSegmentSelection } from './use-segment-selection'
export type { UseSegmentSelectionReturn } from './use-segment-selection'
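With the barrel above, callers can import the hooks from a single path, for example:

import { useModalState, useSearchFilter, useSegmentSelection } from './hooks'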
|
||||
@@ -1,568 +0,0 @@
|
||||
import type { DocumentContextValue } from '@/app/components/datasets/documents/detail/context'
|
||||
import type { ChildChunkDetail, ChildSegmentsResponse, ChunkingMode, ParentMode, SegmentDetailModel } from '@/models/datasets'
|
||||
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
|
||||
import { act, renderHook } from '@testing-library/react'
|
||||
import * as React from 'react'
|
||||
import { useChildSegmentData } from './use-child-segment-data'
|
||||
|
||||
// Type for mutation callbacks
|
||||
type MutationResponse = { data: ChildChunkDetail }
|
||||
type MutationCallbacks = {
|
||||
onSuccess: (res: MutationResponse) => void
|
||||
onSettled: () => void
|
||||
}
|
||||
type _ErrorCallback = { onSuccess?: () => void, onError: () => void }
|
||||
|
||||
// ============================================================================
|
||||
// Hoisted Mocks
|
||||
// ============================================================================
|
||||
|
||||
const {
|
||||
mockParentMode,
|
||||
mockDatasetId,
|
||||
mockDocumentId,
|
||||
mockNotify,
|
||||
mockEventEmitter,
|
||||
mockQueryClient,
|
||||
mockChildSegmentListData,
|
||||
mockDeleteChildSegment,
|
||||
mockUpdateChildSegment,
|
||||
mockInvalidChildSegmentList,
|
||||
} = vi.hoisted(() => ({
|
||||
mockParentMode: { current: 'paragraph' as ParentMode },
|
||||
mockDatasetId: { current: 'test-dataset-id' },
|
||||
mockDocumentId: { current: 'test-document-id' },
|
||||
mockNotify: vi.fn(),
|
||||
mockEventEmitter: { emit: vi.fn(), on: vi.fn(), off: vi.fn() },
|
||||
mockQueryClient: { setQueryData: vi.fn() },
|
||||
mockChildSegmentListData: { current: { data: [] as ChildChunkDetail[], total: 0, total_pages: 0 } as ChildSegmentsResponse | undefined },
|
||||
mockDeleteChildSegment: vi.fn(),
|
||||
mockUpdateChildSegment: vi.fn(),
|
||||
mockInvalidChildSegmentList: vi.fn(),
|
||||
}))
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('react-i18next', () => ({
|
||||
useTranslation: () => ({
|
||||
t: (key: string) => {
|
||||
if (key === 'actionMsg.modifiedSuccessfully')
|
||||
return 'Modified successfully'
|
||||
if (key === 'actionMsg.modifiedUnsuccessfully')
|
||||
return 'Modified unsuccessfully'
|
||||
if (key === 'segment.contentEmpty')
|
||||
return 'Content cannot be empty'
|
||||
return key
|
||||
},
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@tanstack/react-query', async () => {
|
||||
const actual = await vi.importActual('@tanstack/react-query')
|
||||
return {
|
||||
...actual,
|
||||
useQueryClient: () => mockQueryClient,
|
||||
}
|
||||
})
|
||||
|
||||
vi.mock('../../context', () => ({
|
||||
useDocumentContext: (selector: (value: DocumentContextValue) => unknown) => {
|
||||
const value: DocumentContextValue = {
|
||||
datasetId: mockDatasetId.current,
|
||||
documentId: mockDocumentId.current,
|
||||
docForm: 'text' as ChunkingMode,
|
||||
parentMode: mockParentMode.current,
|
||||
}
|
||||
return selector(value)
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/app/components/base/toast', () => ({
|
||||
useToastContext: () => ({ notify: mockNotify }),
|
||||
}))
|
||||
|
||||
vi.mock('@/context/event-emitter', () => ({
|
||||
useEventEmitterContextContext: () => ({ eventEmitter: mockEventEmitter }),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/knowledge/use-segment', () => ({
|
||||
useChildSegmentList: () => ({
|
||||
isLoading: false,
|
||||
data: mockChildSegmentListData.current,
|
||||
}),
|
||||
useChildSegmentListKey: ['segment', 'childChunkList'],
|
||||
useDeleteChildSegment: () => ({ mutateAsync: mockDeleteChildSegment }),
|
||||
useUpdateChildSegment: () => ({ mutateAsync: mockUpdateChildSegment }),
|
||||
}))
|
||||
|
||||
vi.mock('@/service/use-base', () => ({
|
||||
useInvalid: () => mockInvalidChildSegmentList,
|
||||
}))
|
||||
|
||||
// ============================================================================
|
||||
// Test Utilities
|
||||
// ============================================================================
|
||||
|
||||
const createQueryClient = () => new QueryClient({
|
||||
defaultOptions: {
|
||||
queries: { retry: false },
|
||||
mutations: { retry: false },
|
||||
},
|
||||
})
|
||||
|
||||
const createWrapper = () => {
|
||||
const queryClient = createQueryClient()
|
||||
return ({ children }: { children: React.ReactNode }) =>
|
||||
React.createElement(QueryClientProvider, { client: queryClient }, children)
|
||||
}
|
||||
|
||||
const createMockChildChunk = (overrides: Partial<ChildChunkDetail> = {}): ChildChunkDetail => ({
|
||||
id: `child-${Math.random().toString(36).substr(2, 9)}`,
|
||||
position: 1,
|
||||
segment_id: 'segment-1',
|
||||
content: 'Child chunk content',
|
||||
word_count: 100,
|
||||
created_at: 1700000000,
|
||||
updated_at: 1700000000,
|
||||
type: 'automatic',
|
||||
...overrides,
|
||||
})
|
||||
|
||||
const createMockSegment = (overrides: Partial<SegmentDetailModel> = {}): SegmentDetailModel => ({
|
||||
id: 'segment-1',
|
||||
position: 1,
|
||||
document_id: 'doc-1',
|
||||
content: 'Test content',
|
||||
sign_content: 'Test signed content',
|
||||
word_count: 100,
|
||||
tokens: 50,
|
||||
keywords: [],
|
||||
index_node_id: 'index-1',
|
||||
index_node_hash: 'hash-1',
|
||||
hit_count: 0,
|
||||
enabled: true,
|
||||
disabled_at: 0,
|
||||
disabled_by: '',
|
||||
status: 'completed',
|
||||
created_by: 'user-1',
|
||||
created_at: 1700000000,
|
||||
indexing_at: 1700000100,
|
||||
completed_at: 1700000200,
|
||||
error: null,
|
||||
stopped_at: 0,
|
||||
updated_at: 1700000000,
|
||||
attachments: [],
|
||||
child_chunks: [],
|
||||
...overrides,
|
||||
})
|
||||
|
||||
const defaultOptions = {
|
||||
searchValue: '',
|
||||
currentPage: 1,
|
||||
limit: 10,
|
||||
segments: [createMockSegment()] as SegmentDetailModel[],
|
||||
currChunkId: 'segment-1',
|
||||
isFullDocMode: true,
|
||||
onCloseChildSegmentDetail: vi.fn(),
|
||||
refreshChunkListDataWithDetailChanged: vi.fn(),
|
||||
updateSegmentInCache: vi.fn(),
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Tests
|
||||
// ============================================================================
|
||||
|
||||
describe('useChildSegmentData', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockParentMode.current = 'paragraph'
|
||||
mockDatasetId.current = 'test-dataset-id'
|
||||
mockDocumentId.current = 'test-document-id'
|
||||
mockChildSegmentListData.current = { data: [], total: 0, total_pages: 0, page: 1, limit: 20 }
|
||||
})
|
||||
|
||||
describe('Initial State', () => {
|
||||
it('should return empty child segments initially', () => {
|
||||
const { result } = renderHook(() => useChildSegmentData(defaultOptions), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
expect(result.current.childSegments).toEqual([])
|
||||
expect(result.current.isLoadingChildSegmentList).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('resetChildList', () => {
|
||||
it('should call invalidChildSegmentList', () => {
|
||||
const { result } = renderHook(() => useChildSegmentData(defaultOptions), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
act(() => {
|
||||
result.current.resetChildList()
|
||||
})
|
||||
|
||||
expect(mockInvalidChildSegmentList).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('onDeleteChildChunk', () => {
|
||||
it('should delete child chunk and update parent cache in paragraph mode', async () => {
|
||||
mockParentMode.current = 'paragraph'
|
||||
const updateSegmentInCache = vi.fn()
|
||||
|
||||
mockDeleteChildSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
|
||||
onSuccess()
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
updateSegmentInCache,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.onDeleteChildChunk('seg-1', 'child-1')
|
||||
})
|
||||
|
||||
expect(mockDeleteChildSegment).toHaveBeenCalled()
|
||||
expect(mockNotify).toHaveBeenCalledWith({ type: 'success', message: 'Modified successfully' })
|
||||
expect(updateSegmentInCache).toHaveBeenCalledWith('seg-1', expect.any(Function))
|
||||
})
|
||||
|
||||
it('should delete child chunk and reset list in full-doc mode', async () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
|
||||
mockDeleteChildSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
|
||||
onSuccess()
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData(defaultOptions), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.onDeleteChildChunk('seg-1', 'child-1')
|
||||
})
|
||||
|
||||
expect(mockInvalidChildSegmentList).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should notify error on failure', async () => {
|
||||
mockDeleteChildSegment.mockImplementation(async (_params, { onError }: { onError: () => void }) => {
|
||||
onError()
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData(defaultOptions), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.onDeleteChildChunk('seg-1', 'child-1')
|
||||
})
|
||||
|
||||
expect(mockNotify).toHaveBeenCalledWith({ type: 'error', message: 'Modified unsuccessfully' })
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleUpdateChildChunk', () => {
|
||||
it('should validate empty content', async () => {
|
||||
const { result } = renderHook(() => useChildSegmentData(defaultOptions), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleUpdateChildChunk('seg-1', 'child-1', ' ')
|
||||
})
|
||||
|
||||
expect(mockNotify).toHaveBeenCalledWith({ type: 'error', message: 'Content cannot be empty' })
|
||||
expect(mockUpdateChildSegment).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should update child chunk and parent cache in paragraph mode', async () => {
|
||||
mockParentMode.current = 'paragraph'
|
||||
const updateSegmentInCache = vi.fn()
|
||||
const onCloseChildSegmentDetail = vi.fn()
|
||||
const refreshChunkListDataWithDetailChanged = vi.fn()
|
||||
|
||||
mockUpdateChildSegment.mockImplementation(async (_params, { onSuccess, onSettled }: MutationCallbacks) => {
|
||||
onSuccess({
|
||||
data: createMockChildChunk({
|
||||
content: 'updated content',
|
||||
type: 'customized',
|
||||
word_count: 50,
|
||||
updated_at: 1700000001,
|
||||
}),
|
||||
})
|
||||
onSettled()
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
updateSegmentInCache,
|
||||
onCloseChildSegmentDetail,
|
||||
refreshChunkListDataWithDetailChanged,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleUpdateChildChunk('seg-1', 'child-1', 'updated content')
|
||||
})
|
||||
|
||||
expect(mockUpdateChildSegment).toHaveBeenCalled()
|
||||
expect(mockNotify).toHaveBeenCalledWith({ type: 'success', message: 'Modified successfully' })
|
||||
expect(onCloseChildSegmentDetail).toHaveBeenCalled()
|
||||
expect(updateSegmentInCache).toHaveBeenCalled()
|
||||
expect(refreshChunkListDataWithDetailChanged).toHaveBeenCalled()
|
||||
expect(mockEventEmitter.emit).toHaveBeenCalledWith('update-child-segment')
|
||||
expect(mockEventEmitter.emit).toHaveBeenCalledWith('update-child-segment-done')
|
||||
})
|
||||
|
||||
it('should update child chunk cache in full-doc mode', async () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const onCloseChildSegmentDetail = vi.fn()
|
||||
|
||||
mockUpdateChildSegment.mockImplementation(async (_params, { onSuccess, onSettled }: MutationCallbacks) => {
|
||||
onSuccess({
|
||||
data: createMockChildChunk({
|
||||
content: 'updated content',
|
||||
type: 'customized',
|
||||
word_count: 50,
|
||||
updated_at: 1700000001,
|
||||
}),
|
||||
})
|
||||
onSettled()
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
onCloseChildSegmentDetail,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleUpdateChildChunk('seg-1', 'child-1', 'updated content')
|
||||
})
|
||||
|
||||
expect(mockQueryClient.setQueryData).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('onSaveNewChildChunk', () => {
|
||||
it('should update parent cache in paragraph mode', () => {
|
||||
mockParentMode.current = 'paragraph'
|
||||
const updateSegmentInCache = vi.fn()
|
||||
const refreshChunkListDataWithDetailChanged = vi.fn()
|
||||
const newChildChunk = createMockChildChunk({ id: 'new-child' })
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
updateSegmentInCache,
|
||||
refreshChunkListDataWithDetailChanged,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
act(() => {
|
||||
result.current.onSaveNewChildChunk(newChildChunk)
|
||||
})
|
||||
|
||||
expect(updateSegmentInCache).toHaveBeenCalled()
|
||||
expect(refreshChunkListDataWithDetailChanged).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should reset child list in full-doc mode', () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData(defaultOptions), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
act(() => {
|
||||
result.current.onSaveNewChildChunk(createMockChildChunk())
|
||||
})
|
||||
|
||||
expect(mockInvalidChildSegmentList).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('viewNewlyAddedChildChunk', () => {
|
||||
it('should set needScrollToBottom and not reset when adding new page', () => {
|
||||
mockChildSegmentListData.current = { data: [], total: 10, total_pages: 1, page: 1, limit: 20 }
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
limit: 10,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
act(() => {
|
||||
result.current.viewNewlyAddedChildChunk()
|
||||
})
|
||||
|
||||
expect(result.current.needScrollToBottom.current).toBe(true)
|
||||
})
|
||||
|
||||
it('should call resetChildList when not adding new page', () => {
|
||||
mockChildSegmentListData.current = { data: [], total: 5, total_pages: 1, page: 1, limit: 20 }
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
limit: 10,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
act(() => {
|
||||
result.current.viewNewlyAddedChildChunk()
|
||||
})
|
||||
|
||||
expect(mockInvalidChildSegmentList).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Query disabled states', () => {
|
||||
it('should disable query when not in fullDocMode', () => {
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
isFullDocMode: false,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
// Query should be disabled but hook should still work
|
||||
expect(result.current.childSegments).toEqual([])
|
||||
})
|
||||
|
||||
it('should disable query when segments is empty', () => {
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
segments: [],
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
expect(result.current.childSegments).toEqual([])
|
||||
})
|
||||
})
|
||||
|
||||
describe('Cache update callbacks', () => {
|
||||
it('should use updateSegmentInCache when deleting in paragraph mode', async () => {
|
||||
mockParentMode.current = 'paragraph'
|
||||
const updateSegmentInCache = vi.fn()
|
||||
|
||||
mockDeleteChildSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
|
||||
onSuccess()
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
updateSegmentInCache,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.onDeleteChildChunk('seg-1', 'child-1')
|
||||
})
|
||||
|
||||
expect(updateSegmentInCache).toHaveBeenCalledWith('seg-1', expect.any(Function))
|
||||
|
||||
// Verify the updater function filters correctly
|
||||
const updaterFn = updateSegmentInCache.mock.calls[0][1]
|
||||
const testSegment = createMockSegment({
|
||||
child_chunks: [
|
||||
createMockChildChunk({ id: 'child-1' }),
|
||||
createMockChildChunk({ id: 'child-2' }),
|
||||
],
|
||||
})
|
||||
const updatedSegment = updaterFn(testSegment)
|
||||
expect(updatedSegment.child_chunks).toHaveLength(1)
|
||||
expect(updatedSegment.child_chunks[0].id).toBe('child-2')
|
||||
})
|
||||
|
||||
it('should use updateSegmentInCache when updating in paragraph mode', async () => {
|
||||
mockParentMode.current = 'paragraph'
|
||||
const updateSegmentInCache = vi.fn()
|
||||
const onCloseChildSegmentDetail = vi.fn()
|
||||
const refreshChunkListDataWithDetailChanged = vi.fn()
|
||||
|
||||
mockUpdateChildSegment.mockImplementation(async (_params, { onSuccess, onSettled }: MutationCallbacks) => {
|
||||
onSuccess({
|
||||
data: createMockChildChunk({
|
||||
id: 'child-1',
|
||||
content: 'new content',
|
||||
type: 'customized',
|
||||
word_count: 50,
|
||||
updated_at: 1700000001,
|
||||
}),
|
||||
})
|
||||
onSettled()
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
updateSegmentInCache,
|
||||
onCloseChildSegmentDetail,
|
||||
refreshChunkListDataWithDetailChanged,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleUpdateChildChunk('seg-1', 'child-1', 'new content')
|
||||
})
|
||||
|
||||
expect(updateSegmentInCache).toHaveBeenCalledWith('seg-1', expect.any(Function))
|
||||
|
||||
// Verify the updater function maps correctly
|
||||
const updaterFn = updateSegmentInCache.mock.calls[0][1]
|
||||
const testSegment = createMockSegment({
|
||||
child_chunks: [
|
||||
createMockChildChunk({ id: 'child-1', content: 'old content' }),
|
||||
createMockChildChunk({ id: 'child-2', content: 'other content' }),
|
||||
],
|
||||
})
|
||||
const updatedSegment = updaterFn(testSegment)
|
||||
expect(updatedSegment.child_chunks).toHaveLength(2)
|
||||
expect(updatedSegment.child_chunks[0].content).toBe('new content')
|
||||
expect(updatedSegment.child_chunks[1].content).toBe('other content')
|
||||
})
|
||||
})
|
||||
|
||||
describe('updateChildSegmentInCache in full-doc mode', () => {
|
||||
it('should use updateChildSegmentInCache when updating in full-doc mode', async () => {
|
||||
mockParentMode.current = 'full-doc'
|
||||
const onCloseChildSegmentDetail = vi.fn()
|
||||
|
||||
mockUpdateChildSegment.mockImplementation(async (_params, { onSuccess, onSettled }: MutationCallbacks) => {
|
||||
onSuccess({
|
||||
data: createMockChildChunk({
|
||||
id: 'child-1',
|
||||
content: 'new content',
|
||||
type: 'customized',
|
||||
word_count: 50,
|
||||
updated_at: 1700000001,
|
||||
}),
|
||||
})
|
||||
onSettled()
|
||||
})
|
||||
|
||||
const { result } = renderHook(() => useChildSegmentData({
|
||||
...defaultOptions,
|
||||
onCloseChildSegmentDetail,
|
||||
}), {
|
||||
wrapper: createWrapper(),
|
||||
})
|
||||
|
||||
await act(async () => {
|
||||
await result.current.handleUpdateChildChunk('seg-1', 'child-1', 'new content')
|
||||
})
|
||||
|
||||
expect(mockQueryClient.setQueryData).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,241 +0,0 @@
|
||||
import type { ChildChunkDetail, ChildSegmentsResponse, SegmentDetailModel, SegmentUpdater } from '@/models/datasets'
|
||||
import { useQueryClient } from '@tanstack/react-query'
|
||||
import { useCallback, useEffect, useMemo, useRef } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { useToastContext } from '@/app/components/base/toast'
|
||||
import { useEventEmitterContextContext } from '@/context/event-emitter'
|
||||
import {
|
||||
useChildSegmentList,
|
||||
useChildSegmentListKey,
|
||||
useDeleteChildSegment,
|
||||
useUpdateChildSegment,
|
||||
} from '@/service/knowledge/use-segment'
|
||||
import { useInvalid } from '@/service/use-base'
|
||||
import { useDocumentContext } from '../../context'
|
||||
|
||||
export type UseChildSegmentDataOptions = {
|
||||
searchValue: string
|
||||
currentPage: number
|
||||
limit: number
|
||||
segments: SegmentDetailModel[]
|
||||
currChunkId: string
|
||||
isFullDocMode: boolean
|
||||
onCloseChildSegmentDetail: () => void
|
||||
refreshChunkListDataWithDetailChanged: () => void
|
||||
updateSegmentInCache: (segmentId: string, updater: (seg: SegmentDetailModel) => SegmentDetailModel) => void
|
||||
}
|
||||
|
||||
export type UseChildSegmentDataReturn = {
|
||||
childSegments: ChildChunkDetail[]
|
||||
isLoadingChildSegmentList: boolean
|
||||
childChunkListData: ReturnType<typeof useChildSegmentList>['data']
|
||||
childSegmentListRef: React.RefObject<HTMLDivElement | null>
|
||||
needScrollToBottom: React.RefObject<boolean>
|
||||
// Operations
|
||||
onDeleteChildChunk: (segmentId: string, childChunkId: string) => Promise<void>
|
||||
handleUpdateChildChunk: (segmentId: string, childChunkId: string, content: string) => Promise<void>
|
||||
onSaveNewChildChunk: (newChildChunk?: ChildChunkDetail) => void
|
||||
resetChildList: () => void
|
||||
viewNewlyAddedChildChunk: () => void
|
||||
}
|
||||
|
||||
export const useChildSegmentData = (options: UseChildSegmentDataOptions): UseChildSegmentDataReturn => {
|
||||
const {
|
||||
searchValue,
|
||||
currentPage,
|
||||
limit,
|
||||
segments,
|
||||
currChunkId,
|
||||
isFullDocMode,
|
||||
onCloseChildSegmentDetail,
|
||||
refreshChunkListDataWithDetailChanged,
|
||||
updateSegmentInCache,
|
||||
} = options
|
||||
|
||||
const { t } = useTranslation()
|
||||
const { notify } = useToastContext()
|
||||
const { eventEmitter } = useEventEmitterContextContext()
|
||||
const queryClient = useQueryClient()
|
||||
|
||||
const datasetId = useDocumentContext(s => s.datasetId) || ''
|
||||
const documentId = useDocumentContext(s => s.documentId) || ''
|
||||
const parentMode = useDocumentContext(s => s.parentMode)
|
||||
|
||||
const childSegmentListRef = useRef<HTMLDivElement>(null)
|
||||
const needScrollToBottom = useRef(false)
|
||||
|
||||
// Build query params
|
||||
const queryParams = useMemo(() => ({
|
||||
page: currentPage === 0 ? 1 : currentPage,
|
||||
limit,
|
||||
keyword: searchValue,
|
||||
}), [currentPage, limit, searchValue])
|
||||
|
||||
const segmentId = segments[0]?.id || ''
|
||||
|
||||
// Build query key for optimistic updates
|
||||
const currentQueryKey = useMemo(() =>
|
||||
[...useChildSegmentListKey, datasetId, documentId, segmentId, queryParams], [datasetId, documentId, segmentId, queryParams])
|
||||
|
||||
// Fetch child segment list
|
||||
const { isLoading: isLoadingChildSegmentList, data: childChunkListData } = useChildSegmentList(
|
||||
{
|
||||
datasetId,
|
||||
documentId,
|
||||
segmentId,
|
||||
params: queryParams,
|
||||
},
|
||||
!isFullDocMode || segments.length === 0,
|
||||
)
|
||||
|
||||
// Derive child segments from query data
|
||||
const childSegments = useMemo(() => childChunkListData?.data || [], [childChunkListData])
|
||||
|
||||
const invalidChildSegmentList = useInvalid(useChildSegmentListKey)
|
||||
|
||||
// Scroll to bottom when child segments change
|
||||
useEffect(() => {
|
||||
if (childSegmentListRef.current && needScrollToBottom.current) {
|
||||
childSegmentListRef.current.scrollTo({ top: childSegmentListRef.current.scrollHeight, behavior: 'smooth' })
|
||||
needScrollToBottom.current = false
|
||||
}
|
||||
}, [childSegments])
|
||||
|
||||
const resetChildList = useCallback(() => {
|
||||
invalidChildSegmentList()
|
||||
}, [invalidChildSegmentList])
|
||||
|
||||
// Optimistic update helper for child segments
|
||||
const updateChildSegmentInCache = useCallback((
|
||||
childChunkId: string,
|
||||
updater: (chunk: ChildChunkDetail) => ChildChunkDetail,
|
||||
) => {
|
||||
queryClient.setQueryData<ChildSegmentsResponse>(currentQueryKey, (old) => {
|
||||
if (!old)
|
||||
return old
|
||||
return {
|
||||
...old,
|
||||
data: old.data.map(chunk => chunk.id === childChunkId ? updater(chunk) : chunk),
|
||||
}
|
||||
})
|
||||
}, [queryClient, currentQueryKey])
|
||||
|
||||
// Mutations
|
||||
const { mutateAsync: deleteChildSegment } = useDeleteChildSegment()
|
||||
const { mutateAsync: updateChildSegment } = useUpdateChildSegment()
|
||||
|
||||
const onDeleteChildChunk = useCallback(async (segmentIdParam: string, childChunkId: string) => {
|
||||
await deleteChildSegment(
|
||||
{ datasetId, documentId, segmentId: segmentIdParam, childChunkId },
|
||||
{
|
||||
onSuccess: () => {
|
||||
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
|
||||
if (parentMode === 'paragraph') {
|
||||
// Update parent segment's child_chunks in cache
|
||||
updateSegmentInCache(segmentIdParam, seg => ({
|
||||
...seg,
|
||||
child_chunks: seg.child_chunks?.filter(chunk => chunk.id !== childChunkId),
|
||||
}))
|
||||
}
|
||||
else {
|
||||
resetChildList()
|
||||
}
|
||||
},
|
||||
onError: () => {
|
||||
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
|
||||
},
|
||||
},
|
||||
)
|
||||
}, [datasetId, documentId, parentMode, deleteChildSegment, updateSegmentInCache, resetChildList, t, notify])
|
||||
|
||||
const handleUpdateChildChunk = useCallback(async (
|
||||
segmentIdParam: string,
|
||||
childChunkId: string,
|
||||
content: string,
|
||||
) => {
|
||||
const params: SegmentUpdater = { content: '' }
|
||||
if (!content.trim()) {
|
||||
notify({ type: 'error', message: t('segment.contentEmpty', { ns: 'datasetDocuments' }) })
|
||||
return
|
||||
}
|
||||
|
||||
params.content = content
|
||||
|
||||
eventEmitter?.emit('update-child-segment')
|
||||
await updateChildSegment({ datasetId, documentId, segmentId: segmentIdParam, childChunkId, body: params }, {
|
||||
onSuccess: (res) => {
|
||||
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
|
||||
onCloseChildSegmentDetail()
|
||||
|
||||
if (parentMode === 'paragraph') {
|
||||
// Update parent segment's child_chunks in cache
|
||||
updateSegmentInCache(segmentIdParam, seg => ({
|
||||
...seg,
|
||||
child_chunks: seg.child_chunks?.map(childSeg =>
|
||||
childSeg.id === childChunkId
|
||||
? {
|
||||
...childSeg,
|
||||
content: res.data.content,
|
||||
type: res.data.type,
|
||||
word_count: res.data.word_count,
|
||||
updated_at: res.data.updated_at,
|
||||
}
|
||||
: childSeg,
|
||||
),
|
||||
}))
|
||||
refreshChunkListDataWithDetailChanged()
|
||||
}
|
||||
else {
|
||||
updateChildSegmentInCache(childChunkId, chunk => ({
|
||||
...chunk,
|
||||
content: res.data.content,
|
||||
type: res.data.type,
|
||||
word_count: res.data.word_count,
|
||||
updated_at: res.data.updated_at,
|
||||
}))
|
||||
}
|
||||
},
|
||||
onSettled: () => {
|
||||
eventEmitter?.emit('update-child-segment-done')
|
||||
},
|
||||
})
|
||||
}, [datasetId, documentId, parentMode, updateChildSegment, notify, eventEmitter, onCloseChildSegmentDetail, updateSegmentInCache, updateChildSegmentInCache, refreshChunkListDataWithDetailChanged, t])
|
||||
|
||||
const onSaveNewChildChunk = useCallback((newChildChunk?: ChildChunkDetail) => {
|
||||
if (parentMode === 'paragraph') {
|
||||
// Update parent segment's child_chunks in cache
|
||||
updateSegmentInCache(currChunkId, seg => ({
|
||||
...seg,
|
||||
child_chunks: [...(seg.child_chunks || []), newChildChunk!],
|
||||
}))
|
||||
refreshChunkListDataWithDetailChanged()
|
||||
}
|
||||
else {
|
||||
resetChildList()
|
||||
}
|
||||
}, [parentMode, currChunkId, updateSegmentInCache, refreshChunkListDataWithDetailChanged, resetChildList])
|
||||
|
||||
const viewNewlyAddedChildChunk = useCallback(() => {
|
||||
const totalPages = childChunkListData?.total_pages || 0
|
||||
const total = childChunkListData?.total || 0
|
||||
const newPage = Math.ceil((total + 1) / limit)
|
||||
needScrollToBottom.current = true
|
||||
|
||||
if (newPage > totalPages)
|
||||
return
|
||||
resetChildList()
|
||||
}, [childChunkListData, limit, resetChildList])
|
||||
|
||||
return {
|
||||
childSegments,
|
||||
isLoadingChildSegmentList,
|
||||
childChunkListData,
|
||||
childSegmentListRef,
|
||||
needScrollToBottom,
|
||||
onDeleteChildChunk,
|
||||
handleUpdateChildChunk,
|
||||
onSaveNewChildChunk,
|
||||
resetChildList,
|
||||
viewNewlyAddedChildChunk,
|
||||
}
|
||||
}
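A sketch of a call site for useChildSegmentData; the option values are assumed to come from the page's own state and from the other hooks in this change, and the destructured subset is illustrative.

const {
  childSegments,
  isLoadingChildSegmentList,
  onDeleteChildChunk,
  handleUpdateChildChunk,
  onSaveNewChildChunk,
  viewNewlyAddedChildChunk,
} = useChildSegmentData({
  searchValue,
  currentPage,
  limit,
  segments,
  currChunkId,
  isFullDocMode,
  onCloseChildSegmentDetail,
  refreshChunkListDataWithDetailChanged,
  updateSegmentInCache,
})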
|
||||
@@ -1,141 +0,0 @@
|
||||
import type { ChildChunkDetail, SegmentDetailModel } from '@/models/datasets'
|
||||
import { useCallback, useState } from 'react'
|
||||
|
||||
export type CurrSegmentType = {
|
||||
segInfo?: SegmentDetailModel
|
||||
showModal: boolean
|
||||
isEditMode?: boolean
|
||||
}
|
||||
|
||||
export type CurrChildChunkType = {
|
||||
childChunkInfo?: ChildChunkDetail
|
||||
showModal: boolean
|
||||
}
|
||||
|
||||
export type UseModalStateReturn = {
|
||||
// Segment detail modal
|
||||
currSegment: CurrSegmentType
|
||||
onClickCard: (detail: SegmentDetailModel, isEditMode?: boolean) => void
|
||||
onCloseSegmentDetail: () => void
|
||||
// Child segment detail modal
|
||||
currChildChunk: CurrChildChunkType
|
||||
  currChunkId: string
  onClickSlice: (detail: ChildChunkDetail) => void
  onCloseChildSegmentDetail: () => void
  // New segment modal
  onCloseNewSegmentModal: () => void
  // New child segment modal
  showNewChildSegmentModal: boolean
  handleAddNewChildChunk: (parentChunkId: string) => void
  onCloseNewChildChunkModal: () => void
  // Regeneration modal
  isRegenerationModalOpen: boolean
  setIsRegenerationModalOpen: (open: boolean) => void
  // Full screen
  fullScreen: boolean
  toggleFullScreen: () => void
  setFullScreen: (fullScreen: boolean) => void
  // Collapsed state
  isCollapsed: boolean
  toggleCollapsed: () => void
}

type UseModalStateOptions = {
  onNewSegmentModalChange: (state: boolean) => void
}

export const useModalState = (options: UseModalStateOptions): UseModalStateReturn => {
  const { onNewSegmentModalChange } = options

  // Segment detail modal state
  const [currSegment, setCurrSegment] = useState<CurrSegmentType>({ showModal: false })

  // Child segment detail modal state
  const [currChildChunk, setCurrChildChunk] = useState<CurrChildChunkType>({ showModal: false })
  const [currChunkId, setCurrChunkId] = useState('')

  // New child segment modal state
  const [showNewChildSegmentModal, setShowNewChildSegmentModal] = useState(false)

  // Regeneration modal state
  const [isRegenerationModalOpen, setIsRegenerationModalOpen] = useState(false)

  // Display state
  const [fullScreen, setFullScreen] = useState(false)
  const [isCollapsed, setIsCollapsed] = useState(true)

  // Segment detail handlers
  const onClickCard = useCallback((detail: SegmentDetailModel, isEditMode = false) => {
    setCurrSegment({ segInfo: detail, showModal: true, isEditMode })
  }, [])

  const onCloseSegmentDetail = useCallback(() => {
    setCurrSegment({ showModal: false })
    setFullScreen(false)
  }, [])

  // Child segment detail handlers
  const onClickSlice = useCallback((detail: ChildChunkDetail) => {
    setCurrChildChunk({ childChunkInfo: detail, showModal: true })
    setCurrChunkId(detail.segment_id)
  }, [])

  const onCloseChildSegmentDetail = useCallback(() => {
    setCurrChildChunk({ showModal: false })
    setFullScreen(false)
  }, [])

  // New segment modal handlers
  const onCloseNewSegmentModal = useCallback(() => {
    onNewSegmentModalChange(false)
    setFullScreen(false)
  }, [onNewSegmentModalChange])

  // New child segment modal handlers
  const handleAddNewChildChunk = useCallback((parentChunkId: string) => {
    setShowNewChildSegmentModal(true)
    setCurrChunkId(parentChunkId)
  }, [])

  const onCloseNewChildChunkModal = useCallback(() => {
    setShowNewChildSegmentModal(false)
    setFullScreen(false)
  }, [])

  // Display handlers - handles both direct calls and click events
  const toggleFullScreen = useCallback(() => {
    setFullScreen(prev => !prev)
  }, [])

  const toggleCollapsed = useCallback(() => {
    setIsCollapsed(prev => !prev)
  }, [])

  return {
    // Segment detail modal
    currSegment,
    onClickCard,
    onCloseSegmentDetail,
    // Child segment detail modal
    currChildChunk,
    currChunkId,
    onClickSlice,
    onCloseChildSegmentDetail,
    // New segment modal
    onCloseNewSegmentModal,
    // New child segment modal
    showNewChildSegmentModal,
    handleAddNewChildChunk,
    onCloseNewChildChunkModal,
    // Regeneration modal
    isRegenerationModalOpen,
    setIsRegenerationModalOpen,
    // Full screen
    fullScreen,
    toggleFullScreen,
    setFullScreen,
    // Collapsed state
    isCollapsed,
    toggleCollapsed,
  }
}
@@ -1,85 +0,0 @@
import type { Item } from '@/app/components/base/select'
import { useDebounceFn } from 'ahooks'
import { useCallback, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'

export type SearchFilterState = {
  inputValue: string
  searchValue: string
  selectedStatus: boolean | 'all'
}

export type UseSearchFilterReturn = {
  inputValue: string
  searchValue: string
  selectedStatus: boolean | 'all'
  statusList: Item[]
  selectDefaultValue: 'all' | 0 | 1
  handleInputChange: (value: string) => void
  onChangeStatus: (item: Item) => void
  onClearFilter: () => void
  resetPage: () => void
}

type UseSearchFilterOptions = {
  onPageChange: (page: number) => void
}

export const useSearchFilter = (options: UseSearchFilterOptions): UseSearchFilterReturn => {
  const { t } = useTranslation()
  const { onPageChange } = options

  const [inputValue, setInputValue] = useState<string>('')
  const [searchValue, setSearchValue] = useState<string>('')
  const [selectedStatus, setSelectedStatus] = useState<boolean | 'all'>('all')

  const statusList = useRef<Item[]>([
    { value: 'all', name: t('list.index.all', { ns: 'datasetDocuments' }) },
    { value: 0, name: t('list.status.disabled', { ns: 'datasetDocuments' }) },
    { value: 1, name: t('list.status.enabled', { ns: 'datasetDocuments' }) },
  ])

  const { run: handleSearch } = useDebounceFn(() => {
    setSearchValue(inputValue)
    onPageChange(1)
  }, { wait: 500 })

  const handleInputChange = useCallback((value: string) => {
    setInputValue(value)
    handleSearch()
  }, [handleSearch])

  const onChangeStatus = useCallback(({ value }: Item) => {
    setSelectedStatus(value === 'all' ? 'all' : !!value)
    onPageChange(1)
  }, [onPageChange])

  const onClearFilter = useCallback(() => {
    setInputValue('')
    setSearchValue('')
    setSelectedStatus('all')
    onPageChange(1)
  }, [onPageChange])

  const resetPage = useCallback(() => {
    onPageChange(1)
  }, [onPageChange])

  const selectDefaultValue = useMemo(() => {
    if (selectedStatus === 'all')
      return 'all'
    return selectedStatus ? 1 : 0
  }, [selectedStatus])

  return {
    inputValue,
    searchValue,
    selectedStatus,
    statusList: statusList.current,
    selectDefaultValue,
    handleInputChange,
    onChangeStatus,
    onClearFilter,
    resetPage,
  }
}
@@ -1,942 +0,0 @@
import type { FileEntity } from '@/app/components/datasets/common/image-uploader/types'
import type { DocumentContextValue } from '@/app/components/datasets/documents/detail/context'
import type { ChunkingMode, ParentMode, SegmentDetailModel, SegmentsResponse } from '@/models/datasets'
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { act, renderHook } from '@testing-library/react'
import * as React from 'react'
import { ChunkingMode as ChunkingModeEnum } from '@/models/datasets'
import { ProcessStatus } from '../../segment-add'
import { useSegmentListData } from './use-segment-list-data'

// Type for mutation callbacks
type SegmentMutationResponse = { data: SegmentDetailModel }
type SegmentMutationCallbacks = {
  onSuccess: (res: SegmentMutationResponse) => void
  onSettled: () => void
}

// Mock file entity factory
const createMockFileEntity = (overrides: Partial<FileEntity> = {}): FileEntity => ({
  id: 'file-1',
  name: 'test.png',
  size: 1024,
  extension: 'png',
  mimeType: 'image/png',
  progress: 100,
  uploadedId: undefined,
  base64Url: undefined,
  ...overrides,
})

// ============================================================================
// Hoisted Mocks
// ============================================================================

const {
  mockDocForm,
  mockParentMode,
  mockDatasetId,
  mockDocumentId,
  mockNotify,
  mockEventEmitter,
  mockQueryClient,
  mockSegmentListData,
  mockEnableSegment,
  mockDisableSegment,
  mockDeleteSegment,
  mockUpdateSegment,
  mockInvalidSegmentList,
  mockInvalidChunkListAll,
  mockInvalidChunkListEnabled,
  mockInvalidChunkListDisabled,
  mockPathname,
} = vi.hoisted(() => ({
  mockDocForm: { current: 'text' as ChunkingMode },
  mockParentMode: { current: 'paragraph' as ParentMode },
  mockDatasetId: { current: 'test-dataset-id' },
  mockDocumentId: { current: 'test-document-id' },
  mockNotify: vi.fn(),
  mockEventEmitter: { emit: vi.fn(), on: vi.fn(), off: vi.fn() },
  mockQueryClient: { setQueryData: vi.fn() },
  mockSegmentListData: { current: { data: [] as SegmentDetailModel[], total: 0, total_pages: 0, has_more: false, limit: 20, page: 1 } as SegmentsResponse | undefined },
  mockEnableSegment: vi.fn(),
  mockDisableSegment: vi.fn(),
  mockDeleteSegment: vi.fn(),
  mockUpdateSegment: vi.fn(),
  mockInvalidSegmentList: vi.fn(),
  mockInvalidChunkListAll: vi.fn(),
  mockInvalidChunkListEnabled: vi.fn(),
  mockInvalidChunkListDisabled: vi.fn(),
  mockPathname: { current: '/datasets/test/documents/test' },
}))

// Mock dependencies
vi.mock('react-i18next', () => ({
  useTranslation: () => ({
    t: (key: string, options?: { count?: number, ns?: string }) => {
      if (key === 'actionMsg.modifiedSuccessfully')
        return 'Modified successfully'
      if (key === 'actionMsg.modifiedUnsuccessfully')
        return 'Modified unsuccessfully'
      if (key === 'segment.contentEmpty')
        return 'Content cannot be empty'
      if (key === 'segment.questionEmpty')
        return 'Question cannot be empty'
      if (key === 'segment.answerEmpty')
        return 'Answer cannot be empty'
      if (key === 'segment.allFilesUploaded')
        return 'All files must be uploaded'
      if (key === 'segment.chunks')
        return options?.count === 1 ? 'chunk' : 'chunks'
      if (key === 'segment.parentChunks')
        return options?.count === 1 ? 'parent chunk' : 'parent chunks'
      if (key === 'segment.searchResults')
        return 'search results'
      return `${options?.ns || ''}.${key}`
    },
  }),
}))

vi.mock('next/navigation', () => ({
  usePathname: () => mockPathname.current,
}))

vi.mock('@tanstack/react-query', async () => {
  const actual = await vi.importActual('@tanstack/react-query')
  return {
    ...actual,
    useQueryClient: () => mockQueryClient,
  }
})

vi.mock('../../context', () => ({
  useDocumentContext: (selector: (value: DocumentContextValue) => unknown) => {
    const value: DocumentContextValue = {
      datasetId: mockDatasetId.current,
      documentId: mockDocumentId.current,
      docForm: mockDocForm.current,
      parentMode: mockParentMode.current,
    }
    return selector(value)
  },
}))

vi.mock('@/app/components/base/toast', () => ({
  useToastContext: () => ({ notify: mockNotify }),
}))

vi.mock('@/context/event-emitter', () => ({
  useEventEmitterContextContext: () => ({ eventEmitter: mockEventEmitter }),
}))

vi.mock('@/service/knowledge/use-segment', () => ({
  useSegmentList: () => ({
    isLoading: false,
    data: mockSegmentListData.current,
  }),
  useSegmentListKey: ['segment', 'chunkList'],
  useChunkListAllKey: ['segment', 'chunkList', { enabled: 'all' }],
  useChunkListEnabledKey: ['segment', 'chunkList', { enabled: true }],
  useChunkListDisabledKey: ['segment', 'chunkList', { enabled: false }],
  useEnableSegment: () => ({ mutateAsync: mockEnableSegment }),
  useDisableSegment: () => ({ mutateAsync: mockDisableSegment }),
  useDeleteSegment: () => ({ mutateAsync: mockDeleteSegment }),
  useUpdateSegment: () => ({ mutateAsync: mockUpdateSegment }),
}))

vi.mock('@/service/use-base', () => ({
  useInvalid: (key: unknown[]) => {
    const keyObj = key[2] as { enabled?: boolean | 'all' } | undefined
    if (keyObj?.enabled === 'all')
      return mockInvalidChunkListAll
    if (keyObj?.enabled === true)
      return mockInvalidChunkListEnabled
    if (keyObj?.enabled === false)
      return mockInvalidChunkListDisabled
    return mockInvalidSegmentList
  },
}))

// ============================================================================
// Test Utilities
// ============================================================================

const createQueryClient = () => new QueryClient({
  defaultOptions: {
    queries: { retry: false },
    mutations: { retry: false },
  },
})

const createWrapper = () => {
  const queryClient = createQueryClient()
  return ({ children }: { children: React.ReactNode }) =>
    React.createElement(QueryClientProvider, { client: queryClient }, children)
}

const createMockSegment = (overrides: Partial<SegmentDetailModel> = {}): SegmentDetailModel => ({
  id: `segment-${Math.random().toString(36).substr(2, 9)}`,
  position: 1,
  document_id: 'doc-1',
  content: 'Test content',
  sign_content: 'Test signed content',
  word_count: 100,
  tokens: 50,
  keywords: [],
  index_node_id: 'index-1',
  index_node_hash: 'hash-1',
  hit_count: 0,
  enabled: true,
  disabled_at: 0,
  disabled_by: '',
  status: 'completed',
  created_by: 'user-1',
  created_at: 1700000000,
  indexing_at: 1700000100,
  completed_at: 1700000200,
  error: null,
  stopped_at: 0,
  updated_at: 1700000000,
  attachments: [],
  child_chunks: [],
  ...overrides,
})

const defaultOptions = {
  searchValue: '',
  selectedStatus: 'all' as boolean | 'all',
  selectedSegmentIds: [] as string[],
  importStatus: undefined as ProcessStatus | string | undefined,
  currentPage: 1,
  limit: 10,
  onCloseSegmentDetail: vi.fn(),
  clearSelection: vi.fn(),
}

// ============================================================================
// Tests
// ============================================================================

describe('useSegmentListData', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    mockDocForm.current = ChunkingModeEnum.text as ChunkingMode
    mockParentMode.current = 'paragraph'
    mockDatasetId.current = 'test-dataset-id'
    mockDocumentId.current = 'test-document-id'
    mockSegmentListData.current = { data: [], total: 0, total_pages: 0, has_more: false, limit: 20, page: 1 }
    mockPathname.current = '/datasets/test/documents/test'
  })

  describe('Initial State', () => {
    it('should return empty segments initially', () => {
      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      expect(result.current.segments).toEqual([])
      expect(result.current.isLoadingSegmentList).toBe(false)
    })

    it('should compute isFullDocMode correctly', () => {
      mockDocForm.current = ChunkingModeEnum.parentChild
      mockParentMode.current = 'full-doc'

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      expect(result.current.isFullDocMode).toBe(true)
    })

    it('should compute isFullDocMode as false for text mode', () => {
      mockDocForm.current = ChunkingModeEnum.text

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      expect(result.current.isFullDocMode).toBe(false)
    })
  })

  describe('totalText computation', () => {
    it('should show chunks count when not searching', () => {
      mockSegmentListData.current = { data: [], total: 10, total_pages: 1, has_more: false, limit: 20, page: 1 }

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      expect(result.current.totalText).toContain('10')
      expect(result.current.totalText).toContain('chunks')
    })

    it('should show search results when searching', () => {
      mockSegmentListData.current = { data: [], total: 5, total_pages: 1, has_more: false, limit: 20, page: 1 }

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        searchValue: 'test',
      }), {
        wrapper: createWrapper(),
      })

      expect(result.current.totalText).toContain('5')
      expect(result.current.totalText).toContain('search results')
    })

    it('should show search results when status is filtered', () => {
      mockSegmentListData.current = { data: [], total: 3, total_pages: 1, has_more: false, limit: 20, page: 1 }

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedStatus: true,
      }), {
        wrapper: createWrapper(),
      })

      expect(result.current.totalText).toContain('search results')
    })

    it('should show parent chunks in parentChild paragraph mode', () => {
      mockDocForm.current = ChunkingModeEnum.parentChild
      mockParentMode.current = 'paragraph'
      mockSegmentListData.current = { data: [], total: 7, total_pages: 1, has_more: false, limit: 20, page: 1 }

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      expect(result.current.totalText).toContain('parent chunk')
    })

    it('should show "--" when total is undefined', () => {
      mockSegmentListData.current = undefined

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      expect(result.current.totalText).toContain('--')
    })
  })

  describe('resetList', () => {
    it('should call clearSelection and invalidSegmentList', () => {
      const clearSelection = vi.fn()

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        clearSelection,
      }), {
        wrapper: createWrapper(),
      })

      act(() => {
        result.current.resetList()
      })

      expect(clearSelection).toHaveBeenCalled()
      expect(mockInvalidSegmentList).toHaveBeenCalled()
    })
  })

  describe('refreshChunkListWithStatusChanged', () => {
    it('should invalidate disabled and enabled when status is all', async () => {
      mockEnableSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedStatus: 'all',
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onChangeSwitch(true, 'seg-1')
      })

      expect(mockInvalidChunkListDisabled).toHaveBeenCalled()
      expect(mockInvalidChunkListEnabled).toHaveBeenCalled()
    })

    it('should invalidate segment list when status is not all', async () => {
      mockEnableSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedStatus: true,
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onChangeSwitch(true, 'seg-1')
      })

      expect(mockInvalidSegmentList).toHaveBeenCalled()
    })
  })

  describe('onChangeSwitch', () => {
    it('should call enableSegment when enable is true', async () => {
      mockEnableSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onChangeSwitch(true, 'seg-1')
      })

      expect(mockEnableSegment).toHaveBeenCalled()
      expect(mockNotify).toHaveBeenCalledWith({ type: 'success', message: 'Modified successfully' })
    })

    it('should call disableSegment when enable is false', async () => {
      mockDisableSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onChangeSwitch(false, 'seg-1')
      })

      expect(mockDisableSegment).toHaveBeenCalled()
    })

    it('should use selectedSegmentIds when segId is empty', async () => {
      mockEnableSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedSegmentIds: ['seg-1', 'seg-2'],
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onChangeSwitch(true, '')
      })

      expect(mockEnableSegment).toHaveBeenCalledWith(
        expect.objectContaining({ segmentIds: ['seg-1', 'seg-2'] }),
        expect.any(Object),
      )
    })

    it('should notify error on failure', async () => {
      mockEnableSegment.mockImplementation(async (_params, { onError }: { onError: () => void }) => {
        onError()
      })

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onChangeSwitch(true, 'seg-1')
      })

      expect(mockNotify).toHaveBeenCalledWith({ type: 'error', message: 'Modified unsuccessfully' })
    })
  })

  describe('onDelete', () => {
    it('should call deleteSegment and resetList on success', async () => {
      const clearSelection = vi.fn()
      mockDeleteSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        clearSelection,
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onDelete('seg-1')
      })

      expect(mockDeleteSegment).toHaveBeenCalled()
      expect(mockNotify).toHaveBeenCalledWith({ type: 'success', message: 'Modified successfully' })
    })

    it('should clear selection when deleting batch (no segId)', async () => {
      const clearSelection = vi.fn()
      mockDeleteSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedSegmentIds: ['seg-1', 'seg-2'],
        clearSelection,
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onDelete('')
      })

      // clearSelection is called twice: once in resetList, once after
      expect(clearSelection).toHaveBeenCalled()
    })

    it('should notify error on failure', async () => {
      mockDeleteSegment.mockImplementation(async (_params, { onError }: { onError: () => void }) => {
        onError()
      })

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onDelete('seg-1')
      })

      expect(mockNotify).toHaveBeenCalledWith({ type: 'error', message: 'Modified unsuccessfully' })
    })
  })

  describe('handleUpdateSegment', () => {
    it('should validate empty content', async () => {
      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', ' ', '', [], [])
      })

      expect(mockNotify).toHaveBeenCalledWith({ type: 'error', message: 'Content cannot be empty' })
      expect(mockUpdateSegment).not.toHaveBeenCalled()
    })

    it('should validate empty question in QA mode', async () => {
      mockDocForm.current = ChunkingModeEnum.qa

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', '', 'answer', [], [])
      })

      expect(mockNotify).toHaveBeenCalledWith({ type: 'error', message: 'Question cannot be empty' })
    })

    it('should validate empty answer in QA mode', async () => {
      mockDocForm.current = ChunkingModeEnum.qa

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'question', ' ', [], [])
      })

      expect(mockNotify).toHaveBeenCalledWith({ type: 'error', message: 'Answer cannot be empty' })
    })

    it('should validate attachments are uploaded', async () => {
      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'content', '', [], [
          createMockFileEntity({ id: '1', name: 'test.png', uploadedId: undefined }),
        ])
      })

      expect(mockNotify).toHaveBeenCalledWith({ type: 'error', message: 'All files must be uploaded' })
    })

    it('should call updateSegment with correct params', async () => {
      mockUpdateSegment.mockImplementation(async (_params, { onSuccess, onSettled }: SegmentMutationCallbacks) => {
        onSuccess({ data: createMockSegment() })
        onSettled()
      })

      const onCloseSegmentDetail = vi.fn()
      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        onCloseSegmentDetail,
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'updated content', '', ['keyword1'], [])
      })

      expect(mockUpdateSegment).toHaveBeenCalled()
      expect(mockNotify).toHaveBeenCalledWith({ type: 'success', message: 'Modified successfully' })
      expect(onCloseSegmentDetail).toHaveBeenCalled()
      expect(mockEventEmitter.emit).toHaveBeenCalledWith('update-segment')
      expect(mockEventEmitter.emit).toHaveBeenCalledWith('update-segment-success')
      expect(mockEventEmitter.emit).toHaveBeenCalledWith('update-segment-done')
    })

    it('should not close modal when needRegenerate is true', async () => {
      mockUpdateSegment.mockImplementation(async (_params, { onSuccess, onSettled }: SegmentMutationCallbacks) => {
        onSuccess({ data: createMockSegment() })
        onSettled()
      })

      const onCloseSegmentDetail = vi.fn()
      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        onCloseSegmentDetail,
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'content', '', [], [], true)
      })

      expect(onCloseSegmentDetail).not.toHaveBeenCalled()
    })

    it('should include attachments in params', async () => {
      mockUpdateSegment.mockImplementation(async (_params, { onSuccess, onSettled }: SegmentMutationCallbacks) => {
        onSuccess({ data: createMockSegment() })
        onSettled()
      })

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'content', '', [], [
          createMockFileEntity({ id: '1', name: 'test.png', uploadedId: 'uploaded-1' }),
        ])
      })

      expect(mockUpdateSegment).toHaveBeenCalledWith(
        expect.objectContaining({
          body: expect.objectContaining({ attachment_ids: ['uploaded-1'] }),
        }),
        expect.any(Object),
      )
    })
  })

  describe('viewNewlyAddedChunk', () => {
    it('should set needScrollToBottom and not call resetList when adding new page', () => {
      mockSegmentListData.current = { data: [], total: 10, total_pages: 1, has_more: false, limit: 20, page: 1 }

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        limit: 10,
      }), {
        wrapper: createWrapper(),
      })

      act(() => {
        result.current.viewNewlyAddedChunk()
      })

      expect(result.current.needScrollToBottom.current).toBe(true)
    })

    it('should call resetList when not adding new page', () => {
      mockSegmentListData.current = { data: [], total: 5, total_pages: 1, has_more: false, limit: 20, page: 1 }

      const clearSelection = vi.fn()
      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        clearSelection,
        limit: 10,
      }), {
        wrapper: createWrapper(),
      })

      act(() => {
        result.current.viewNewlyAddedChunk()
      })

      // resetList should be called
      expect(clearSelection).toHaveBeenCalled()
    })
  })

  describe('updateSegmentInCache', () => {
    it('should call queryClient.setQueryData', () => {
      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      act(() => {
        result.current.updateSegmentInCache('seg-1', seg => ({ ...seg, enabled: false }))
      })

      expect(mockQueryClient.setQueryData).toHaveBeenCalled()
    })
  })

  describe('Effect: pathname change', () => {
    it('should reset list when pathname changes', async () => {
      const clearSelection = vi.fn()

      renderHook(() => useSegmentListData({
        ...defaultOptions,
        clearSelection,
      }), {
        wrapper: createWrapper(),
      })

      // Initial call from effect
      expect(clearSelection).toHaveBeenCalled()
      expect(mockInvalidSegmentList).toHaveBeenCalled()
    })
  })

  describe('Effect: import status', () => {
    it('should reset list when import status is COMPLETED', () => {
      const clearSelection = vi.fn()

      renderHook(() => useSegmentListData({
        ...defaultOptions,
        importStatus: ProcessStatus.COMPLETED,
        clearSelection,
      }), {
        wrapper: createWrapper(),
      })

      expect(clearSelection).toHaveBeenCalled()
    })
  })

  describe('refreshChunkListDataWithDetailChanged', () => {
    it('should call correct invalidation for status all', async () => {
      mockUpdateSegment.mockImplementation(async (_params, { onSuccess, onSettled }: SegmentMutationCallbacks) => {
        onSuccess({ data: createMockSegment() })
        onSettled()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedStatus: 'all',
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'content', '', [], [])
      })

      expect(mockInvalidChunkListDisabled).toHaveBeenCalled()
      expect(mockInvalidChunkListEnabled).toHaveBeenCalled()
    })

    it('should call correct invalidation for status true', async () => {
      mockUpdateSegment.mockImplementation(async (_params, { onSuccess, onSettled }: SegmentMutationCallbacks) => {
        onSuccess({ data: createMockSegment() })
        onSettled()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedStatus: true,
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'content', '', [], [])
      })

      expect(mockInvalidChunkListAll).toHaveBeenCalled()
      expect(mockInvalidChunkListDisabled).toHaveBeenCalled()
    })

    it('should call correct invalidation for status false', async () => {
      mockUpdateSegment.mockImplementation(async (_params, { onSuccess, onSettled }: SegmentMutationCallbacks) => {
        onSuccess({ data: createMockSegment() })
        onSettled()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedStatus: false,
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'content', '', [], [])
      })

      expect(mockInvalidChunkListAll).toHaveBeenCalled()
      expect(mockInvalidChunkListEnabled).toHaveBeenCalled()
    })
  })

  describe('QA Mode validation', () => {
    it('should set content and answer for QA mode', async () => {
      mockDocForm.current = ChunkingModeEnum.qa as ChunkingMode

      mockUpdateSegment.mockImplementation(async (_params, { onSuccess, onSettled }: SegmentMutationCallbacks) => {
        onSuccess({ data: createMockSegment() })
        onSettled()
      })

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.handleUpdateSegment('seg-1', 'question', 'answer', [], [])
      })

      expect(mockUpdateSegment).toHaveBeenCalledWith(
        expect.objectContaining({
          body: expect.objectContaining({
            content: 'question',
            answer: 'answer',
          }),
        }),
        expect.any(Object),
      )
    })
  })

  describe('updateSegmentsInCache', () => {
    it('should handle undefined old data', () => {
      mockQueryClient.setQueryData.mockImplementation((_key, updater) => {
        const result = typeof updater === 'function' ? updater(undefined) : updater
        return result
      })

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      // Call updateSegmentInCache which should handle undefined gracefully
      act(() => {
        result.current.updateSegmentInCache('seg-1', seg => ({ ...seg, enabled: false }))
      })

      expect(mockQueryClient.setQueryData).toHaveBeenCalled()
    })

    it('should map segments correctly when old data exists', () => {
      const mockOldData = {
        data: [
          createMockSegment({ id: 'seg-1', enabled: true }),
          createMockSegment({ id: 'seg-2', enabled: true }),
        ],
        total: 2,
        total_pages: 1,
      }

      mockQueryClient.setQueryData.mockImplementation((_key, updater) => {
        const result = typeof updater === 'function' ? updater(mockOldData) : updater
        // Verify the updater transforms the data correctly
        expect(result.data[0].enabled).toBe(false) // seg-1 should be updated
        expect(result.data[1].enabled).toBe(true) // seg-2 should remain unchanged
        return result
      })

      const { result } = renderHook(() => useSegmentListData(defaultOptions), {
        wrapper: createWrapper(),
      })

      act(() => {
        result.current.updateSegmentInCache('seg-1', seg => ({ ...seg, enabled: false }))
      })

      expect(mockQueryClient.setQueryData).toHaveBeenCalled()
    })
  })

  describe('updateSegmentsInCache batch', () => {
    it('should handle undefined old data in batch update', async () => {
      mockQueryClient.setQueryData.mockImplementation((_key, updater) => {
        const result = typeof updater === 'function' ? updater(undefined) : updater
        return result
      })

      mockEnableSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedSegmentIds: ['seg-1', 'seg-2'],
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onChangeSwitch(true, '')
      })

      expect(mockQueryClient.setQueryData).toHaveBeenCalled()
    })

    it('should map multiple segments correctly when old data exists', async () => {
      const mockOldData = {
        data: [
          createMockSegment({ id: 'seg-1', enabled: false }),
          createMockSegment({ id: 'seg-2', enabled: false }),
          createMockSegment({ id: 'seg-3', enabled: false }),
        ],
        total: 3,
        total_pages: 1,
      }

      mockQueryClient.setQueryData.mockImplementation((_key, updater) => {
        const result = typeof updater === 'function' ? updater(mockOldData) : updater
        // Verify only selected segments are updated
        if (result && result.data) {
          expect(result.data[0].enabled).toBe(true) // seg-1 should be updated
          expect(result.data[1].enabled).toBe(true) // seg-2 should be updated
          expect(result.data[2].enabled).toBe(false) // seg-3 should remain unchanged
        }
        return result
      })

      mockEnableSegment.mockImplementation(async (_params, { onSuccess }: { onSuccess: () => void }) => {
        onSuccess()
      })

      const { result } = renderHook(() => useSegmentListData({
        ...defaultOptions,
        selectedSegmentIds: ['seg-1', 'seg-2'],
      }), {
        wrapper: createWrapper(),
      })

      await act(async () => {
        await result.current.onChangeSwitch(true, '')
      })

      expect(mockQueryClient.setQueryData).toHaveBeenCalled()
    })
  })
})
@@ -1,363 +0,0 @@
import type { FileEntity } from '@/app/components/datasets/common/image-uploader/types'
import type { SegmentDetailModel, SegmentsResponse, SegmentUpdater } from '@/models/datasets'
import { useQueryClient } from '@tanstack/react-query'
import { usePathname } from 'next/navigation'
import { useCallback, useEffect, useMemo, useRef } from 'react'
import { useTranslation } from 'react-i18next'
import { useToastContext } from '@/app/components/base/toast'
import { useEventEmitterContextContext } from '@/context/event-emitter'
import { ChunkingMode } from '@/models/datasets'
import {
  useChunkListAllKey,
  useChunkListDisabledKey,
  useChunkListEnabledKey,
  useDeleteSegment,
  useDisableSegment,
  useEnableSegment,
  useSegmentList,
  useSegmentListKey,
  useUpdateSegment,
} from '@/service/knowledge/use-segment'
import { useInvalid } from '@/service/use-base'
import { formatNumber } from '@/utils/format'
import { useDocumentContext } from '../../context'
import { ProcessStatus } from '../../segment-add'

const DEFAULT_LIMIT = 10

export type UseSegmentListDataOptions = {
  searchValue: string
  selectedStatus: boolean | 'all'
  selectedSegmentIds: string[]
  importStatus: ProcessStatus | string | undefined
  currentPage: number
  limit: number
  onCloseSegmentDetail: () => void
  clearSelection: () => void
}

export type UseSegmentListDataReturn = {
  segments: SegmentDetailModel[]
  isLoadingSegmentList: boolean
  segmentListData: ReturnType<typeof useSegmentList>['data']
  totalText: string
  isFullDocMode: boolean
  segmentListRef: React.RefObject<HTMLDivElement | null>
  needScrollToBottom: React.RefObject<boolean>
  // Operations
  onChangeSwitch: (enable: boolean, segId?: string) => Promise<void>
  onDelete: (segId?: string) => Promise<void>
  handleUpdateSegment: (
    segmentId: string,
    question: string,
    answer: string,
    keywords: string[],
    attachments: FileEntity[],
    needRegenerate?: boolean,
  ) => Promise<void>
  resetList: () => void
  viewNewlyAddedChunk: () => void
  invalidSegmentList: () => void
  updateSegmentInCache: (segmentId: string, updater: (seg: SegmentDetailModel) => SegmentDetailModel) => void
}

export const useSegmentListData = (options: UseSegmentListDataOptions): UseSegmentListDataReturn => {
  const {
    searchValue,
    selectedStatus,
    selectedSegmentIds,
    importStatus,
    currentPage,
    limit,
    onCloseSegmentDetail,
    clearSelection,
  } = options

  const { t } = useTranslation()
  const { notify } = useToastContext()
  const pathname = usePathname()
  const { eventEmitter } = useEventEmitterContextContext()
  const queryClient = useQueryClient()

  const datasetId = useDocumentContext(s => s.datasetId) || ''
  const documentId = useDocumentContext(s => s.documentId) || ''
  const docForm = useDocumentContext(s => s.docForm)
  const parentMode = useDocumentContext(s => s.parentMode)

  const segmentListRef = useRef<HTMLDivElement>(null)
  const needScrollToBottom = useRef(false)

  const isFullDocMode = useMemo(() => {
    return docForm === ChunkingMode.parentChild && parentMode === 'full-doc'
  }, [docForm, parentMode])

  // Build query params
  const queryParams = useMemo(() => ({
    page: isFullDocMode ? 1 : currentPage,
    limit: isFullDocMode ? DEFAULT_LIMIT : limit,
    keyword: isFullDocMode ? '' : searchValue,
    enabled: selectedStatus,
  }), [isFullDocMode, currentPage, limit, searchValue, selectedStatus])

  // Build query key for optimistic updates
  const currentQueryKey = useMemo(() =>
    [...useSegmentListKey, datasetId, documentId, queryParams], [datasetId, documentId, queryParams])

  // Fetch segment list
  const { isLoading: isLoadingSegmentList, data: segmentListData } = useSegmentList({
    datasetId,
    documentId,
    params: queryParams,
  })

  // Derive segments from query data
  const segments = useMemo(() => segmentListData?.data || [], [segmentListData])

  // Invalidation hooks
  const invalidSegmentList = useInvalid(useSegmentListKey)
  const invalidChunkListAll = useInvalid(useChunkListAllKey)
  const invalidChunkListEnabled = useInvalid(useChunkListEnabledKey)
  const invalidChunkListDisabled = useInvalid(useChunkListDisabledKey)

  // Scroll to bottom when needed
  useEffect(() => {
    if (segmentListRef.current && needScrollToBottom.current) {
      segmentListRef.current.scrollTo({ top: segmentListRef.current.scrollHeight, behavior: 'smooth' })
      needScrollToBottom.current = false
    }
  }, [segments])

  // Reset list on pathname change
  useEffect(() => {
    clearSelection()
    invalidSegmentList()
  }, [pathname])

  // Reset list on import completion
  useEffect(() => {
    if (importStatus === ProcessStatus.COMPLETED) {
      clearSelection()
      invalidSegmentList()
    }
  }, [importStatus])

  const resetList = useCallback(() => {
    clearSelection()
    invalidSegmentList()
  }, [clearSelection, invalidSegmentList])

  const refreshChunkListWithStatusChanged = useCallback(() => {
    if (selectedStatus === 'all') {
      invalidChunkListDisabled()
      invalidChunkListEnabled()
    }
    else {
      invalidSegmentList()
    }
  }, [selectedStatus, invalidChunkListDisabled, invalidChunkListEnabled, invalidSegmentList])

  const refreshChunkListDataWithDetailChanged = useCallback(() => {
    const refreshMap: Record<string, () => void> = {
      all: () => {
        invalidChunkListDisabled()
        invalidChunkListEnabled()
      },
      true: () => {
        invalidChunkListAll()
        invalidChunkListDisabled()
      },
      false: () => {
        invalidChunkListAll()
        invalidChunkListEnabled()
      },
    }
    refreshMap[String(selectedStatus)]?.()
  }, [selectedStatus, invalidChunkListDisabled, invalidChunkListEnabled, invalidChunkListAll])

  // Optimistic update helper using React Query's setQueryData
  const updateSegmentInCache = useCallback((
    segmentId: string,
    updater: (seg: SegmentDetailModel) => SegmentDetailModel,
  ) => {
    queryClient.setQueryData<SegmentsResponse>(currentQueryKey, (old) => {
      if (!old)
        return old
      return {
        ...old,
        data: old.data.map(seg => seg.id === segmentId ? updater(seg) : seg),
      }
    })
  }, [queryClient, currentQueryKey])

  // Batch update helper
  const updateSegmentsInCache = useCallback((
    segmentIds: string[],
    updater: (seg: SegmentDetailModel) => SegmentDetailModel,
  ) => {
    queryClient.setQueryData<SegmentsResponse>(currentQueryKey, (old) => {
      if (!old)
        return old
      return {
        ...old,
        data: old.data.map(seg => segmentIds.includes(seg.id) ? updater(seg) : seg),
      }
    })
  }, [queryClient, currentQueryKey])

  // Mutations
  const { mutateAsync: enableSegment } = useEnableSegment()
  const { mutateAsync: disableSegment } = useDisableSegment()
  const { mutateAsync: deleteSegment } = useDeleteSegment()
  const { mutateAsync: updateSegment } = useUpdateSegment()

  const onChangeSwitch = useCallback(async (enable: boolean, segId?: string) => {
    const operationApi = enable ? enableSegment : disableSegment
    const targetIds = segId ? [segId] : selectedSegmentIds

    await operationApi({ datasetId, documentId, segmentIds: targetIds }, {
      onSuccess: () => {
        notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
        updateSegmentsInCache(targetIds, seg => ({ ...seg, enabled: enable }))
        refreshChunkListWithStatusChanged()
      },
      onError: () => {
        notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
      },
    })
  }, [datasetId, documentId, selectedSegmentIds, disableSegment, enableSegment, t, notify, updateSegmentsInCache, refreshChunkListWithStatusChanged])

  const onDelete = useCallback(async (segId?: string) => {
    const targetIds = segId ? [segId] : selectedSegmentIds

    await deleteSegment({ datasetId, documentId, segmentIds: targetIds }, {
      onSuccess: () => {
        notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
        resetList()
        if (!segId)
          clearSelection()
      },
      onError: () => {
        notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
      },
    })
  }, [datasetId, documentId, selectedSegmentIds, deleteSegment, resetList, clearSelection, t, notify])

  const handleUpdateSegment = useCallback(async (
    segmentId: string,
    question: string,
    answer: string,
    keywords: string[],
    attachments: FileEntity[],
    needRegenerate = false,
  ) => {
    const params: SegmentUpdater = { content: '', attachment_ids: [] }

    // Validate and build params based on doc form
    if (docForm === ChunkingMode.qa) {
      if (!question.trim()) {
        notify({ type: 'error', message: t('segment.questionEmpty', { ns: 'datasetDocuments' }) })
        return
      }
      if (!answer.trim()) {
        notify({ type: 'error', message: t('segment.answerEmpty', { ns: 'datasetDocuments' }) })
        return
      }
      params.content = question
      params.answer = answer
    }
    else {
      if (!question.trim()) {
        notify({ type: 'error', message: t('segment.contentEmpty', { ns: 'datasetDocuments' }) })
        return
      }
      params.content = question
    }

    if (keywords.length)
      params.keywords = keywords

    if (attachments.length) {
      const notAllUploaded = attachments.some(item => !item.uploadedId)
      if (notAllUploaded) {
        notify({ type: 'error', message: t('segment.allFilesUploaded', { ns: 'datasetDocuments' }) })
        return
      }
      params.attachment_ids = attachments.map(item => item.uploadedId!)
    }

    if (needRegenerate)
      params.regenerate_child_chunks = needRegenerate

    eventEmitter?.emit('update-segment')
    await updateSegment({ datasetId, documentId, segmentId, body: params }, {
      onSuccess(res) {
        notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
        if (!needRegenerate)
          onCloseSegmentDetail()

        updateSegmentInCache(segmentId, seg => ({
          ...seg,
          answer: res.data.answer,
          content: res.data.content,
          sign_content: res.data.sign_content,
          keywords: res.data.keywords,
          attachments: res.data.attachments,
          word_count: res.data.word_count,
          hit_count: res.data.hit_count,
          enabled: res.data.enabled,
          updated_at: res.data.updated_at,
          child_chunks: res.data.child_chunks,
        }))
        refreshChunkListDataWithDetailChanged()
        eventEmitter?.emit('update-segment-success')
      },
      onSettled() {
        eventEmitter?.emit('update-segment-done')
      },
    })
  }, [datasetId, documentId, docForm, updateSegment, notify, eventEmitter, onCloseSegmentDetail, updateSegmentInCache, refreshChunkListDataWithDetailChanged, t])

  const viewNewlyAddedChunk = useCallback(() => {
    const totalPages = segmentListData?.total_pages || 0
    const total = segmentListData?.total || 0
    const newPage = Math.ceil((total + 1) / limit)
    needScrollToBottom.current = true

    if (newPage > totalPages)
      return
    resetList()
  }, [segmentListData, limit, resetList])

  // Compute total text for display
  const totalText = useMemo(() => {
    const isSearch = searchValue !== '' || selectedStatus !== 'all'
    if (!isSearch) {
      const total = segmentListData?.total ? formatNumber(segmentListData.total) : '--'
      const count = total === '--' ? 0 : segmentListData!.total
      const translationKey = (docForm === ChunkingMode.parentChild && parentMode === 'paragraph')
        ? 'segment.parentChunks' as const
        : 'segment.chunks' as const
      return `${total} ${t(translationKey, { ns: 'datasetDocuments', count })}`
    }
    const total = typeof segmentListData?.total === 'number' ? formatNumber(segmentListData.total) : 0
    const count = segmentListData?.total || 0
    return `${total} ${t('segment.searchResults', { ns: 'datasetDocuments', count })}`
  }, [segmentListData, docForm, parentMode, searchValue, selectedStatus, t])

  return {
    segments,
    isLoadingSegmentList,
    segmentListData,
    totalText,
    isFullDocMode,
    segmentListRef,
    needScrollToBottom,
    onChangeSwitch,
    onDelete,
    handleUpdateSegment,
    resetList,
    viewNewlyAddedChunk,
    invalidSegmentList,
    updateSegmentInCache,
  }
}
@@ -1,58 +0,0 @@
import type { SegmentDetailModel } from '@/models/datasets'
import { useCallback, useMemo, useState } from 'react'

export type UseSegmentSelectionReturn = {
  selectedSegmentIds: string[]
  isAllSelected: boolean
  isSomeSelected: boolean
  onSelected: (segId: string) => void
  onSelectedAll: () => void
  onCancelBatchOperation: () => void
  clearSelection: () => void
}

export const useSegmentSelection = (segments: SegmentDetailModel[]): UseSegmentSelectionReturn => {
  const [selectedSegmentIds, setSelectedSegmentIds] = useState<string[]>([])

  const onSelected = useCallback((segId: string) => {
    setSelectedSegmentIds(prev =>
      prev.includes(segId)
        ? prev.filter(id => id !== segId)
        : [...prev, segId],
    )
  }, [])

  const isAllSelected = useMemo(() => {
    return segments.length > 0 && segments.every(seg => selectedSegmentIds.includes(seg.id))
  }, [segments, selectedSegmentIds])

  const isSomeSelected = useMemo(() => {
    return segments.some(seg => selectedSegmentIds.includes(seg.id))
  }, [segments, selectedSegmentIds])

  const onSelectedAll = useCallback(() => {
    setSelectedSegmentIds((prev) => {
      const currentAllSegIds = segments.map(seg => seg.id)
      const prevSelectedIds = prev.filter(item => !currentAllSegIds.includes(item))
      return [...prevSelectedIds, ...(isAllSelected ? [] : currentAllSegIds)]
    })
  }, [segments, isAllSelected])

  const onCancelBatchOperation = useCallback(() => {
    setSelectedSegmentIds([])
  }, [])

  const clearSelection = useCallback(() => {
    setSelectedSegmentIds([])
  }, [])

  return {
    selectedSegmentIds,
    isAllSelected,
    isSomeSelected,
    onSelected,
    onSelectedAll,
    onCancelBatchOperation,
    clearSelection,
  }
}
File diff suppressed because it is too large
@ -1,33 +1,89 @@
|
||||
'use client'
|
||||
import type { FC } from 'react'
|
||||
import type { ProcessStatus } from '../segment-add'
|
||||
import type { SegmentListContextValue } from './segment-list-context'
|
||||
import { useCallback, useMemo, useState } from 'react'
|
||||
import type { Item } from '@/app/components/base/select'
|
||||
import type { FileEntity } from '@/app/components/datasets/common/image-uploader/types'
|
||||
import type { ChildChunkDetail, SegmentDetailModel, SegmentUpdater } from '@/models/datasets'
|
||||
import { useDebounceFn } from 'ahooks'
|
||||
import { noop } from 'es-toolkit/function'
|
||||
import { usePathname } from 'next/navigation'
|
||||
import * as React from 'react'
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { useTranslation } from 'react-i18next'
|
||||
import { createContext, useContext, useContextSelector } from 'use-context-selector'
|
||||
import Checkbox from '@/app/components/base/checkbox'
|
||||
import Divider from '@/app/components/base/divider'
|
||||
import Input from '@/app/components/base/input'
|
||||
import Pagination from '@/app/components/base/pagination'
|
||||
import { SimpleSelect } from '@/app/components/base/select'
|
||||
import { ToastContext } from '@/app/components/base/toast'
|
||||
import NewSegment from '@/app/components/datasets/documents/detail/new-segment'
|
||||
import { useEventEmitterContextContext } from '@/context/event-emitter'
|
||||
import { ChunkingMode } from '@/models/datasets'
|
||||
import {
|
||||
useChildSegmentList,
|
||||
useChildSegmentListKey,
|
||||
useChunkListAllKey,
|
||||
useChunkListDisabledKey,
|
||||
useChunkListEnabledKey,
|
||||
useDeleteChildSegment,
|
||||
useDeleteSegment,
|
||||
useDisableSegment,
|
||||
useEnableSegment,
|
||||
useSegmentList,
|
||||
useSegmentListKey,
|
||||
useUpdateChildSegment,
|
||||
useUpdateSegment,
|
||||
} from '@/service/knowledge/use-segment'
|
||||
import { useInvalid } from '@/service/use-base'
|
||||
import { cn } from '@/utils/classnames'
|
||||
import { formatNumber } from '@/utils/format'
|
||||
import { useDocumentContext } from '../context'
|
||||
import { ProcessStatus } from '../segment-add'
|
||||
import ChildSegmentDetail from './child-segment-detail'
|
||||
import ChildSegmentList from './child-segment-list'
|
||||
import BatchAction from './common/batch-action'
|
||||
import { DrawerGroup, FullDocModeContent, GeneralModeContent, MenuBar } from './components'
|
||||
import {
|
||||
useChildSegmentData,
|
||||
useModalState,
|
||||
useSearchFilter,
|
||||
useSegmentListData,
|
||||
useSegmentSelection,
|
||||
} from './hooks'
|
||||
import {
|
||||
SegmentListContext,
|
||||
useSegmentListContext,
|
||||
} from './segment-list-context'
|
||||
import FullScreenDrawer from './common/full-screen-drawer'
|
||||
import DisplayToggle from './display-toggle'
|
||||
import NewChildSegment from './new-child-segment'
|
||||
import SegmentCard from './segment-card'
|
||||
import SegmentDetail from './segment-detail'
|
||||
import SegmentList from './segment-list'
|
||||
import StatusItem from './status-item'
|
||||
import s from './style.module.css'
|
||||
|
||||
const DEFAULT_LIMIT = 10
|
||||
|
||||
type CurrSegmentType = {
|
||||
segInfo?: SegmentDetailModel
|
||||
showModal: boolean
|
||||
isEditMode?: boolean
|
||||
}
|
||||
|
||||
type CurrChildChunkType = {
|
||||
childChunkInfo?: ChildChunkDetail
|
||||
showModal: boolean
|
||||
}
|
||||
|
||||
export type SegmentListContextValue = {
|
||||
isCollapsed: boolean
|
||||
fullScreen: boolean
|
||||
toggleFullScreen: (fullscreen?: boolean) => void
|
||||
currSegment: CurrSegmentType
|
||||
currChildChunk: CurrChildChunkType
|
||||
}
|
||||
|
||||
const SegmentListContext = createContext<SegmentListContextValue>({
|
||||
isCollapsed: true,
|
||||
fullScreen: false,
|
||||
toggleFullScreen: noop,
|
||||
currSegment: { showModal: false },
|
||||
currChildChunk: { showModal: false },
|
||||
})
|
||||
|
||||
export const useSegmentListContext = (selector: (value: SegmentListContextValue) => any) => {
|
||||
return useContextSelector(SegmentListContext, selector)
|
||||
}
|
||||
|
||||
type ICompletedProps = {
embeddingAvailable: boolean
showNewSegmentModal: boolean
@@ -35,7 +91,6 @@ type ICompletedProps = {
importStatus: ProcessStatus | string | undefined
archived?: boolean
}

/**
* Embedding done, show list of all segments
* Support search and filter
@@ -47,219 +102,669 @@ const Completed: FC<ICompletedProps> = ({
importStatus,
archived,
}) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const pathname = usePathname()
const datasetId = useDocumentContext(s => s.datasetId) || ''
const documentId = useDocumentContext(s => s.documentId) || ''
const docForm = useDocumentContext(s => s.docForm)
const parentMode = useDocumentContext(s => s.parentMode)
// the current segment id and whether to show the modal
const [currSegment, setCurrSegment] = useState<CurrSegmentType>({ showModal: false })
const [currChildChunk, setCurrChildChunk] = useState<CurrChildChunkType>({ showModal: false })
const [currChunkId, setCurrChunkId] = useState('')

// Pagination state
const [currentPage, setCurrentPage] = useState(1)
const [inputValue, setInputValue] = useState<string>('') // the input value
const [searchValue, setSearchValue] = useState<string>('') // the search value
const [selectedStatus, setSelectedStatus] = useState<boolean | 'all'>('all') // the selected status, enabled/disabled/undefined

const [segments, setSegments] = useState<SegmentDetailModel[]>([]) // all segments data
const [childSegments, setChildSegments] = useState<ChildChunkDetail[]>([]) // all child segments data
const [selectedSegmentIds, setSelectedSegmentIds] = useState<string[]>([])
const { eventEmitter } = useEventEmitterContextContext()
const [isCollapsed, setIsCollapsed] = useState(true)
const [currentPage, setCurrentPage] = useState(1) // start from 1
const [limit, setLimit] = useState(DEFAULT_LIMIT)
const [fullScreen, setFullScreen] = useState(false)
const [showNewChildSegmentModal, setShowNewChildSegmentModal] = useState(false)
const [isRegenerationModalOpen, setIsRegenerationModalOpen] = useState(false)

// Search and filter state
const searchFilter = useSearchFilter({
onPageChange: setCurrentPage,
})
const segmentListRef = useRef<HTMLDivElement>(null)
const childSegmentListRef = useRef<HTMLDivElement>(null)
const needScrollToBottom = useRef(false)
const statusList = useRef<Item[]>([
{ value: 'all', name: t('list.index.all', { ns: 'datasetDocuments' }) },
{ value: 0, name: t('list.status.disabled', { ns: 'datasetDocuments' }) },
{ value: 1, name: t('list.status.enabled', { ns: 'datasetDocuments' }) },
])

// Modal state
const modalState = useModalState({
onNewSegmentModalChange,
})
const { run: handleSearch } = useDebounceFn(() => {
setSearchValue(inputValue)
setCurrentPage(1)
}, { wait: 500 })

// Selection state (need segments first, so we use a placeholder initially)
const [segmentsForSelection, setSegmentsForSelection] = useState<string[]>([])
const handleInputChange = (value: string) => {
setInputValue(value)
handleSearch()
}

// Invalidation hooks for child segment data
const onChangeStatus = ({ value }: Item) => {
setSelectedStatus(value === 'all' ? 'all' : !!value)
setCurrentPage(1)
}

const isFullDocMode = useMemo(() => {
return docForm === ChunkingMode.parentChild && parentMode === 'full-doc'
}, [docForm, parentMode])

const { isLoading: isLoadingSegmentList, data: segmentListData } = useSegmentList(
{
datasetId,
documentId,
params: {
page: isFullDocMode ? 1 : currentPage,
limit: isFullDocMode ? 10 : limit,
keyword: isFullDocMode ? '' : searchValue,
enabled: selectedStatus,
},
},
)
const invalidSegmentList = useInvalid(useSegmentListKey)

useEffect(() => {
if (segmentListData) {
setSegments(segmentListData.data || [])
const totalPages = segmentListData.total_pages
if (totalPages < currentPage)
setCurrentPage(totalPages === 0 ? 1 : totalPages)
}
}, [segmentListData])

useEffect(() => {
if (segmentListRef.current && needScrollToBottom.current) {
segmentListRef.current.scrollTo({ top: segmentListRef.current.scrollHeight, behavior: 'smooth' })
needScrollToBottom.current = false
}
}, [segments])

const { isLoading: isLoadingChildSegmentList, data: childChunkListData } = useChildSegmentList(
{
datasetId,
documentId,
segmentId: segments[0]?.id || '',
params: {
page: currentPage === 0 ? 1 : currentPage,
limit,
keyword: searchValue,
},
},
!isFullDocMode || segments.length === 0,
)
const invalidChildSegmentList = useInvalid(useChildSegmentListKey)

useEffect(() => {
if (childSegmentListRef.current && needScrollToBottom.current) {
childSegmentListRef.current.scrollTo({ top: childSegmentListRef.current.scrollHeight, behavior: 'smooth' })
needScrollToBottom.current = false
}
}, [childSegments])

useEffect(() => {
if (childChunkListData) {
setChildSegments(childChunkListData.data || [])
const totalPages = childChunkListData.total_pages
if (totalPages < currentPage)
setCurrentPage(totalPages === 0 ? 1 : totalPages)
}
}, [childChunkListData])

const resetList = useCallback(() => {
setSelectedSegmentIds([])
invalidSegmentList()
}, [invalidSegmentList])

const resetChildList = useCallback(() => {
invalidChildSegmentList()
}, [invalidChildSegmentList])

const onClickCard = (detail: SegmentDetailModel, isEditMode = false) => {
setCurrSegment({ segInfo: detail, showModal: true, isEditMode })
}

const onCloseSegmentDetail = useCallback(() => {
setCurrSegment({ showModal: false })
setFullScreen(false)
}, [])

const onCloseNewSegmentModal = useCallback(() => {
onNewSegmentModalChange(false)
setFullScreen(false)
}, [onNewSegmentModalChange])

const onCloseNewChildChunkModal = useCallback(() => {
setShowNewChildSegmentModal(false)
setFullScreen(false)
}, [])

const { mutateAsync: enableSegment } = useEnableSegment()
const { mutateAsync: disableSegment } = useDisableSegment()
const invalidChunkListAll = useInvalid(useChunkListAllKey)
const invalidChunkListEnabled = useInvalid(useChunkListEnabledKey)
const invalidChunkListDisabled = useInvalid(useChunkListDisabledKey)

const refreshChunkListDataWithDetailChanged = useCallback(() => {
const refreshMap: Record<string, () => void> = {
all: () => {
const refreshChunkListWithStatusChanged = useCallback(() => {
switch (selectedStatus) {
case 'all':
invalidChunkListDisabled()
invalidChunkListEnabled()
},
true: () => {
invalidChunkListAll()
invalidChunkListDisabled()
},
false: () => {
invalidChunkListAll()
invalidChunkListEnabled()
},
break
default:
invalidSegmentList()
}
refreshMap[String(searchFilter.selectedStatus)]?.()
}, [searchFilter.selectedStatus, invalidChunkListDisabled, invalidChunkListEnabled, invalidChunkListAll])
}, [selectedStatus, invalidChunkListDisabled, invalidChunkListEnabled, invalidSegmentList])

// Segment list data
const segmentListDataHook = useSegmentListData({
searchValue: searchFilter.searchValue,
selectedStatus: searchFilter.selectedStatus,
selectedSegmentIds: segmentsForSelection,
importStatus,
currentPage,
limit,
onCloseSegmentDetail: modalState.onCloseSegmentDetail,
clearSelection: () => setSegmentsForSelection([]),
})
const onChangeSwitch = useCallback(async (enable: boolean, segId?: string) => {
const operationApi = enable ? enableSegment : disableSegment
await operationApi({ datasetId, documentId, segmentIds: segId ? [segId] : selectedSegmentIds }, {
onSuccess: () => {
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
for (const seg of segments) {
if (segId ? seg.id === segId : selectedSegmentIds.includes(seg.id))
seg.enabled = enable
}
setSegments([...segments])
refreshChunkListWithStatusChanged()
},
onError: () => {
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
},
})
}, [datasetId, documentId, selectedSegmentIds, segments, disableSegment, enableSegment, t, notify, refreshChunkListWithStatusChanged])

// Selection state (with actual segments)
const selectionState = useSegmentSelection(segmentListDataHook.segments)
const { mutateAsync: deleteSegment } = useDeleteSegment()

// Sync selection state for segment list data hook
useMemo(() => {
setSegmentsForSelection(selectionState.selectedSegmentIds)
}, [selectionState.selectedSegmentIds])
const onDelete = useCallback(async (segId?: string) => {
await deleteSegment({ datasetId, documentId, segmentIds: segId ? [segId] : selectedSegmentIds }, {
onSuccess: () => {
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
resetList()
if (!segId)
setSelectedSegmentIds([])
},
onError: () => {
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
},
})
}, [datasetId, documentId, selectedSegmentIds, deleteSegment, resetList, t, notify])

// Child segment data
const childSegmentDataHook = useChildSegmentData({
searchValue: searchFilter.searchValue,
currentPage,
limit,
segments: segmentListDataHook.segments,
currChunkId: modalState.currChunkId,
isFullDocMode: segmentListDataHook.isFullDocMode,
onCloseChildSegmentDetail: modalState.onCloseChildSegmentDetail,
refreshChunkListDataWithDetailChanged,
updateSegmentInCache: segmentListDataHook.updateSegmentInCache,
})
const { mutateAsync: updateSegment } = useUpdateSegment()

// Compute total for pagination
const paginationTotal = useMemo(() => {
if (segmentListDataHook.isFullDocMode)
return childSegmentDataHook.childChunkListData?.total || 0
return segmentListDataHook.segmentListData?.total || 0
}, [segmentListDataHook.isFullDocMode, childSegmentDataHook.childChunkListData, segmentListDataHook.segmentListData])
const refreshChunkListDataWithDetailChanged = useCallback(() => {
switch (selectedStatus) {
case 'all':
invalidChunkListDisabled()
invalidChunkListEnabled()
break
case true:
invalidChunkListAll()
invalidChunkListDisabled()
break
case false:
invalidChunkListAll()
invalidChunkListEnabled()
break
}
}, [selectedStatus, invalidChunkListDisabled, invalidChunkListEnabled, invalidChunkListAll])

// Handle page change
const handlePageChange = useCallback((page: number) => {
setCurrentPage(page + 1)
const handleUpdateSegment = useCallback(async (
segmentId: string,
question: string,
answer: string,
keywords: string[],
attachments: FileEntity[],
needRegenerate = false,
) => {
const params: SegmentUpdater = { content: '', attachment_ids: [] }
if (docForm === ChunkingMode.qa) {
if (!question.trim())
return notify({ type: 'error', message: t('segment.questionEmpty', { ns: 'datasetDocuments' }) })
if (!answer.trim())
return notify({ type: 'error', message: t('segment.answerEmpty', { ns: 'datasetDocuments' }) })

params.content = question
params.answer = answer
}
else {
if (!question.trim())
return notify({ type: 'error', message: t('segment.contentEmpty', { ns: 'datasetDocuments' }) })

params.content = question
}

if (keywords.length)
params.keywords = keywords

if (attachments.length) {
const notAllUploaded = attachments.some(item => !item.uploadedId)
if (notAllUploaded)
return notify({ type: 'error', message: t('segment.allFilesUploaded', { ns: 'datasetDocuments' }) })
params.attachment_ids = attachments.map(item => item.uploadedId!)
}

if (needRegenerate)
params.regenerate_child_chunks = needRegenerate

eventEmitter?.emit('update-segment')
await updateSegment({ datasetId, documentId, segmentId, body: params }, {
onSuccess(res) {
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
if (!needRegenerate)
onCloseSegmentDetail()
for (const seg of segments) {
if (seg.id === segmentId) {
seg.answer = res.data.answer
seg.content = res.data.content
seg.sign_content = res.data.sign_content
seg.keywords = res.data.keywords
seg.attachments = res.data.attachments
seg.word_count = res.data.word_count
seg.hit_count = res.data.hit_count
seg.enabled = res.data.enabled
seg.updated_at = res.data.updated_at
seg.child_chunks = res.data.child_chunks
}
}
setSegments([...segments])
refreshChunkListDataWithDetailChanged()
eventEmitter?.emit('update-segment-success')
},
onSettled() {
eventEmitter?.emit('update-segment-done')
},
})
}, [segments, datasetId, documentId, updateSegment, docForm, notify, eventEmitter, onCloseSegmentDetail, refreshChunkListDataWithDetailChanged, t])

useEffect(() => {
resetList()
}, [pathname])

useEffect(() => {
if (importStatus === ProcessStatus.COMPLETED)
resetList()
}, [importStatus])

const onCancelBatchOperation = useCallback(() => {
setSelectedSegmentIds([])
}, [])

// Context value
const onSelected = useCallback((segId: string) => {
setSelectedSegmentIds(prev =>
prev.includes(segId)
? prev.filter(id => id !== segId)
: [...prev, segId],
)
}, [])

const isAllSelected = useMemo(() => {
return segments.length > 0 && segments.every(seg => selectedSegmentIds.includes(seg.id))
}, [segments, selectedSegmentIds])

const isSomeSelected = useMemo(() => {
return segments.some(seg => selectedSegmentIds.includes(seg.id))
}, [segments, selectedSegmentIds])

const onSelectedAll = useCallback(() => {
setSelectedSegmentIds((prev) => {
const currentAllSegIds = segments.map(seg => seg.id)
const prevSelectedIds = prev.filter(item => !currentAllSegIds.includes(item))
return [...prevSelectedIds, ...(isAllSelected ? [] : currentAllSegIds)]
})
}, [segments, isAllSelected])

const totalText = useMemo(() => {
const isSearch = searchValue !== '' || selectedStatus !== 'all'
if (!isSearch) {
const total = segmentListData?.total ? formatNumber(segmentListData.total) : '--'
const count = total === '--' ? 0 : segmentListData!.total
const translationKey = (docForm === ChunkingMode.parentChild && parentMode === 'paragraph')
? 'segment.parentChunks' as const
: 'segment.chunks' as const
return `${total} ${t(translationKey, { ns: 'datasetDocuments', count })}`
}
else {
const total = typeof segmentListData?.total === 'number' ? formatNumber(segmentListData.total) : 0
const count = segmentListData?.total || 0
return `${total} ${t('segment.searchResults', { ns: 'datasetDocuments', count })}`
}
}, [segmentListData, docForm, parentMode, searchValue, selectedStatus, t])

const toggleFullScreen = useCallback(() => {
setFullScreen(!fullScreen)
}, [fullScreen])

const toggleCollapsed = useCallback(() => {
setIsCollapsed(prev => !prev)
}, [])

const viewNewlyAddedChunk = useCallback(async () => {
const totalPages = segmentListData?.total_pages || 0
const total = segmentListData?.total || 0
const newPage = Math.ceil((total + 1) / limit)
needScrollToBottom.current = true
if (newPage > totalPages) {
setCurrentPage(totalPages + 1)
}
else {
resetList()
if (currentPage !== totalPages)
setCurrentPage(totalPages)
}
}, [segmentListData, limit, currentPage, resetList])

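// Worked example (illustration only, not part of the diff): with limit = 10 and 30
// existing chunks (total_pages = 3), the new chunk becomes item 31 and
// newPage = Math.ceil(31 / 10) = 4 > 3, so the branch above jumps to a fresh page 4;
// with 25 existing chunks the new item still fits on page 3, so the list is simply
// refreshed and scrolled to the last page.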
const { mutateAsync: deleteChildSegment } = useDeleteChildSegment()

const onDeleteChildChunk = useCallback(async (segmentId: string, childChunkId: string) => {
await deleteChildSegment(
{ datasetId, documentId, segmentId, childChunkId },
{
onSuccess: () => {
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
if (parentMode === 'paragraph')
resetList()
else
resetChildList()
},
onError: () => {
notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
},
},
)
}, [datasetId, documentId, parentMode, deleteChildSegment, resetList, resetChildList, t, notify])

const handleAddNewChildChunk = useCallback((parentChunkId: string) => {
setShowNewChildSegmentModal(true)
setCurrChunkId(parentChunkId)
}, [])

const onSaveNewChildChunk = useCallback((newChildChunk?: ChildChunkDetail) => {
if (parentMode === 'paragraph') {
for (const seg of segments) {
if (seg.id === currChunkId)
seg.child_chunks?.push(newChildChunk!)
}
setSegments([...segments])
refreshChunkListDataWithDetailChanged()
}
else {
resetChildList()
}
}, [parentMode, currChunkId, segments, refreshChunkListDataWithDetailChanged, resetChildList])

const viewNewlyAddedChildChunk = useCallback(() => {
const totalPages = childChunkListData?.total_pages || 0
const total = childChunkListData?.total || 0
const newPage = Math.ceil((total + 1) / limit)
needScrollToBottom.current = true
if (newPage > totalPages) {
setCurrentPage(totalPages + 1)
}
else {
resetChildList()
if (currentPage !== totalPages)
setCurrentPage(totalPages)
}
}, [childChunkListData, limit, currentPage, resetChildList])

const onClickSlice = useCallback((detail: ChildChunkDetail) => {
setCurrChildChunk({ childChunkInfo: detail, showModal: true })
setCurrChunkId(detail.segment_id)
}, [])

const onCloseChildSegmentDetail = useCallback(() => {
setCurrChildChunk({ showModal: false })
setFullScreen(false)
}, [])

const { mutateAsync: updateChildSegment } = useUpdateChildSegment()

const handleUpdateChildChunk = useCallback(async (
segmentId: string,
childChunkId: string,
content: string,
) => {
const params: SegmentUpdater = { content: '' }
if (!content.trim())
return notify({ type: 'error', message: t('segment.contentEmpty', { ns: 'datasetDocuments' }) })

params.content = content

eventEmitter?.emit('update-child-segment')
await updateChildSegment({ datasetId, documentId, segmentId, childChunkId, body: params }, {
onSuccess: (res) => {
notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
onCloseChildSegmentDetail()
if (parentMode === 'paragraph') {
for (const seg of segments) {
if (seg.id === segmentId) {
for (const childSeg of seg.child_chunks!) {
if (childSeg.id === childChunkId) {
childSeg.content = res.data.content
childSeg.type = res.data.type
childSeg.word_count = res.data.word_count
childSeg.updated_at = res.data.updated_at
}
}
}
}
setSegments([...segments])
refreshChunkListDataWithDetailChanged()
}
else {
resetChildList()
}
},
onSettled: () => {
eventEmitter?.emit('update-child-segment-done')
},
})
}, [segments, datasetId, documentId, parentMode, updateChildSegment, notify, eventEmitter, onCloseChildSegmentDetail, refreshChunkListDataWithDetailChanged, resetChildList, t])

const onClearFilter = useCallback(() => {
setInputValue('')
setSearchValue('')
setSelectedStatus('all')
setCurrentPage(1)
}, [])

const selectDefaultValue = useMemo(() => {
if (selectedStatus === 'all')
return 'all'
return selectedStatus ? 1 : 0
}, [selectedStatus])

const contextValue = useMemo<SegmentListContextValue>(() => ({
isCollapsed: modalState.isCollapsed,
fullScreen: modalState.fullScreen,
toggleFullScreen: modalState.toggleFullScreen,
currSegment: modalState.currSegment,
currChildChunk: modalState.currChildChunk,
}), [
modalState.isCollapsed,
modalState.fullScreen,
modalState.toggleFullScreen,
modalState.currSegment,
modalState.currChildChunk,
])
isCollapsed,
fullScreen,
toggleFullScreen,
currSegment,
currChildChunk,
}), [isCollapsed, fullScreen, toggleFullScreen, currSegment, currChildChunk])

return (
<SegmentListContext.Provider value={contextValue}>
{/* Menu Bar */}
{!segmentListDataHook.isFullDocMode && (
<MenuBar
isAllSelected={selectionState.isAllSelected}
isSomeSelected={selectionState.isSomeSelected}
onSelectedAll={selectionState.onSelectedAll}
isLoading={segmentListDataHook.isLoadingSegmentList}
totalText={segmentListDataHook.totalText}
statusList={searchFilter.statusList}
selectDefaultValue={searchFilter.selectDefaultValue}
onChangeStatus={searchFilter.onChangeStatus}
inputValue={searchFilter.inputValue}
onInputChange={searchFilter.handleInputChange}
isCollapsed={modalState.isCollapsed}
toggleCollapsed={modalState.toggleCollapsed}
/>
{!isFullDocMode && (
<div className={s.docSearchWrapper}>
<Checkbox
className="shrink-0"
checked={isAllSelected}
indeterminate={!isAllSelected && isSomeSelected}
onCheck={onSelectedAll}
disabled={isLoadingSegmentList}
/>
<div className="system-sm-semibold-uppercase flex-1 pl-5 text-text-secondary">{totalText}</div>
<SimpleSelect
onSelect={onChangeStatus}
items={statusList.current}
defaultValue={selectDefaultValue}
className={s.select}
wrapperClassName="h-fit mr-2"
optionWrapClassName="w-[160px]"
optionClassName="p-0"
renderOption={({ item, selected }) => <StatusItem item={item} selected={selected} />}
notClearable
/>
<Input
showLeftIcon
showClearIcon
wrapperClassName="!w-52"
value={inputValue}
onChange={e => handleInputChange(e.target.value)}
onClear={() => handleInputChange('')}
/>
<Divider type="vertical" className="mx-3 h-3.5" />
<DisplayToggle isCollapsed={isCollapsed} toggleCollapsed={toggleCollapsed} />
</div>
)}

{/* Segment list */}
{segmentListDataHook.isFullDocMode
? (
<FullDocModeContent
segments={segmentListDataHook.segments}
childSegments={childSegmentDataHook.childSegments}
isLoadingSegmentList={segmentListDataHook.isLoadingSegmentList}
isLoadingChildSegmentList={childSegmentDataHook.isLoadingChildSegmentList}
currSegmentId={modalState.currSegment?.segInfo?.id}
onClickCard={modalState.onClickCard}
onDeleteChildChunk={childSegmentDataHook.onDeleteChildChunk}
handleInputChange={searchFilter.handleInputChange}
handleAddNewChildChunk={modalState.handleAddNewChildChunk}
onClickSlice={modalState.onClickSlice}
archived={archived}
childChunkTotal={childSegmentDataHook.childChunkListData?.total || 0}
inputValue={searchFilter.inputValue}
onClearFilter={searchFilter.onClearFilter}
/>
)
: (
<GeneralModeContent
segmentListRef={segmentListDataHook.segmentListRef}
embeddingAvailable={embeddingAvailable}
isLoadingSegmentList={segmentListDataHook.isLoadingSegmentList}
segments={segmentListDataHook.segments}
selectedSegmentIds={selectionState.selectedSegmentIds}
onSelected={selectionState.onSelected}
onChangeSwitch={segmentListDataHook.onChangeSwitch}
onDelete={segmentListDataHook.onDelete}
onClickCard={modalState.onClickCard}
archived={archived}
onDeleteChildChunk={childSegmentDataHook.onDeleteChildChunk}
handleAddNewChildChunk={modalState.handleAddNewChildChunk}
onClickSlice={modalState.onClickSlice}
onClearFilter={searchFilter.onClearFilter}
/>
)}

{
isFullDocMode
? (
<div className={cn(
'flex grow flex-col overflow-x-hidden',
(isLoadingSegmentList || isLoadingChildSegmentList) ? 'overflow-y-hidden' : 'overflow-y-auto',
)}
>
<SegmentCard
detail={segments[0]}
onClick={() => onClickCard(segments[0])}
loading={isLoadingSegmentList}
focused={{
segmentIndex: currSegment?.segInfo?.id === segments[0]?.id,
segmentContent: currSegment?.segInfo?.id === segments[0]?.id,
}}
/>
<ChildSegmentList
parentChunkId={segments[0]?.id}
onDelete={onDeleteChildChunk}
childChunks={childSegments}
handleInputChange={handleInputChange}
handleAddNewChildChunk={handleAddNewChildChunk}
onClickSlice={onClickSlice}
enabled={!archived}
total={childChunkListData?.total || 0}
inputValue={inputValue}
onClearFilter={onClearFilter}
isLoading={isLoadingSegmentList || isLoadingChildSegmentList}
/>
</div>
)
: (
<SegmentList
ref={segmentListRef}
embeddingAvailable={embeddingAvailable}
isLoading={isLoadingSegmentList}
items={segments}
selectedSegmentIds={selectedSegmentIds}
onSelected={onSelected}
onChangeSwitch={onChangeSwitch}
onDelete={onDelete}
onClick={onClickCard}
archived={archived}
onDeleteChildChunk={onDeleteChildChunk}
handleAddNewChildChunk={handleAddNewChildChunk}
onClickSlice={onClickSlice}
onClearFilter={onClearFilter}
/>
)
}
{/* Pagination */}
<Divider type="horizontal" className="mx-6 my-0 h-px w-auto bg-divider-subtle" />
<Pagination
current={currentPage - 1}
onChange={handlePageChange}
total={paginationTotal}
onChange={cur => setCurrentPage(cur + 1)}
total={(isFullDocMode ? childChunkListData?.total : segmentListData?.total) || 0}
limit={limit}
onLimitChange={setLimit}
className={segmentListDataHook.isFullDocMode ? 'px-3' : ''}
onLimitChange={limit => setLimit(limit)}
className={isFullDocMode ? 'px-3' : ''}
/>
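{/* Indexing note (illustration, not part of the diff): the Pagination component is
0-based while currentPage is 1-based, so a stored page of 3 renders as current={2},
and clicking page 4 yields onChange(3), which is stored back as 3 + 1 = 4. */}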
{/* Drawer Group - only render when docForm is available */}
{docForm && (
<DrawerGroup
currSegment={modalState.currSegment}
onCloseSegmentDetail={modalState.onCloseSegmentDetail}
onUpdateSegment={segmentListDataHook.handleUpdateSegment}
isRegenerationModalOpen={modalState.isRegenerationModalOpen}
setIsRegenerationModalOpen={modalState.setIsRegenerationModalOpen}
showNewSegmentModal={showNewSegmentModal}
onCloseNewSegmentModal={modalState.onCloseNewSegmentModal}
onSaveNewSegment={segmentListDataHook.resetList}
viewNewlyAddedChunk={segmentListDataHook.viewNewlyAddedChunk}
currChildChunk={modalState.currChildChunk}
currChunkId={modalState.currChunkId}
onCloseChildSegmentDetail={modalState.onCloseChildSegmentDetail}
onUpdateChildChunk={childSegmentDataHook.handleUpdateChildChunk}
showNewChildSegmentModal={modalState.showNewChildSegmentModal}
onCloseNewChildChunkModal={modalState.onCloseNewChildChunkModal}
onSaveNewChildChunk={childSegmentDataHook.onSaveNewChildChunk}
viewNewlyAddedChildChunk={childSegmentDataHook.viewNewlyAddedChildChunk}
fullScreen={modalState.fullScreen}
{/* Edit or view segment detail */}
<FullScreenDrawer
isOpen={currSegment.showModal}
fullScreen={fullScreen}
onClose={onCloseSegmentDetail}
showOverlay={false}
needCheckChunks
modal={isRegenerationModalOpen}
>
<SegmentDetail
key={currSegment.segInfo?.id}
segInfo={currSegment.segInfo ?? { id: '' }}
docForm={docForm}
isEditMode={currSegment.isEditMode}
onUpdate={handleUpdateSegment}
onCancel={onCloseSegmentDetail}
onModalStateChange={setIsRegenerationModalOpen}
/>
)}

</FullScreenDrawer>
{/* Create New Segment */}
<FullScreenDrawer
isOpen={showNewSegmentModal}
fullScreen={fullScreen}
onClose={onCloseNewSegmentModal}
modal
>
<NewSegment
docForm={docForm}
onCancel={onCloseNewSegmentModal}
onSave={resetList}
viewNewlyAddedChunk={viewNewlyAddedChunk}
/>
</FullScreenDrawer>
{/* Edit or view child segment detail */}
<FullScreenDrawer
isOpen={currChildChunk.showModal}
fullScreen={fullScreen}
onClose={onCloseChildSegmentDetail}
showOverlay={false}
needCheckChunks
>
<ChildSegmentDetail
key={currChildChunk.childChunkInfo?.id}
chunkId={currChunkId}
childChunkInfo={currChildChunk.childChunkInfo ?? { id: '' }}
docForm={docForm}
onUpdate={handleUpdateChildChunk}
onCancel={onCloseChildSegmentDetail}
/>
</FullScreenDrawer>
{/* Create New Child Segment */}
<FullScreenDrawer
isOpen={showNewChildSegmentModal}
fullScreen={fullScreen}
onClose={onCloseNewChildChunkModal}
modal
>
<NewChildSegment
chunkId={currChunkId}
onCancel={onCloseNewChildChunkModal}
onSave={onSaveNewChildChunk}
viewNewlyAddedChildChunk={viewNewlyAddedChildChunk}
/>
</FullScreenDrawer>
{/* Batch Action Buttons */}
{selectionState.selectedSegmentIds.length > 0 && (
{selectedSegmentIds.length > 0 && (
<BatchAction
className="absolute bottom-16 left-0 z-20"
selectedIds={selectionState.selectedSegmentIds}
onBatchEnable={() => segmentListDataHook.onChangeSwitch(true, '')}
onBatchDisable={() => segmentListDataHook.onChangeSwitch(false, '')}
onBatchDelete={() => segmentListDataHook.onDelete('')}
onCancel={selectionState.onCancelBatchOperation}
selectedIds={selectedSegmentIds}
onBatchEnable={onChangeSwitch.bind(null, true, '')}
onBatchDisable={onChangeSwitch.bind(null, false, '')}
onBatchDelete={onDelete.bind(null, '')}
onCancel={onCancelBatchOperation}
/>
)}
</SegmentListContext.Provider>
)
}

export { useSegmentListContext }
export type { SegmentListContextValue }

export default Completed

@@ -1,34 +0,0 @@
import type { ChildChunkDetail, SegmentDetailModel } from '@/models/datasets'
import { noop } from 'es-toolkit/function'
import { createContext, useContextSelector } from 'use-context-selector'

export type CurrSegmentType = {
segInfo?: SegmentDetailModel
showModal: boolean
isEditMode?: boolean
}

export type CurrChildChunkType = {
childChunkInfo?: ChildChunkDetail
showModal: boolean
}

export type SegmentListContextValue = {
isCollapsed: boolean
fullScreen: boolean
toggleFullScreen: () => void
currSegment: CurrSegmentType
currChildChunk: CurrChildChunkType
}

export const SegmentListContext = createContext<SegmentListContextValue>({
isCollapsed: true,
fullScreen: false,
toggleFullScreen: noop,
currSegment: { showModal: false },
currChildChunk: { showModal: false },
})

export const useSegmentListContext = <T>(selector: (value: SegmentListContextValue) => T): T => {
return useContextSelector(SegmentListContext, selector)
}
@@ -1,93 +0,0 @@
import { render } from '@testing-library/react'
import FullDocListSkeleton from './full-doc-list-skeleton'

describe('FullDocListSkeleton', () => {
describe('Rendering', () => {
it('should render the skeleton container', () => {
const { container } = render(<FullDocListSkeleton />)

const skeletonContainer = container.firstChild
expect(skeletonContainer).toHaveClass('flex', 'w-full', 'grow', 'flex-col')
})

it('should render 15 Slice components', () => {
const { container } = render(<FullDocListSkeleton />)

// Each Slice has a specific structure with gap-y-1
const slices = container.querySelectorAll('.gap-y-1')
expect(slices.length).toBe(15)
})

it('should render mask overlay', () => {
const { container } = render(<FullDocListSkeleton />)

const maskOverlay = container.querySelector('.bg-dataset-chunk-list-mask-bg')
expect(maskOverlay).toBeInTheDocument()
})

it('should have overflow hidden', () => {
const { container } = render(<FullDocListSkeleton />)

const skeletonContainer = container.firstChild
expect(skeletonContainer).toHaveClass('overflow-y-hidden')
})
})

describe('Slice Component', () => {
it('should render slice with correct structure', () => {
const { container } = render(<FullDocListSkeleton />)

// Each slice has two rows
const sliceRows = container.querySelectorAll('.bg-state-base-hover')
expect(sliceRows.length).toBeGreaterThan(0)
})

it('should render label placeholder in each slice', () => {
const { container } = render(<FullDocListSkeleton />)

// Label placeholder has specific width
const labelPlaceholders = container.querySelectorAll('.w-\\[30px\\]')
expect(labelPlaceholders.length).toBe(15) // One per slice
})

it('should render content placeholder in each slice', () => {
const { container } = render(<FullDocListSkeleton />)

// Content placeholder has 2/3 width
const contentPlaceholders = container.querySelectorAll('.w-2\\/3')
expect(contentPlaceholders.length).toBe(15) // One per slice
})
})

describe('Memoization', () => {
it('should be memoized', () => {
const { rerender, container } = render(<FullDocListSkeleton />)

const initialContent = container.innerHTML

// Rerender should produce same output
rerender(<FullDocListSkeleton />)

expect(container.innerHTML).toBe(initialContent)
})
})

describe('Styling', () => {
it('should have correct z-index layering', () => {
const { container } = render(<FullDocListSkeleton />)

const skeletonContainer = container.firstChild
expect(skeletonContainer).toHaveClass('z-10')

const maskOverlay = container.querySelector('.z-20')
expect(maskOverlay).toBeInTheDocument()
})

it('should have gap between slices', () => {
const { container } = render(<FullDocListSkeleton />)

const skeletonContainer = container.firstChild
expect(skeletonContainer).toHaveClass('gap-y-3')
})
})
})
@@ -1,43 +1,34 @@
import type { ComponentType, FC } from 'react'
import type { FC } from 'react'
import type { ModelProvider } from '../declarations'
import type { Plugin } from '@/app/components/plugins/types'
import { useBoolean } from 'ahooks'
import * as React from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { AnthropicShortLight, Deepseek, Gemini, Grok, OpenaiSmall, Tongyi } from '@/app/components/base/icons/src/public/llm'
import { OpenaiSmall } from '@/app/components/base/icons/src/public/llm'
import Loading from '@/app/components/base/loading'
import Tooltip from '@/app/components/base/tooltip'
import InstallFromMarketplace from '@/app/components/plugins/install-plugin/install-from-marketplace'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import useTimestamp from '@/hooks/use-timestamp'
import { ModelProviderQuotaGetPaid } from '@/types/model-provider'
import { cn } from '@/utils/classnames'
import { formatNumber } from '@/utils/format'
import { PreferredProviderTypeEnum } from '../declarations'
import { useMarketplaceAllPlugins } from '../hooks'
import { MODEL_PROVIDER_QUOTA_GET_PAID, modelNameMap } from '../utils'
import { modelNameMap, ModelProviderQuotaGetPaid } from '../utils'

// Icon map for each provider - single source of truth for provider icons
const providerIconMap: Record<ModelProviderQuotaGetPaid, ComponentType<{ className?: string }>> = {
[ModelProviderQuotaGetPaid.OPENAI]: OpenaiSmall,
[ModelProviderQuotaGetPaid.ANTHROPIC]: AnthropicShortLight,
[ModelProviderQuotaGetPaid.GEMINI]: Gemini,
[ModelProviderQuotaGetPaid.X]: Grok,
[ModelProviderQuotaGetPaid.DEEPSEEK]: Deepseek,
[ModelProviderQuotaGetPaid.TONGYI]: Tongyi,
}

// Derive allProviders from the shared constant
const allProviders = MODEL_PROVIDER_QUOTA_GET_PAID.map(key => ({
key,
Icon: providerIconMap[key],
}))
const allProviders = [
{ key: ModelProviderQuotaGetPaid.OPENAI, Icon: OpenaiSmall },
// { key: ModelProviderQuotaGetPaid.ANTHROPIC, Icon: AnthropicShortLight },
// { key: ModelProviderQuotaGetPaid.GEMINI, Icon: Gemini },
// { key: ModelProviderQuotaGetPaid.X, Icon: Grok },
// { key: ModelProviderQuotaGetPaid.DEEPSEEK, Icon: Deepseek },
// { key: ModelProviderQuotaGetPaid.TONGYI, Icon: Tongyi },
] as const

// Map provider key to plugin ID
// provider key format: langgenius/provider/model, plugin ID format: langgenius/provider
const providerKeyToPluginId: Record<ModelProviderQuotaGetPaid, string> = {
const providerKeyToPluginId: Record<string, string> = {
[ModelProviderQuotaGetPaid.OPENAI]: 'langgenius/openai',
[ModelProviderQuotaGetPaid.ANTHROPIC]: 'langgenius/anthropic',
[ModelProviderQuotaGetPaid.GEMINI]: 'langgenius/gemini',
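// Illustration (an assumption, not part of the diff): the table above mirrors a plain
// string transform that drops the trailing model segment of the provider key, e.g.
//   'langgenius/openai/openai'.split('/').slice(0, 2).join('/') // => 'langgenius/openai'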
@@ -56,7 +47,6 @@ const QuotaPanel: FC<QuotaPanelProps> = ({
}) => {
const { t } = useTranslation()
const { currentWorkspace } = useAppContext()
const { trial_models } = useGlobalPublicStore(s => s.systemFeatures)
const credits = Math.max((currentWorkspace.trial_credits - currentWorkspace.trial_credits_used) || 0, 0)
const providerMap = useMemo(() => new Map(
providers.map(p => [p.provider, p.preferred_provider_type]),
@@ -72,7 +62,7 @@ const QuotaPanel: FC<QuotaPanelProps> = ({
}] = useBoolean(false)
const selectedPluginIdRef = useRef<string | null>(null)

const handleIconClick = useCallback((key: ModelProviderQuotaGetPaid) => {
const handleIconClick = useCallback((key: string) => {
const providerType = providerMap.get(key)
if (!providerType && allPlugins) {
const pluginId = providerKeyToPluginId[key]
@@ -107,7 +97,7 @@ const QuotaPanel: FC<QuotaPanelProps> = ({
<div className={cn('my-2 min-w-[72px] shrink-0 rounded-xl border-[0.5px] pb-2.5 pl-4 pr-2.5 pt-3 shadow-xs', credits <= 0 ? 'border-state-destructive-border hover:bg-state-destructive-hover' : 'border-components-panel-border bg-third-party-model-bg-default')}>
<div className="system-xs-medium-uppercase mb-2 flex h-4 items-center text-text-tertiary">
{t('modelProvider.quota', { ns: 'common' })}
<Tooltip popupContent={t('modelProvider.card.tip', { ns: 'common', modelNames: trial_models.map(key => modelNameMap[key as keyof typeof modelNameMap]).filter(Boolean).join(', ') })} />
<Tooltip popupContent={t('modelProvider.card.tip', { ns: 'common' })} />
</div>
<div className="flex items-center justify-between">
<div className="flex items-center gap-1 text-xs text-text-tertiary">
@@ -129,7 +119,7 @@ const QuotaPanel: FC<QuotaPanelProps> = ({
: null}
</div>
<div className="flex items-center gap-1">
{allProviders.filter(({ key }) => trial_models.includes(key)).map(({ key, Icon }) => {
{allProviders.map(({ key, Icon }) => {
const providerType = providerMap.get(key)
const usingQuota = providerType === PreferredProviderTypeEnum.system
const getTooltipKey = () => {

@@ -144,7 +144,7 @@ const SystemModel: FC<SystemModelSelectorProps> = ({
{t('modelProvider.systemModelSettings', { ns: 'common' })}
</Button>
</PortalToFollowElemTrigger>
<PortalToFollowElemContent className="z-[60]">
<PortalToFollowElemContent className="z-[75]">
<div className="w-[360px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg pt-4 shadow-xl">
<div className="px-6 py-1">
<div className="flex h-8 items-center text-[13px] font-medium text-text-primary">

@@ -1,5 +1,4 @@
import type {
CredentialFormSchemaSelect,
CredentialFormSchemaTextInput,
FormValue,
ModelLoadBalancingConfig,
@@ -10,7 +9,6 @@ import {
validateModelLoadBalancingCredentials,
validateModelProvider,
} from '@/service/common'
import { ModelProviderQuotaGetPaid } from '@/types/model-provider'
import { ValidatedStatus } from '../key-validator/declarations'
import {
ConfigurationMethodEnum,
@@ -19,8 +17,15 @@ import {
ModelTypeEnum,
} from './declarations'

export { ModelProviderQuotaGetPaid } from '@/types/model-provider'

export enum ModelProviderQuotaGetPaid {
ANTHROPIC = 'langgenius/anthropic/anthropic',
OPENAI = 'langgenius/openai/openai',
// AZURE_OPENAI = 'langgenius/azure_openai/azure_openai',
GEMINI = 'langgenius/gemini/google',
X = 'langgenius/x/x',
DEEPSEEK = 'langgenius/deepseek/deepseek',
TONGYI = 'langgenius/tongyi/tongyi',
}
export const MODEL_PROVIDER_QUOTA_GET_PAID = [ModelProviderQuotaGetPaid.ANTHROPIC, ModelProviderQuotaGetPaid.OPENAI, ModelProviderQuotaGetPaid.GEMINI, ModelProviderQuotaGetPaid.X, ModelProviderQuotaGetPaid.DEEPSEEK, ModelProviderQuotaGetPaid.TONGYI]

export const modelNameMap = {
@@ -32,7 +37,7 @@ export const modelNameMap = {
[ModelProviderQuotaGetPaid.TONGYI]: 'Tongyi',
}

export const isNullOrUndefined = (value: unknown): value is null | undefined => {
export const isNullOrUndefined = (value: any) => {
return value === undefined || value === null
}

@@ -61,9 +66,8 @@ export const validateCredentials = async (predefined: boolean, provider: string,
else
return Promise.resolve({ status: ValidatedStatus.Error, message: res.error || 'error' })
}
catch (e: unknown) {
const message = e instanceof Error ? e.message : 'Unknown error'
return Promise.resolve({ status: ValidatedStatus.Error, message })
catch (e: any) {
return Promise.resolve({ status: ValidatedStatus.Error, message: e.message })
}
}

@@ -86,9 +90,8 @@ export const validateLoadBalancingCredentials = async (predefined: boolean, prov
else
return Promise.resolve({ status: ValidatedStatus.Error, message: res.error || 'error' })
}
catch (e: unknown) {
const message = e instanceof Error ? e.message : 'Unknown error'
return Promise.resolve({ status: ValidatedStatus.Error, message })
catch (e: any) {
return Promise.resolve({ status: ValidatedStatus.Error, message: e.message })
}
}

@@ -174,7 +177,7 @@ export const modelTypeFormat = (modelType: ModelTypeEnum) => {
return modelType.toLocaleUpperCase()
}

export const genModelTypeFormSchema = (modelTypes: ModelTypeEnum[]): Omit<CredentialFormSchemaSelect, 'name'> => {
export const genModelTypeFormSchema = (modelTypes: ModelTypeEnum[]) => {
return {
type: FormTypeEnum.select,
label: {
@@ -195,10 +198,10 @@ export const genModelTypeFormSchema = (modelTypes: ModelTypeEnum[]): Omit<Creden
show_on: [],
}
}),
}
} as any
}

export const genModelNameFormSchema = (model?: Pick<CredentialFormSchemaTextInput, 'label' | 'placeholder'>): Omit<CredentialFormSchemaTextInput, 'name'> => {
export const genModelNameFormSchema = (model?: Pick<CredentialFormSchemaTextInput, 'label' | 'placeholder'>) => {
return {
type: FormTypeEnum.textInput,
label: model?.label || {
@@ -212,5 +215,5 @@ export const genModelNameFormSchema = (model?: Pick<CredentialFormSchemaTextInpu
zh_Hans: '请输入模型名称',
en_US: 'Please enter model name',
},
}
} as any
}

@@ -76,7 +76,7 @@ const ActionList = ({
className='w-full'
onClick={() => setShowSettingAuth(true)}
disabled={!isCurrentWorkspaceManager}
>{t('workflow.nodes.tool.authorize')}</Button>
>{t('nodes.tool.authorize', { ns: 'workflow' })}</Button>
)} */}
</div>
{/* <div className='flex flex-col gap-2'>

@@ -23,7 +23,7 @@ export const useAvailableNodesMetaData = () => {
},
knowledgeBaseDefault,
dataSourceEmptyDefault,
], [])
] as AvailableNodesMetaData['nodes'], [])

const helpLinkUri = useMemo(() => docLink(
'/use-dify/knowledge/knowledge-pipeline/knowledge-pipeline-orchestration',
@@ -47,7 +47,7 @@ export const useAvailableNodesMetaData = () => {
title,
},
}
}), [mergedNodesMetaData, t])
}) as AvailableNodesMetaData['nodes'], [mergedNodesMetaData, t])

const availableNodesMetaDataMap = useMemo(() => availableNodesMetaData.reduce((acc, node) => {
acc![node.metaData.type] = node

@@ -3,8 +3,14 @@ import { useWorkflowStore } from '@/app/components/workflow/store'

export const useGetRunAndTraceUrl = () => {
const workflowStore = useWorkflowStore()
const getWorkflowRunAndTraceUrl = useCallback((runId: string) => {
const getWorkflowRunAndTraceUrl = useCallback((runId?: string) => {
const { pipelineId } = workflowStore.getState()
if (!pipelineId || !runId) {
return {
runUrl: '',
traceUrl: '',
}
}

return {
runUrl: `/rag/pipelines/${pipelineId}/workflow-runs/${runId}`,

@@ -10,6 +10,7 @@ import Divider from '@/app/components/base/divider'
import Modal from '@/app/components/base/modal'
import Textarea from '@/app/components/base/textarea'
import MCPServerParamItem from '@/app/components/tools/mcp/mcp-server-param-item'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import {
useCreateMCPServer,
useInvalidateMCPServerDetail,
@@ -59,6 +60,22 @@ const MCPServerModal = ({
return res
}

const emitMcpServerUpdate = (action: 'created' | 'updated') => {
const socket = webSocketClient.getSocket(appID)
if (!socket)
return

const timestamp = Date.now()
socket.emit('collaboration_event', {
type: 'mcp_server_update',
data: {
action,
timestamp,
},
timestamp,
})
}
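// Receiving side, shown for context (the listener appears later in this same diff):
// other clients subscribe through the collaboration manager and simply refetch, e.g.
//   collaborationManager.onMcpServerUpdate(() => invalidateMCPServerDetail(appID))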
const submit = async () => {
if (!data) {
const payload: any = {
@@ -71,6 +88,7 @@ const MCPServerModal = ({

await createMCPServer(payload)
invalidateMCPServerDetail(appID)
emitMcpServerUpdate('created')
onHide()
}
else {
@@ -83,6 +101,7 @@ const MCPServerModal = ({
payload.description = description
await updateMCPServer(payload)
invalidateMCPServerDetail(appID)
emitMcpServerUpdate('updated')
onHide()
}
}
@@ -92,6 +111,7 @@ const MCPServerModal = ({
isShow={show}
onClose={onHide}
className={cn('relative !max-w-[520px] !p-0')}
highPriority
>
<div className="absolute right-5 top-5 z-10 cursor-pointer p-1.5" onClick={onHide}>
<RiCloseLine className="h-5 w-5 text-text-tertiary" />

@@ -1,6 +1,8 @@
'use client'
import type { CollaborationUpdate } from '@/app/components/workflow/collaboration/types/collaboration'
import type { InputVar } from '@/app/components/workflow/types'
import type { AppDetailResponse } from '@/models/app'
import type { AppSSO } from '@/types/app'
import type { AppSSO, ModelConfig, UserInputFormItem } from '@/types/app'
import { RiEditLine, RiLoopLeftLine } from '@remixicon/react'
import * as React from 'react'
import { useEffect, useMemo, useState } from 'react'
@@ -16,6 +18,8 @@ import Switch from '@/app/components/base/switch'
import Tooltip from '@/app/components/base/tooltip'
import Indicator from '@/app/components/header/indicator'
import MCPServerModal from '@/app/components/tools/mcp/mcp-server-modal'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { BlockEnum } from '@/app/components/workflow/types'
import { useAppContext } from '@/context/app-context'
import { useDocLink } from '@/context/i18n'
@@ -36,6 +40,16 @@ export type IAppCardProps = {
triggerModeMessage?: React.ReactNode // display-only message explaining the trigger restriction
}

type BasicAppConfig = Partial<ModelConfig> & {
updated_at?: number
}

type McpServerParam = {
label: string
variable: string
type: string
}

function MCPServiceCard({
appInfo,
triggerModeDisabled = false,
@@ -54,16 +68,16 @@ function MCPServiceCard({
const isAdvancedApp = appInfo?.mode === AppModeEnum.ADVANCED_CHAT || appInfo?.mode === AppModeEnum.WORKFLOW
const isBasicApp = !isAdvancedApp
const { data: currentWorkflow } = useAppWorkflow(isAdvancedApp ? appId : '')
const [basicAppConfig, setBasicAppConfig] = useState<any>({})
const basicAppInputForm = useMemo(() => {
if (!isBasicApp || !basicAppConfig?.user_input_form)
const [basicAppConfig, setBasicAppConfig] = useState<BasicAppConfig>({})
const basicAppInputForm = useMemo<McpServerParam[]>(() => {
if (!isBasicApp || !basicAppConfig.user_input_form)
return []
return basicAppConfig.user_input_form.map((item: any) => {
const type = Object.keys(item)[0]
return {
...item[type],
type: type || 'text-input',
}
return basicAppConfig.user_input_form.map((item: UserInputFormItem) => {
if ('text-input' in item)
return { label: item['text-input'].label, variable: item['text-input'].variable, type: 'text-input' }
if ('select' in item)
return { label: item.select.label, variable: item.select.variable, type: 'select' }
return { label: item.paragraph.label, variable: item.paragraph.variable, type: 'paragraph' }
})
}, [basicAppConfig.user_input_form, isBasicApp])
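// Shape illustration (an assumption based on the mapping above): an item such as
//   { 'text-input': { label: 'Query', variable: 'query' } }
// is flattened to { label: 'Query', variable: 'query', type: 'text-input' }.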
useEffect(() => {
@@ -90,12 +104,22 @@ function MCPServiceCard({

const [activated, setActivated] = useState(serverActivated)

const latestParams = useMemo(() => {
const latestParams = useMemo<McpServerParam[]>(() => {
if (isAdvancedApp) {
if (!currentWorkflow?.graph)
return []
const startNode = currentWorkflow?.graph.nodes.find(node => node.data.type === BlockEnum.Start) as any
return startNode?.data.variables as any[] || []
const startNode = currentWorkflow?.graph.nodes.find(node => node.data.type === BlockEnum.Start)
const variables = (startNode?.data as { variables?: InputVar[] } | undefined)?.variables || []
return variables.map((variable) => {
const label = typeof variable.label === 'string'
? variable.label
: (variable.label.variable || variable.label.nodeName)
return {
label,
variable: variable.variable,
type: variable.type,
}
})
}
return basicAppInputForm
}, [currentWorkflow, basicAppInputForm, isAdvancedApp])
@@ -103,6 +127,19 @@ function MCPServiceCard({
const onGenCode = async () => {
await refreshMCPServerCode(detail?.id || '')
invalidateMCPServerDetail(appId)

// Emit collaboration event to notify other clients of MCP server changes
const socket = webSocketClient.getSocket(appId)
if (socket) {
socket.emit('collaboration_event', {
type: 'mcp_server_update',
data: {
action: 'codeRegenerated',
timestamp: Date.now(),
},
timestamp: Date.now(),
})
}
}

const onChangeStatus = async (state: boolean) => {
@@ -132,6 +169,20 @@ function MCPServiceCard({
})
invalidateMCPServerDetail(appId)
}

// Emit collaboration event to notify other clients of MCP server status change
const socket = webSocketClient.getSocket(appId)
if (socket) {
socket.emit('collaboration_event', {
type: 'mcp_server_update',
data: {
action: 'statusChanged',
status: state ? 'active' : 'inactive',
timestamp: Date.now(),
},
timestamp: Date.now(),
})
}
}

const handleServerModalHide = () => {
@@ -144,6 +195,23 @@ function MCPServiceCard({
setActivated(serverActivated)
}, [serverActivated])

// Listen for collaborative MCP server updates from other clients
useEffect(() => {
if (!appId)
return

const unsubscribe = collaborationManager.onMcpServerUpdate(async (_update: CollaborationUpdate) => {
try {
invalidateMCPServerDetail(appId)
}
catch (error) {
console.error('MCP server update failed:', error)
}
})

return unsubscribe
}, [appId, invalidateMCPServerDetail])

if (!currentWorkflow && isAdvancedApp)
return null

@@ -1,6 +1,5 @@
'use client'
import type { Collection, CustomCollectionBackend, Tool, WorkflowToolProviderRequest, WorkflowToolProviderResponse } from '../types'
import type { WorkflowToolModalPayload } from '@/app/components/tools/workflow-tool'
import {
RiCloseLine,
} from '@remixicon/react'
@@ -413,7 +412,7 @@ const ProviderDetail = ({
)}
{isShowEditWorkflowToolModal && (
<WorkflowToolModal
payload={customCollection as unknown as WorkflowToolModalPayload}
payload={customCollection}
onHide={() => setIsShowEditWorkflowToolModal(false)}
onRemove={onClickWorkflowToolDelete}
onSave={updateWorkflowToolProvider}
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
 'use client'
 import type { FC } from 'react'
-import type { Emoji, WorkflowToolProviderOutputParameter, WorkflowToolProviderOutputSchema, WorkflowToolProviderParameter, WorkflowToolProviderRequest } from '../types'
+import type { Emoji, WorkflowToolProviderOutputParameter, WorkflowToolProviderParameter, WorkflowToolProviderRequest } from '../types'
 import { RiErrorWarningLine } from '@remixicon/react'
 import { produce } from 'immer'
 import * as React from 'react'
@@ -21,25 +21,9 @@ import { VarType } from '@/app/components/workflow/types'
 import { cn } from '@/utils/classnames'
 import { buildWorkflowOutputParameters } from './utils'

-export type WorkflowToolModalPayload = {
-  icon: Emoji
-  label: string
-  name: string
-  description: string
-  parameters: WorkflowToolProviderParameter[]
-  outputParameters: WorkflowToolProviderOutputParameter[]
-  labels: string[]
-  privacy_policy: string
-  tool?: {
-    output_schema?: WorkflowToolProviderOutputSchema
-  }
-  workflow_tool_id?: string
-  workflow_app_id?: string
-}
-
 type Props = {
   isAdd?: boolean
-  payload: WorkflowToolModalPayload
+  payload: any
   onHide: () => void
   onRemove?: () => void
   onCreate?: (payload: WorkflowToolProviderRequest & { workflow_app_id: string }) => void
@@ -89,7 +73,7 @@ const WorkflowToolAsModal: FC<Props> = ({
     },
   ]

-  const handleParameterChange = (key: string, value: string, index: number) => {
+  const handleParameterChange = (key: string, value: any, index: number) => {
     const newData = produce(parameters, (draft: WorkflowToolProviderParameter[]) => {
       if (key === 'description')
         draft[index].description = value
@@ -152,13 +136,13 @@ const WorkflowToolAsModal: FC<Props> = ({
     if (!isAdd) {
       onSave?.({
         ...requestParams,
-        workflow_tool_id: payload.workflow_tool_id!,
+        workflow_tool_id: payload.workflow_tool_id,
       })
     }
     else {
       onCreate?.({
         ...requestParams,
-        workflow_app_id: payload.workflow_app_id!,
+        workflow_app_id: payload.workflow_app_id,
       })
     }
   }

@@ -108,7 +108,7 @@ vi.mock('@/app/components/app/app-publisher', () => ({
       <button type="button" onClick={() => { Promise.resolve(props.onPublish?.()).catch(() => undefined) }}>
         publisher-publish
       </button>
-      <button type="button" onClick={() => { Promise.resolve(props.onPublish?.({ title: 'Test title', releaseNotes: 'Test notes' })).catch(() => undefined) }}>
+      <button type="button" onClick={() => { Promise.resolve(props.onPublish?.({ url: '/apps/app-id/workflows/publish', title: 'Test title', releaseNotes: 'Test notes' })).catch(() => undefined) }}>
         publisher-publish-with-params
       </button>
     </div>

Some files were not shown because too many files have changed in this diff.