Compare commits

..

362 Commits

Author SHA1 Message Date
f4321279b9 fix migration file 2026-01-24 19:51:43 +08:00
2a372df33c fix web unittest 2026-01-24 19:49:20 +08:00
ef536ba909 fix 2026-01-24 15:30:45 +08:00
b192c6e658 fix package version 2026-01-24 15:26:59 +08:00
89b2ae01a6 [autofix.ci] apply automated fixes 2026-01-24 07:26:47 +00:00
edb4457684 Merge remote-tracking branch 'myori/main' into feat/collaboration2 2026-01-24 15:22:07 +08:00
bb6d6a4f96 improve compute nodes diff speed 2026-01-24 15:04:51 +08:00
486a30402b remove forceUpload 2026-01-23 14:33:15 +08:00
e105dc6289 new restore 2026-01-23 14:22:58 +08:00
51c8c50b82 expire leader key in redis 2026-01-22 09:30:51 +08:00
1b70a7e4c7 use contract for api request 2026-01-21 18:20:38 +08:00
eaf888b02a env var NEXT_PUBLIC_SOCKET_URL 2026-01-20 20:34:56 +08:00
f99ac24d5c websocket use cookie connect 2026-01-20 17:01:40 +08:00
bdac6f91dd add socket edit permission validate 2026-01-20 13:56:28 +08:00
9be496f953 fix publish workflow not sync 2026-01-20 13:20:02 +08:00
4acca22ff0 whether resolved sync to canvas 2026-01-20 10:12:15 +08:00
018175ec2d Merge branch 'feat/collaboration2' of github.com:langgenius/dify into feat/collaboration2 2026-01-19 21:54:01 +08:00
faa88dc2f3 fix unittests 2026-01-19 21:53:56 +08:00
060c7f2b45 fix pyright 2026-01-19 21:48:05 +08:00
acb603bff7 fix migration file 2026-01-19 21:46:40 +08:00
e36ee54a16 fix web style 2026-01-19 21:44:26 +08:00
f3fa4f11ba [autofix.ci] apply automated fixes 2026-01-19 13:18:15 +00:00
cb8fc9cf2d Merge remote-tracking branch 'myori/main' into feat/collaboration2 2026-01-19 21:15:53 +08:00
aaa3d2d74f add unittests 2026-01-19 21:11:44 +08:00
c17f564718 add unittests 2026-01-19 20:41:21 +08:00
3389071361 add unittests 2026-01-19 20:25:47 +08:00
41473ff450 refactor workflow collaboration service 2026-01-19 19:56:18 +08:00
805bb7c468 fix node in panel sync 2026-01-19 18:01:43 +08:00
995d5ccf66 fix graph not sync 2026-01-19 13:45:00 +08:00
0d08f7db97 fix 2026-01-18 18:36:44 +08:00
6443366f50 [autofix.ci] apply automated fixes 2026-01-18 10:01:22 +00:00
70c41a7dc3 Update api/controllers/console/app/workflow.py
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2026-01-18 17:59:18 +08:00
8804623121 Update api/app_factory.py
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2026-01-18 17:59:10 +08:00
1fb6d1286f fix webtest 2026-01-18 17:27:29 +08:00
511df81201 fix web style 2026-01-18 13:40:12 +08:00
682c93f262 Merge remote-tracking branch 'myori/main' into feat/collaboration2 2026-01-18 10:28:50 +08:00
51c96b0b7e fix CI 2026-01-18 10:12:43 +08:00
224f426765 fix CI 2026-01-18 10:07:46 +08:00
e9657cfb48 [autofix.ci] apply automated fixes 2026-01-17 15:00:37 +00:00
4200ac0da3 fix CI 2026-01-17 22:58:27 +08:00
434f7f3bcb fix web style 2026-01-17 22:10:10 +08:00
03cc196965 fix CI 2026-01-17 22:05:14 +08:00
25c88b3f5c fix mypy 2026-01-17 21:41:03 +08:00
2d94904241 fix web unittests 2026-01-17 19:43:40 +08:00
a99e70d96e fix CI 2026-01-17 15:55:27 +08:00
9eeceb2455 fix basedpyright 2026-01-17 15:54:32 +08:00
7901e18fa6 [autofix.ci] apply automated fixes 2026-01-17 06:57:16 +00:00
2befef0b21 Merge branch 'feat/collaboration2' of github.com:langgenius/dify into feat/collaboration2 2026-01-17 14:55:22 +08:00
8869cd7008 fix api 2026-01-17 14:55:12 +08:00
91e6ae2a7d fix bug 2026-01-17 14:53:33 +08:00
6ab8e05a5e fix api 2026-01-17 14:47:44 +08:00
717f99a352 fix migration file 2026-01-17 12:54:15 +08:00
735cd78dc2 fix api 2026-01-17 12:45:40 +08:00
c820501cbb [autofix.ci] apply automated fixes (attempt 2/3) 2026-01-17 04:29:38 +00:00
43ef2395ac [autofix.ci] apply automated fixes 2026-01-17 04:27:34 +00:00
bb3d94f1c5 Merge remote-tracking branch 'myori/main' into feat/collaboration2 2026-01-17 12:24:37 +08:00
c45fbb6491 rm workflow.ts 2026-01-17 10:26:12 +08:00
fc291e4ca2 Merge remote-tracking branch 'myori/main' into feat/collaboration2 2026-01-17 10:22:41 +08:00
b549d669d6 clear logic 2026-01-15 13:17:14 +08:00
802b38eede fix 2026-01-15 13:16:35 +08:00
4b57e7bd53 fix 2026-01-15 11:42:34 +08:00
bfedee0532 fix 2026-01-14 16:40:52 +08:00
1845938e70 fix type issue 2026-01-13 22:18:54 +08:00
fad81ab85e fix type issue 2026-01-13 22:11:36 +08:00
d1c64f5c74 add toast when disconnected 2026-01-13 22:08:59 +08:00
7f6c93bdce reduce CURSOR_THROTTLE_MS 2026-01-13 22:08:07 +08:00
7730c88c74 fix leader election concurrently 2026-01-13 18:01:12 +08:00
ac6b540fd8 CORS config 2026-01-13 17:50:16 +08:00
8c9276370c remove console.log 2026-01-13 17:46:53 +08:00
b91370aff7 fix next config 2026-01-13 17:40:04 +08:00
30424df7ce uuid v7 2026-01-13 17:20:02 +08:00
14f7f4758a fix error display 2026-01-13 17:19:52 +08:00
79c19983e0 refactor: fix N+1 query issue in workflow comments 2026-01-13 16:56:54 +08:00
aeb3fc6729 add backend logging 2026-01-13 16:25:54 +08:00
0c18d4e058 fix duplicated status 2026-01-13 15:59:59 +08:00
bd597497e7 prevent comment thread pinch 2025-11-27 15:37:46 +08:00
be1f841b37 control panel should be z-60 2025-11-24 16:27:37 +08:00
d98a428100 Revert "fix model config panel z-index"
This reverts commit f85bf0867c.
2025-11-24 16:23:10 +08:00
26d330e744 setting dialog should be z-index 60 2025-11-24 16:19:29 +08:00
61bed38afb Reapply "fix system model setting modal index"
This reverts commit 16fbc6b270.
2025-11-24 16:16:56 +08:00
16fbc6b270 Revert "fix system model setting modal index"
This reverts commit fe132de3c8.
2025-11-24 16:16:45 +08:00
fe132de3c8 fix system model setting modal index 2025-11-24 16:12:18 +08:00
f85bf0867c fix model config panel z-index 2025-11-24 16:10:46 +08:00
b441a7fbc4 fix style 2025-11-18 10:31:56 +08:00
8497d296b1 feat: can drag avatar to move the comment input 2025-11-18 09:53:15 +08:00
3ee2508ec8 fix comment input also not allow to zoomin canvas 2025-11-17 16:17:34 +08:00
ff8d5ac4b5 fix gesture zoom in 2025-11-17 15:37:43 +08:00
7fc98b2183 fix sync of webhook node 2025-11-14 11:31:08 +08:00
a4adafd8ad remove the single env button 2025-11-14 11:00:33 +08:00
c1bc3aeab9 fix migration file 2025-11-14 10:58:16 +08:00
edf962cdb5 Merge branch 'feat/collaboration' into feat/collaboration2 2025-11-13 15:31:21 +08:00
2fa13cdf86 if session unauthorized, rejoin 2025-11-11 16:38:55 +08:00
39de7673eb add redis key expire time for collaboration 2025-11-11 16:13:05 +08:00
d930d8cc4a fix setting dialog z-index 2025-11-10 18:02:36 +08:00
97626a3ba5 can't zoomOnPinch when mouse over comment preview 2025-11-07 09:27:49 +08:00
b7f7d04639 fix comment input mention not display avatar 2025-11-05 18:09:42 +08:00
13674bd859 comment input mode click empty place can close 2025-11-05 17:41:10 +08:00
fb9cbc0471 comment mode can't click node 2025-11-05 14:14:36 +08:00
2f60288d86 fix: resize workflow canvas cause incorrect comment position 2025-11-05 14:08:21 +08:00
ee3ded0fc2 fix control layer 2025-10-22 10:25:31 +08:00
351bad9ec4 fix minimap disable collaboration 2025-10-22 10:21:25 +08:00
9bf7473bbf hide comments when disable collaboration 2025-10-22 10:10:23 +08:00
fa09c88f5c add CollaborationEnabled for comment shortcut 2025-10-22 09:59:43 +08:00
83df78d0c8 hide comments icon when disable collaboration mode 2025-10-22 09:50:37 +08:00
79266f7302 add note node sync data 2025-10-21 15:34:44 +08:00
7fecc7236c add more collaboration manager unit tests 2025-10-21 14:37:31 +08:00
9c7f6b7b71 add crdt provider unittests 2025-10-21 14:27:13 +08:00
b46da93e99 add unittests for event-emitter 2025-10-21 14:12:13 +08:00
e299a1fb20 add ws manager unit tests 2025-10-21 14:09:25 +08:00
122033cadb sort out code 2025-10-21 12:27:11 +08:00
df9bd1b3b5 add Parameters of ParametersExtractor node sync 2025-10-21 12:14:48 +08:00
f74492eb59 add prompt_template of LLM node sync 2025-10-21 12:00:42 +08:00
eaf1ae37dd add ENABLE_COLLABORATION_MODE 2025-10-21 11:46:28 +08:00
8e3b412ff6 fix websocket cookie auth 2025-10-21 11:46:00 +08:00
ba17f576e9 Merge remote-tracking branch 'myori/main' into feat/collaboration 2025-10-21 08:47:01 +08:00
9415ce4512 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-20 10:04:13 +08:00
239536933b Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-17 19:33:40 +08:00
80b34598e9 try to fix start node collaboration 2025-10-16 10:18:37 +08:00
9c66b92c34 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-15 21:08:08 +08:00
79872ea5e2 Refine workflow comment avatar highlight ring 2025-10-15 14:58:03 +08:00
cbf181bd76 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-15 11:06:23 +08:00
1393d21858 fix(web): adjust online users badge sizing and add pointer cursor to chevron 2025-10-15 11:06:04 +08:00
3a46b7bd18 fix(web): restyle workflow online-users avatar stack and dropdown 2025-10-15 10:48:38 +08:00
0bbfd81d26 fix: tooltip font 2025-10-15 10:35:42 +08:00
86db517142 fix(web): make workflow online-users dropdown click-based with revised spacing 2025-10-15 10:34:00 +08:00
50151f4007 fix(web): adjust workflow online-users icon and label styles 2025-10-15 10:21:54 +08:00
0395d1f91f Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-15 10:02:55 +08:00
5f4c1e4057 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-15 09:33:54 +08:00
d14413f3b0 comment click calculate the panel width 2025-10-15 09:11:44 +08:00
4fd968270c Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-14 18:56:27 +08:00
708a7dd362 fix comment mode can't drag node 2025-10-14 17:31:03 +08:00
cd85b75312 fix control panel hovered by comment icon 2025-10-14 17:16:33 +08:00
d685da377e fix minimap 2025-10-14 17:11:22 +08:00
8583992d23 when new user connected should rebroadcast the graph data 2025-10-14 16:57:02 +08:00
23fec75c90 cache the new created comment 2025-10-14 11:21:18 +08:00
ebe7303894 fix loop variable not sync well 2025-10-14 10:10:34 +08:00
79fb977f10 fix loop/iteration incorrect nodes width 2025-10-14 09:54:37 +08:00
c0af3414a3 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-14 07:54:05 +08:00
1857d37fae sync app published 2025-10-13 16:42:17 +08:00
60fdbb56a9 fix all lines missing 2025-10-13 16:38:50 +08:00
4c7853164d fix mcp server edit modal disappear 2025-10-13 16:36:39 +08:00
6c7a3ce4bb sync workflow publish to mcp server 2025-10-13 14:07:26 +08:00
a9e74b21f1 fix: increase ContentDialog z-index to display above workflow operators
The collaboration feature increased workflow operator z-index from z-10 to z-[60].
This caused the AppInfo ContentDialog (z-30) to appear below the operator buttons.
Increased ContentDialog z-index to z-[70] to ensure proper layer hierarchy.
2025-10-13 14:00:28 +08:00
e6730f7164 fix: dropdown menu border 2025-10-13 13:15:54 +08:00
3344723393 fix: prevent Enter key from triggering submit during IME composition
Add isComposing check at the start of handleKeyDown to ignore keyboard events during IME (Chinese/Japanese/Korean) input composition. This follows the existing pattern used in tag-management component and prevents premature form submission when users press Enter to confirm IME candidates.
2025-10-13 13:09:52 +08:00
c571185a91 fix: extract @mention highlighting from content in real-time to persist after edit 2025-10-13 13:03:55 +08:00
325c1cfa41 fix: prevent Save button flash by maintaining loading state until edit closes 2025-10-13 12:56:18 +08:00
1069421753 refactor: replace keyboard shortcut icons with custom EnterKey icon 2025-10-13 12:52:07 +08:00
b33a97ea5b style: update comment thread UI with design specs
- Fix edit bubble: keep avatar visible and match ThreadMessage layout
- Update edit container: rounded-xl, p-1, shadow-md, backdrop-blur
- Add keyboard shortcut icons (Cmd+Enter) to Save button
- Fix hover background: full-width with -mx-4 negative margin technique
- Apply design tokens consistently across components
2025-10-13 12:42:41 +08:00
d2c1d4c337 style: update mention dropdown UI to match design specs
- Update container: rounded-xl, border-0.5px, backdrop-blur, bg opacity 95%
- Update items: rounded-md with asymmetric padding (py-1 pl-2 pr-3)
- Use project design tokens (shadow-lg, bg-state-base-hover)
2025-10-13 12:24:28 +08:00
67762cf1d8 chore: resolve merge conflict in pnpm-lock.yaml
Merged origin/main into feat/collaboration and resolved dependency lock file conflicts by regenerating pnpm-lock.yaml through clean install.

Changes:
- Resolved eslint version differences (9.36.0 vs 9.35.0)
- Updated lock file reflects current dependency resolution
- All other changes from main branch successfully merged
2025-10-13 11:53:43 +08:00
eadce0287c app meta sync 2025-10-13 11:49:54 +08:00
ecaff5b63f fix loop var change cause collaboration crash 2025-10-13 10:06:50 +08:00
a300c9ef96 fix canvas empty on the bottom 2025-10-13 09:38:59 +08:00
44fe71e4db fix: ensure comment thread always scrolls to bottom on first render 2025-10-12 13:27:42 +08:00
0ac32188c5 feat: implement comprehensive focus management for comment thread
- Add forwardRef support to MentionInput to expose textarea ref
- Auto-focus reply input when thread opens (100ms delay)
- Restore focus after reply submission and edit operations
- Add Esc key handler to close thread with smart guards
- Enhance accessibility with ARIA attributes (dialog, modal, labelledby)
- Improve keyboard navigation and user experience

Implements P0-P3 priorities following WCAG 2.1 AA accessibility standards
2025-10-12 13:21:57 +08:00
9aaace706b feat: optimize comments panel filter UI and interaction logic 2025-10-12 13:04:24 +08:00
b22de5a824 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-12 13:04:07 +08:00
97463661c1 fix: translations 2025-10-11 20:33:55 +08:00
239a11855a fix: prevent dropdown from closing when showing inline delete confirmation
Use pre-rendering strategy with CSS visibility control instead of conditional rendering to avoid race condition between React state update and PortalToFollowElem's click-outside detection.
2025-10-11 20:21:52 +08:00
0632557d91 feat: use inline delete confirm for comment reply deletion(second time) 2025-10-11 18:37:41 +08:00
44be7d4c51 Revert "feat: use inline delete confirm for comment reply deletion"
This reverts commit a077a3f609.
2025-10-11 18:24:15 +08:00
efb4a9d327 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-11 18:18:40 +08:00
a077a3f609 feat: use inline delete confirm for comment reply deletion 2025-10-11 18:06:31 +08:00
3ccec0aab0 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-11 17:21:05 +08:00
3006133f0e sync node title 2025-10-11 15:48:51 +08:00
79beb25530 feat: add tooltips and improve delete button styling in CommentThread
- Add compact tooltips to Delete, Resolve, Previous, and Next buttons
- Change delete button hover to red background and text
- Use existing i18n translations for tooltip content
2025-10-11 15:22:37 +08:00
b47b228164 fix: align dropdown menu styles with design specs in CommentThread
- Update background to blur variant with backdrop filter
- Change border radius from lg to xl (12px)
- Add rounded corners to menu items to prevent hover overflow
2025-10-11 15:10:57 +08:00
be91db14d9 fix: add hover effect to first message in CommentThread
Wrap the root comment message with the same hover container as replies to ensure consistent hover behavior across all messages.
2025-10-11 15:08:27 +08:00
120893209e fix: align CommentPreview styles with design specs
- Update border radius to 24px with 3px bottom-left corner
- Change border width to 0.5px
- Add backdrop blur effect with bg-blur variant
- Replace custom shadow with standard shadow-lg
- Maintain proper Tailwind utility class usage
2025-10-11 15:02:06 +08:00
f19630bcf5 Merge remote-tracking branch 'origin/main' into feat/collaboration 2025-10-11 14:43:20 +08:00
9d93fda471 refactor: separate loading states for comment operations
Separate loading states to distinguish between different operations:
- activeCommentDetailLoading: loading comment details, delete/resolve operations
- replySubmitting: sending new replies
- replyUpdating: editing existing replies

Changes:
- Add replySubmitting and replyUpdating states to comment store
- Restore full-screen loading overlay for comment detail loading
- Use inline spinner (RiLoader2Line) in send/save buttons for reply operations
- Update loading state usage in handleCommentReply and handleCommentReplyUpdate
- Pass separated loading states from workflow index to CommentThread component

Benefits:
- UI clarity: different loading states have appropriate visual feedback
- Better UX: users can still navigate while sending replies
- Clear separation of concerns: each operation has its own loading state
2025-10-11 14:34:35 +08:00
d986659add chore: replace Chinese/Japanese comments with English translations 2025-10-11 14:20:37 +08:00
00dab7ca5f feat: improve mention input loading state and prevent button flash on submit 2025-10-11 14:20:37 +08:00
a4add403fb Fix MentionInput layout and improve comment hover styling 2025-10-11 14:20:37 +08:00
e9cdc96c74 feat: prevent duplicate @ insertion in mention input with visual feedback 2025-10-11 14:20:37 +08:00
6af1fea232 fix: update mention button icon color for better visibility in light mode 2025-10-11 14:20:37 +08:00
45d5d9e44f fix: mention input cannot scroll 2025-10-11 14:20:36 +08:00
376a084aca refactor: use PortalToFollowElem for dropdown with scroll handling
- Replace inline dropdown with PortalToFollowElem to prevent container overflow
- Use z-[100] for dropdown to ensure proper stacking
- Remove redundant outside click handler (handled by PortalToFollowElem)
- Add scroll event listener to auto-close dropdown when scrolling
- Dropdown now renders via portal outside message container
2025-10-11 14:20:36 +08:00
d1f42d47fe fix: improve dropdown menu hover and positioning 2025-10-11 14:20:36 +08:00
64b8fd87ad fix: improve dropdown menu positioning and z-index 2025-10-11 14:20:36 +08:00
364be48248 feat: add smooth scroll to comment thread 2025-10-11 14:20:36 +08:00
2bce046278 fix node error default value not sync 2025-10-11 14:17:58 +08:00
1120d552b6 fix knowledge node add/delete dataset not sync 2025-10-11 14:09:37 +08:00
69cab0817f fix comment input hovered by comment content 2025-10-11 10:41:28 +08:00
c4d03bf378 change event type name of websocket 2025-10-11 09:07:02 +08:00
6c039be2ca fix jump to other page not disconnect websocket 2025-10-10 16:51:57 +08:00
832dabc8a4 only author can move the comment position 2025-10-10 15:58:01 +08:00
1da2028d9d keep the previous private property when import node data 2025-10-10 13:26:55 +08:00
7c3f6dcc8d use cloneDeep instead of json.parse 2025-10-10 10:34:00 +08:00
1472884eb5 sync the create/delete app in the list page 2025-10-10 10:18:23 +08:00
ec22b1c706 fix user uploaded avatar display incorrect 2025-10-09 17:40:20 +08:00
a1712df7c2 comment author avatar is the first avatar 2025-10-09 17:12:37 +08:00
a40e11cb3e only can edit own replies 2025-10-09 17:02:39 +08:00
61c46bea40 fix missing i18n 2025-10-09 16:55:53 +08:00
1c5c28a82c fix switch to cursor mode comment input still exists 2025-10-09 16:36:20 +08:00
2310145937 comment reply auto scroll down to bottom 2025-10-09 15:50:23 +08:00
6a9c9cadd0 fix comment hover the variable panel 2025-10-09 15:44:56 +08:00
7774ff9944 fix version not display 2025-10-09 15:07:36 +08:00
33d4c95470 can update comment position 2025-10-05 10:17:04 +08:00
659cbc05a9 fix mention-input in the bottom of the browser 2025-10-04 21:24:27 +08:00
6ce65de2cd fix merged main issues 2025-10-04 21:11:59 +08:00
93b2eb3ff6 Merge remote-tracking branch 'myori/main' into p284 2025-10-04 15:28:29 +08:00
bf71300635 improve comment cursor move 2025-10-04 14:36:10 +08:00
37ecd4a0bc fix @ input problem 2025-10-04 13:39:00 +08:00
827a1b181b fix comment icon position 2025-10-04 13:25:59 +08:00
c4e7cb75cd cache the mentioned users 2025-10-04 11:22:02 +08:00
98e4bfcda8 click comment icon not switch to comment mode 2025-10-03 23:36:56 +08:00
ee48ca7671 fix default comment icon 2025-09-30 15:23:43 +08:00
4ba6de1116 add leader session more check 2025-09-29 14:01:42 +08:00
bfbe636555 fix docker file websocket mode 2025-09-29 13:35:10 +08:00
54ae43ef47 sync children node data 2025-09-26 14:07:34 +08:00
7a74b5ee3e fix add child node resize parent node size 2025-09-26 14:04:50 +08:00
0e9d43d605 http node data sync 2025-09-26 11:13:20 +08:00
cc54363c27 sync the prompt editor 2025-09-26 10:48:00 +08:00
89affe3139 fix opened panel be affected 2025-09-26 09:20:33 +08:00
2c4977dbb1 fix bug 2025-09-25 16:56:06 +08:00
e240175116 sync nodes 2025-09-25 16:31:46 +08:00
2398ed6fe8 fix update env api update time error 2025-09-25 16:28:33 +08:00
a8420ac33c add fragment to prevent list missing key 2025-09-25 09:52:08 +08:00
8470be6411 improve delete comment i18n 2025-09-25 09:41:59 +08:00
3d6295c622 refactor delete comment and reply 2025-09-25 09:35:46 +08:00
ff2f7206f3 bump nextjs to 15.5 and turbopack for development mode (#24346)
Co-authored-by: crazywoola <427733928@qq.com>
Co-authored-by: 非法操作 <hjlarry@163.com>
2025-09-25 09:10:09 +08:00
b937fc8978 app online user list 2025-09-24 17:03:33 +08:00
86a9a51952 add comment preview 2025-09-24 12:54:54 +08:00
4188c9a1dd fix dark theme 2025-09-24 10:08:33 +08:00
8c00f89e36 add icon to zoom2fit 2025-09-23 22:22:28 +08:00
9e8ac5c96b refactor cursor and add hide comment 2025-09-23 22:13:02 +08:00
05a67f4716 add display/hide collaborator cursors 2025-09-23 17:37:40 +08:00
f49476a206 add show/hide minimap 2025-09-23 17:20:41 +08:00
c1e9c56e25 fix style 2025-09-23 17:19:36 +08:00
d5dd73cacf add i18n for comment 2025-09-23 16:19:04 +08:00
21f7a49b4e fix restore page crash 2025-09-23 15:44:57 +08:00
716ac04e13 add comment shortcut 2025-09-23 15:40:53 +08:00
c28a32fc47 fix handleModeComment 2025-09-23 15:35:28 +08:00
31cba28e8a improve comment cursor icon 2025-09-23 15:28:22 +08:00
48cd7e6481 input comment should not cancel comment mode 2025-09-23 14:48:31 +08:00
47aba1c9f9 fix style 2025-09-23 14:41:34 +08:00
0f3f8bc0d9 make mention input can display name different color 2025-09-23 11:38:38 +08:00
e0df12c212 fix mentioned names color 2025-09-23 11:24:17 +08:00
eb448d9bb8 fix avatar background color 2025-09-23 11:09:02 +08:00
0ba77f13db fix avatar inset 2025-09-23 10:46:18 +08:00
f0a2eb843c fix user cursor should not over the panel 2025-09-23 10:35:16 +08:00
5cf3d9e4d9 fix nginx config 2025-09-22 14:21:07 +08:00
41958f55cd fix CSP 2025-09-22 14:20:11 +08:00
600ad232e1 fix config 2025-09-22 14:20:11 +08:00
7a3825cfce fix docker config 2025-09-22 14:20:11 +08:00
9519653422 change default ws url 2025-09-22 14:20:11 +08:00
efa2307c73 change default ws url 2025-09-22 14:20:11 +08:00
068fa3d0e3 fix CI 2025-09-22 14:20:11 +08:00
13d8dbd542 fix CI 2025-09-22 14:20:08 +08:00
b442ba8b2b fix UserAvatarList background color 2025-09-19 12:07:07 +08:00
10e36d2355 add avatar on canvas node 2025-09-19 10:43:28 +08:00
13c53fedad add avatar display on node 2025-09-19 10:07:01 +08:00
4bda1bd884 open node panel not affect others 2025-09-18 17:42:02 +08:00
3abe7850d6 fix migration file 2025-09-18 16:30:40 +08:00
b50284d864 fix merge problem 2025-09-18 15:45:53 +08:00
81c6e52401 Merge remote-tracking branch 'origin/p254' into p284 2025-09-18 15:14:55 +08:00
847d257366 Merge branch 'p254' into p284 2025-09-18 14:50:59 +08:00
687662cf1f comment sync 2025-09-18 13:27:27 +08:00
6432d98469 improve the icon display on canvas 2025-09-18 11:49:43 +08:00
088ccf8b8d add UserAvatarList component 2025-09-18 09:47:07 +08:00
e8683bf957 fix comment cursor position 2025-09-18 09:17:45 +08:00
4653981b6b not display more icon when in edit mode 2025-09-17 20:45:54 +08:00
e2547413d3 fix edit input mouse pos 2025-09-17 20:40:59 +08:00
ea17f41b5b refactor reply code 2025-09-17 20:29:23 +08:00
29178d8adf can edit and delete a reply 2025-09-17 17:44:09 +08:00
7e86ead574 upgrade style 2025-09-17 16:41:10 +08:00
72debcb228 refactor mention input 2025-09-17 16:28:47 +08:00
72737dabc7 fix at can't click bug 2025-09-17 14:50:05 +08:00
f6e5cb4381 improve comment detail 2025-09-17 14:34:36 +08:00
ffad3b5fb1 comment detail window fix height 2025-09-17 13:45:56 +08:00
cba9fc3020 add comment reply 2025-09-17 12:50:42 +08:00
e776accaf3 add top operation buttons of comment detail 2025-09-17 10:45:15 +08:00
3eac26929a sync the comment panel and canvas 2025-09-17 09:13:31 +08:00
4d3adec738 click canvas icon display the active comment detail 2025-09-17 09:01:16 +08:00
89bed479e4 improve comment panel 2025-09-16 17:25:51 +08:00
fdd673a3a9 improve comments panel 2025-09-16 13:39:31 +08:00
22f6d285c7 fix comment cursor in panel incorrect 2025-09-16 10:20:12 +08:00
10aa16b471 add workflow comment panel 2025-09-16 09:51:12 +08:00
b3838581fd improve mention 2025-09-15 17:13:46 +08:00
affbe7ccdb can mention user in the create comment 2025-09-15 16:42:31 +08:00
dd8577f832 comments display on canvas 2025-09-15 14:16:06 +08:00
d7f5da5df4 display comments avatar on the canvas 2025-09-15 11:41:06 +08:00
9fda130b3a fix click comment once more then esc not work 2025-09-15 11:11:07 +08:00
72cdbdba0f fix chat input style 2025-09-15 09:20:06 +08:00
b92a153902 refactor code 2025-09-14 13:03:08 +08:00
9f2927979b fix comment cursor icon 2025-09-14 12:50:18 +08:00
75257232c3 add create comment frontend 2025-09-14 12:10:37 +08:00
1721314c62 add frontend comment service 2025-09-13 17:57:19 +08:00
fc230bcc59 add force update workflow to support restore 2025-09-12 16:27:12 +08:00
b4636ddf44 add leader restore workflow 2025-09-12 15:34:41 +08:00
b1140301a4 sync import dsl 2025-09-12 14:46:40 +08:00
58cd785da6 use const for cursor move config 2025-09-11 09:36:22 +08:00
2035186cd2 click avatar to follow user cursor position 2025-09-11 09:26:05 +08:00
53ba6aadff cursor pos transform to canvas 2025-09-11 09:07:03 +08:00
f091868b7c use new get avatar api 2025-09-10 15:15:43 +08:00
89bedae0d3 remove the test code for develop collaboration 2025-09-10 14:27:20 +08:00
c8acc48976 ruff format 2025-09-10 14:25:37 +08:00
21fee59b22 use new features update api 2025-09-10 14:24:38 +08:00
957a8253f8 change user list to conversation var panel left 2025-09-10 09:26:38 +08:00
d5fc3e7bed add new conversation vars update api 2025-09-10 09:24:22 +08:00
ab438b42da use new env variables update api 2025-09-10 09:07:55 +08:00
3867fece4a mcp server update 2025-09-09 15:01:38 +08:00
2b908d4fbe add app state update 2025-09-09 14:24:37 +08:00
8ff062ec8b change user default color 2025-09-09 10:20:02 +08:00
294fc41aec add redo undo manager of CRDT 2025-09-09 09:58:55 +08:00
684f7df158 node data use crdt data 2025-09-08 14:46:28 +08:00
c3287755e3 add request leader to sync graph 2025-09-08 09:00:20 +08:00
9f97f4d79e fix cursor style 2025-09-06 15:54:19 +08:00
34eb421649 add currentUserId is me 2025-09-06 12:27:54 +08:00
850b05573e add dropdown users list 2025-09-06 12:01:49 +08:00
6ec8bfdfee add mouse over avatar display username 2025-09-06 11:29:45 +08:00
81638c248e use one getUserColor func 2025-09-06 11:22:59 +08:00
2e11b1298e add online users avatar 2025-09-06 11:19:47 +08:00
20320f3a27 show online users on the canvas 2025-09-06 00:08:17 +08:00
4019c12d26 fix missing import 2025-09-05 22:20:07 +08:00
cf72184ce4 each browser tab session a ws connected obj 2025-09-05 22:19:16 +08:00
ca8d15bc64 add mention user list api 2025-08-31 13:42:59 +08:00
a91c897fd3 improve code 2025-08-31 00:43:34 +08:00
816bdf0320 add delete comment and reply 2025-08-31 00:28:01 +08:00
d4a6acbd99 add update reply 2025-08-30 23:49:27 +08:00
e421db4005 add resolve comment 2025-08-30 22:37:01 +08:00
9067c2a9c1 add update comment 2025-08-22 17:48:14 +08:00
9f7321ca1a add create reply 2025-08-22 17:33:47 +08:00
5fa01132b9 add create and list comment api 2025-08-22 16:47:08 +08:00
e082b6d599 add workflow comment models 2025-08-22 11:28:26 +08:00
d44be2d835 add leader submit graph data 2025-08-21 17:53:39 +08:00
7dc8557033 add Leader election 2025-08-21 16:17:16 +08:00
72037a1865 improve cursors logic 2025-08-21 14:27:41 +08:00
2d1621c43d add leader but not review 2025-08-08 14:54:18 +08:00
d1a5db3310 rm useCollaborativeCursors component 2025-08-07 18:03:12 +08:00
ad8fd8fecc clone the node to avoid loro recursive 2025-08-07 17:45:38 +08:00
be74b76079 refactor websocket init 2025-08-07 17:31:12 +08:00
dd64af728f refactor the cursors component 2025-08-07 14:29:23 +08:00
e43b46786d refactor all the frontend code 2025-08-07 10:58:53 +08:00
3f3b37b843 refactor to support multi websocket connections 2025-08-06 17:05:39 +08:00
2ecf9f6ddf add features collaboration 2025-08-06 10:58:32 +08:00
48c069fe68 support env vars collaborate 2025-08-05 15:22:22 +08:00
9c5c597c85 support empty collaboration event data 2025-08-05 15:21:41 +08:00
c2eec8545d collaborate conversation vars 2025-08-05 14:24:51 +08:00
2395d4be26 fix imported updates also broadcast to other clients 2025-08-05 10:21:22 +08:00
9455476705 handle edge delete 2025-08-04 14:17:59 +08:00
494e223706 some operations don't need to broadcast 2025-08-03 14:18:48 +08:00
348fd18230 refactor collaboration 2025-08-03 13:34:07 +08:00
7233b4de55 the initial data to collaboration store 2025-07-31 16:27:01 +08:00
af6df05685 add setNodes and setEdges of collaboration store 2025-07-31 15:25:50 +08:00
965b65db6e use loro for crdt data 2025-07-31 14:02:53 +08:00
4cc01c8aa8 try a lot for yjs, but update data still not work... 2025-07-30 14:36:29 +08:00
41372168b6 refactor code 2025-07-23 10:04:16 +08:00
f4438b0a08 support mouse display 2025-07-22 18:08:35 +08:00
897c842637 ruff format 2025-07-21 16:13:04 +08:00
ee86ceb906 fix gunicorn gevent 2025-07-21 16:09:51 +08:00
e298732499 refactor code 2025-07-21 16:07:22 +08:00
4081937e22 migrate to python-socketio 2025-07-21 14:57:28 +08:00
f9aedb2118 add collaborate event 2025-07-21 11:10:23 +08:00
74b4719af8 support broadcast online users 2025-07-18 15:02:34 +08:00
2f35cc9188 add online users backend api and frontend submit cursor pos 2025-07-18 11:17:08 +08:00
2f966d8c38 fix websocket auth 2025-07-17 17:16:52 +08:00
b0868d9136 fix websocket auth 2025-07-17 17:16:38 +08:00
37440e9416 ruff format 2025-07-17 15:37:13 +08:00
0d7d27ec0b establish websocket connection 2025-07-17 15:36:50 +08:00
205 changed files with 14648 additions and 3325 deletions

View File

@ -33,6 +33,9 @@ TRIGGER_URL=http://localhost:5001
# The time in seconds after the signature is rejected
FILES_ACCESS_TIMEOUT=300
# Collaboration mode toggle
ENABLE_COLLABORATION_MODE=false
# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60

View File

@ -1,3 +1,4 @@
+import os
import sys
@ -8,10 +9,15 @@ def is_db_command() -> bool:
# create app
+flask_app = None
+socketio_app = None
if is_db_command():
    from app_factory import create_migrations_app
    app = create_migrations_app()
+    socketio_app = app
+    flask_app = app
else:
    # Gunicorn and Celery handle monkey patching automatically in production by
    # specifying the `gevent` worker class. Manual monkey patching is not required here.
@ -22,8 +28,15 @@ else:
    from app_factory import create_app
-    app = create_app()
-    celery = app.extensions["celery"]
+    socketio_app, flask_app = create_app()
+    app = flask_app
+    celery = flask_app.extensions["celery"]
if __name__ == "__main__":
-    app.run(host="0.0.0.0", port=5001)
+    from gevent import pywsgi
+    from geventwebsocket.handler import WebSocketHandler  # type: ignore[reportMissingTypeStubs]
+    host = os.environ.get("HOST", "0.0.0.0")
+    port = int(os.environ.get("PORT", 5001))
+    server = pywsgi.WSGIServer((host, port), socketio_app, handler_class=WebSocketHandler)
+    server.serve_forever()

View File

@ -1,6 +1,7 @@
import logging
import time
+import socketio  # type: ignore[reportMissingTypeStubs]
from opentelemetry.trace import get_current_span
from opentelemetry.trace.span import INVALID_SPAN_ID, INVALID_TRACE_ID
@ -8,6 +9,7 @@ from configs import dify_config
from contexts.wrapper import RecyclableContextVar
from core.logging.context import init_request_context
from dify_app import DifyApp
+from extensions.ext_socketio import sio
logger = logging.getLogger(__name__)
@ -60,14 +62,18 @@ def create_flask_app_with_configs() -> DifyApp:
    return dify_app
-def create_app() -> DifyApp:
+def create_app() -> tuple[socketio.WSGIApp, DifyApp]:
    start_time = time.perf_counter()
    app = create_flask_app_with_configs()
    initialize_extensions(app)
+    sio.app = app
+    socketio_app = socketio.WSGIApp(sio, app)
    end_time = time.perf_counter()
    if dify_config.DEBUG:
        logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
-    return app
+    return socketio_app, app
def initialize_extensions(app: DifyApp):
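Taken together, the app.py and app_factory.py hunks above make create_app() return both the Socket.IO WSGI wrapper and the Flask app, served through gevent. A minimal self-contained sketch of the same wiring (the Flask app construction here is simplified and illustrative, not Dify's actual factory):

import socketio
from flask import Flask

# Shared Socket.IO server, mirroring extensions/ext_socketio.py further below.
sio = socketio.Server(async_mode="gevent", cors_allowed_origins="*")

def create_app():
    # Build the Flask app, then wrap it so a single WSGI callable serves
    # both the HTTP API and the Socket.IO websocket traffic.
    flask_app = Flask(__name__)
    sio.app = flask_app  # lets socket handlers open an app context later
    socketio_app = socketio.WSGIApp(sio, flask_app)
    return socketio_app, flask_app

if __name__ == "__main__":
    # Development entrypoint, as in the patched app.py: gevent's WSGI server
    # plus the websocket handler serve the combined application.
    from gevent import pywsgi
    from geventwebsocket.handler import WebSocketHandler

    socketio_app, flask_app = create_app()
    server = pywsgi.WSGIServer(("0.0.0.0", 5001), socketio_app, handler_class=WebSocketHandler)
    server.serve_forever()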

View File

@ -1229,6 +1229,13 @@ class PositionConfig(BaseSettings):
return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
class CollaborationConfig(BaseSettings):
ENABLE_COLLABORATION_MODE: bool = Field(
description="Whether to enable collaboration mode features across the workspace",
default=False,
)
class LoginConfig(BaseSettings):
ENABLE_EMAIL_CODE_LOGIN: bool = Field(
description="whether to enable email code login",
@ -1347,6 +1354,7 @@ class FeatureConfig(
WorkflowConfig,
WorkflowNodeExecutionConfig,
WorkspaceConfig,
CollaborationConfig,
LoginConfig,
AccountConfig,
SwaggerUIConfig,
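With CollaborationConfig mixed into FeatureConfig, the toggle is read like any other dify_config field. A small sketch of gating collaboration behaviour on it (the helper function is illustrative, not part of the diff):

from configs import dify_config

def collaboration_enabled() -> bool:
    # Defaults to False unless ENABLE_COLLABORATION_MODE is set in the
    # environment or the .env file shown above.
    return bool(dify_config.ENABLE_COLLABORATION_MODE)

if collaboration_enabled():
    print("collaboration mode is on")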

View File

@ -63,6 +63,7 @@ from .app import (
statistic,
workflow,
workflow_app_log,
workflow_comment,
workflow_draft_variable,
workflow_run,
workflow_statistic,
@ -114,6 +115,7 @@ from .explore import (
saved_message,
trial,
)
from .socketio import workflow as socketio_workflow # pyright: ignore[reportUnusedImport]
# Import tag controllers
from .tag import tags
@ -207,6 +209,7 @@ __all__ = [
"website",
"workflow",
"workflow_app_log",
"workflow_comment",
"workflow_draft_variable",
"workflow_run",
"workflow_statistic",

View File

@ -32,8 +32,10 @@ from core.trigger.debug.event_selectors import (
from core.workflow.enums import NodeType
from core.workflow.graph_engine.manager import GraphEngineManager
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from factories import file_factory, variable_factory
from fields.member_fields import simple_account_fields
from fields.online_user_fields import online_user_list_fields
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
@ -43,6 +45,7 @@ from libs.login import current_account_with_tenant, login_required
from models import App
from models.model import AppMode
from models.workflow import Workflow
from repositories.workflow_collaboration_repository import WORKFLOW_ONLINE_USERS_PREFIX
from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowHashNotEqualError
from services.errors.llm import InvokeRateLimitError
@ -180,6 +183,14 @@ class WorkflowUpdatePayload(BaseModel):
marked_comment: str | None = Field(default=None, max_length=100)
class WorkflowFeaturesPayload(BaseModel):
features: dict[str, Any] = Field(..., description="Workflow feature configuration")
class WorkflowOnlineUsersQuery(BaseModel):
workflow_ids: str = Field(..., description="Comma-separated workflow IDs")
class DraftWorkflowTriggerRunPayload(BaseModel):
node_id: str
@ -203,6 +214,8 @@ reg(DefaultBlockConfigQuery)
reg(ConvertToWorkflowPayload)
reg(WorkflowListQuery)
reg(WorkflowUpdatePayload)
reg(WorkflowFeaturesPayload)
reg(WorkflowOnlineUsersQuery)
reg(DraftWorkflowTriggerRunPayload)
reg(DraftWorkflowTriggerRunAllPayload)
@ -791,6 +804,31 @@ class ConvertToWorkflowApi(Resource):
}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/features")
class WorkflowFeaturesApi(Resource):
"""Update draft workflow features."""
@console_ns.expect(console_ns.models[WorkflowFeaturesPayload.__name__])
@console_ns.doc("update_workflow_features")
@console_ns.doc(description="Update draft workflow features")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Workflow features updated successfully")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
def post(self, app_model: App):
current_user, _ = current_account_with_tenant()
args = WorkflowFeaturesPayload.model_validate(console_ns.payload or {})
features = args.features
workflow_service = WorkflowService()
workflow_service.update_draft_workflow_features(app_model=app_model, features=features, account=current_user)
return {"result": "success"}
@console_ns.route("/apps/<uuid:app_id>/workflows")
class PublishedAllWorkflowApi(Resource):
@console_ns.expect(console_ns.models[WorkflowListQuery.__name__])
@ -1166,3 +1204,32 @@ class DraftWorkflowTriggerRunAllApi(Resource):
"status": "error",
}
), 400
@console_ns.route("/apps/workflows/online-users")
class WorkflowOnlineUsersApi(Resource):
@console_ns.expect(console_ns.models[WorkflowOnlineUsersQuery.__name__])
@console_ns.doc("get_workflow_online_users")
@console_ns.doc(description="Get workflow online users")
@setup_required
@login_required
@account_initialization_required
@marshal_with(online_user_list_fields)
def get(self):
args = WorkflowOnlineUsersQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
workflow_ids = [workflow_id.strip() for workflow_id in args.workflow_ids.split(",") if workflow_id.strip()]
results = []
for workflow_id in workflow_ids:
users_json = redis_client.hgetall(f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}")
users = []
for _, user_info_json in users_json.items():
try:
users.append(json.loads(user_info_json))
except Exception:
continue
results.append({"workflow_id": workflow_id, "users": users})
return {"data": results}
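A hedged sketch of calling the new online-users endpoint from a script. The /console/api prefix and the bearer token are assumptions about the console API deployment; the route, query parameter and response shape come from the code above and from the online user fields further below:

import requests

BASE_URL = "http://localhost:5001/console/api"  # assumed console API prefix
HEADERS = {"Authorization": "Bearer <console-access-token>"}  # assumed auth

resp = requests.get(
    f"{BASE_URL}/apps/workflows/online-users",
    params={"workflow_ids": "wf-id-1,wf-id-2"},  # comma-separated, per WorkflowOnlineUsersQuery
    headers=HEADERS,
)
resp.raise_for_status()
for entry in resp.json()["data"]:
    print(entry["workflow_id"], [user.get("username") for user in entry["users"]])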

View File

@ -0,0 +1,317 @@
import logging
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.member_fields import account_with_role_fields
from fields.workflow_comment_fields import (
workflow_comment_basic_fields,
workflow_comment_create_fields,
workflow_comment_detail_fields,
workflow_comment_reply_create_fields,
workflow_comment_reply_update_fields,
workflow_comment_resolve_fields,
workflow_comment_update_fields,
)
from libs.login import current_user, login_required
from models import App
from services.account_service import TenantService
from services.workflow_comment_service import WorkflowCommentService
logger = logging.getLogger(__name__)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
class WorkflowCommentCreatePayload(BaseModel):
position_x: float = Field(..., description="Comment X position")
position_y: float = Field(..., description="Comment Y position")
content: str = Field(..., description="Comment content")
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
class WorkflowCommentUpdatePayload(BaseModel):
content: str = Field(..., description="Comment content")
position_x: float | None = Field(default=None, description="Comment X position")
position_y: float | None = Field(default=None, description="Comment Y position")
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
class WorkflowCommentReplyCreatePayload(BaseModel):
content: str = Field(..., description="Reply content")
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
class WorkflowCommentReplyUpdatePayload(BaseModel):
content: str = Field(..., description="Reply content")
mentioned_user_ids: list[str] = Field(default_factory=list, description="Mentioned user IDs")
for model in (
WorkflowCommentCreatePayload,
WorkflowCommentUpdatePayload,
WorkflowCommentReplyCreatePayload,
WorkflowCommentReplyUpdatePayload,
):
console_ns.schema_model(model.__name__, model.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
workflow_comment_basic_model = console_ns.model("WorkflowCommentBasic", workflow_comment_basic_fields)
workflow_comment_detail_model = console_ns.model("WorkflowCommentDetail", workflow_comment_detail_fields)
workflow_comment_create_model = console_ns.model("WorkflowCommentCreate", workflow_comment_create_fields)
workflow_comment_update_model = console_ns.model("WorkflowCommentUpdate", workflow_comment_update_fields)
workflow_comment_resolve_model = console_ns.model("WorkflowCommentResolve", workflow_comment_resolve_fields)
workflow_comment_reply_create_model = console_ns.model(
"WorkflowCommentReplyCreate", workflow_comment_reply_create_fields
)
workflow_comment_reply_update_model = console_ns.model(
"WorkflowCommentReplyUpdate", workflow_comment_reply_update_fields
)
workflow_comment_mention_users_model = console_ns.model(
"WorkflowCommentMentionUsers",
{"users": fields.List(fields.Nested(account_with_role_fields))},
)
@console_ns.route("/apps/<uuid:app_id>/workflow/comments")
class WorkflowCommentListApi(Resource):
"""API for listing and creating workflow comments."""
@console_ns.doc("list_workflow_comments")
@console_ns.doc(description="Get all comments for a workflow")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Comments retrieved successfully", workflow_comment_basic_model)
@login_required
@setup_required
@account_initialization_required
@get_app_model()
@marshal_with(workflow_comment_basic_model, envelope="data")
def get(self, app_model: App):
"""Get all comments for a workflow."""
comments = WorkflowCommentService.get_comments(tenant_id=current_user.current_tenant_id, app_id=app_model.id)
return comments
@console_ns.doc("create_workflow_comment")
@console_ns.doc(description="Create a new workflow comment")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(console_ns.models[WorkflowCommentCreatePayload.__name__])
@console_ns.response(201, "Comment created successfully", workflow_comment_create_model)
@login_required
@setup_required
@account_initialization_required
@get_app_model()
@marshal_with(workflow_comment_create_model)
def post(self, app_model: App):
"""Create a new workflow comment."""
payload = WorkflowCommentCreatePayload.model_validate(console_ns.payload or {})
result = WorkflowCommentService.create_comment(
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
created_by=current_user.id,
content=payload.content,
position_x=payload.position_x,
position_y=payload.position_y,
mentioned_user_ids=payload.mentioned_user_ids,
)
return result, 201
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>")
class WorkflowCommentDetailApi(Resource):
"""API for managing individual workflow comments."""
@console_ns.doc("get_workflow_comment")
@console_ns.doc(description="Get a specific workflow comment")
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
@console_ns.response(200, "Comment retrieved successfully", workflow_comment_detail_model)
@login_required
@setup_required
@account_initialization_required
@get_app_model()
@marshal_with(workflow_comment_detail_model)
def get(self, app_model: App, comment_id: str):
"""Get a specific workflow comment."""
comment = WorkflowCommentService.get_comment(
tenant_id=current_user.current_tenant_id, app_id=app_model.id, comment_id=comment_id
)
return comment
@console_ns.doc("update_workflow_comment")
@console_ns.doc(description="Update a workflow comment")
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
@console_ns.expect(console_ns.models[WorkflowCommentUpdatePayload.__name__])
@console_ns.response(200, "Comment updated successfully", workflow_comment_update_model)
@login_required
@setup_required
@account_initialization_required
@get_app_model()
@marshal_with(workflow_comment_update_model)
def put(self, app_model: App, comment_id: str):
"""Update a workflow comment."""
payload = WorkflowCommentUpdatePayload.model_validate(console_ns.payload or {})
result = WorkflowCommentService.update_comment(
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
comment_id=comment_id,
user_id=current_user.id,
content=payload.content,
position_x=payload.position_x,
position_y=payload.position_y,
mentioned_user_ids=payload.mentioned_user_ids,
)
return result
@console_ns.doc("delete_workflow_comment")
@console_ns.doc(description="Delete a workflow comment")
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
@console_ns.response(204, "Comment deleted successfully")
@login_required
@setup_required
@account_initialization_required
@get_app_model()
def delete(self, app_model: App, comment_id: str):
"""Delete a workflow comment."""
WorkflowCommentService.delete_comment(
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
comment_id=comment_id,
user_id=current_user.id,
)
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/resolve")
class WorkflowCommentResolveApi(Resource):
"""API for resolving and reopening workflow comments."""
@console_ns.doc("resolve_workflow_comment")
@console_ns.doc(description="Resolve a workflow comment")
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
@console_ns.response(200, "Comment resolved successfully", workflow_comment_resolve_model)
@login_required
@setup_required
@account_initialization_required
@get_app_model()
@marshal_with(workflow_comment_resolve_model)
def post(self, app_model: App, comment_id: str):
"""Resolve a workflow comment."""
comment = WorkflowCommentService.resolve_comment(
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
comment_id=comment_id,
user_id=current_user.id,
)
return comment
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies")
class WorkflowCommentReplyApi(Resource):
"""API for managing comment replies."""
@console_ns.doc("create_workflow_comment_reply")
@console_ns.doc(description="Add a reply to a workflow comment")
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID"})
@console_ns.expect(console_ns.models[WorkflowCommentReplyCreatePayload.__name__])
@console_ns.response(201, "Reply created successfully", workflow_comment_reply_create_model)
@login_required
@setup_required
@account_initialization_required
@get_app_model()
@marshal_with(workflow_comment_reply_create_model)
def post(self, app_model: App, comment_id: str):
"""Add a reply to a workflow comment."""
# Validate comment access first
WorkflowCommentService.validate_comment_access(
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
)
payload = WorkflowCommentReplyCreatePayload.model_validate(console_ns.payload or {})
result = WorkflowCommentService.create_reply(
comment_id=comment_id,
content=payload.content,
created_by=current_user.id,
mentioned_user_ids=payload.mentioned_user_ids,
)
return result, 201
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies/<string:reply_id>")
class WorkflowCommentReplyDetailApi(Resource):
"""API for managing individual comment replies."""
@console_ns.doc("update_workflow_comment_reply")
@console_ns.doc(description="Update a comment reply")
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
@console_ns.expect(console_ns.models[WorkflowCommentReplyUpdatePayload.__name__])
@console_ns.response(200, "Reply updated successfully", workflow_comment_reply_update_model)
@login_required
@setup_required
@account_initialization_required
@get_app_model()
@marshal_with(workflow_comment_reply_update_model)
def put(self, app_model: App, comment_id: str, reply_id: str):
"""Update a comment reply."""
# Validate comment access first
WorkflowCommentService.validate_comment_access(
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
)
payload = WorkflowCommentReplyUpdatePayload.model_validate(console_ns.payload or {})
reply = WorkflowCommentService.update_reply(
reply_id=reply_id,
user_id=current_user.id,
content=payload.content,
mentioned_user_ids=payload.mentioned_user_ids,
)
return reply
@console_ns.doc("delete_workflow_comment_reply")
@console_ns.doc(description="Delete a comment reply")
@console_ns.doc(params={"app_id": "Application ID", "comment_id": "Comment ID", "reply_id": "Reply ID"})
@console_ns.response(204, "Reply deleted successfully")
@login_required
@setup_required
@account_initialization_required
@get_app_model()
def delete(self, app_model: App, comment_id: str, reply_id: str):
"""Delete a comment reply."""
# Validate comment access first
WorkflowCommentService.validate_comment_access(
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
)
WorkflowCommentService.delete_reply(reply_id=reply_id, user_id=current_user.id)
return {"result": "success"}, 204
@console_ns.route("/apps/<uuid:app_id>/workflow/comments/mention-users")
class WorkflowCommentMentionUsersApi(Resource):
"""API for getting mentionable users for workflow comments."""
@console_ns.doc("workflow_comment_mention_users")
@console_ns.doc(description="Get all users in current tenant for mentions")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Mentionable users retrieved successfully", workflow_comment_mention_users_model)
@login_required
@setup_required
@account_initialization_required
@get_app_model()
@marshal_with(workflow_comment_mention_users_model)
def get(self, app_model: App):
"""Get all users in current tenant for mentions."""
members = TenantService.get_tenant_members(current_user.current_tenant)
return {"users": members}
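A usage sketch for the comment endpoints above. The /console/api prefix and auth header are assumptions; the routes and payload fields come from the controller, and the comment id field in the create response is assumed from workflow_comment_create_fields:

import requests

BASE_URL = "http://localhost:5001/console/api"  # assumed console API prefix
HEADERS = {"Authorization": "Bearer <console-access-token>"}  # assumed auth
APP_ID = "<app-uuid>"

# Create a comment anchored on the workflow canvas.
created = requests.post(
    f"{BASE_URL}/apps/{APP_ID}/workflow/comments",
    json={
        "position_x": 120.5,
        "position_y": 80.0,
        "content": "Should this branch handle the empty-input case?",
        "mentioned_user_ids": [],
    },
    headers=HEADERS,
)
created.raise_for_status()
comment_id = created.json()["id"]  # assumes the create response exposes the comment id

# Reply to the thread, then mark it resolved.
requests.post(
    f"{BASE_URL}/apps/{APP_ID}/workflow/comments/{comment_id}/replies",
    json={"content": "Good catch, fixed.", "mentioned_user_ids": []},
    headers=HEADERS,
).raise_for_status()
requests.post(
    f"{BASE_URL}/apps/{APP_ID}/workflow/comments/{comment_id}/resolve",
    headers=HEADERS,
).raise_for_status()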

View File

@ -21,9 +21,9 @@ from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
+from factories import variable_factory
from factories.file_factory import build_from_mapping, build_from_mappings
-from factories.variable_factory import build_segment_with_type
-from libs.login import login_required
+from libs.login import current_user, login_required
from models import App, AppMode
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
@ -43,6 +43,16 @@ class WorkflowDraftVariableUpdatePayload(BaseModel):
value: Any | None = Field(default=None, description="Variable value")
class ConversationVariableUpdatePayload(BaseModel):
conversation_variables: list[dict[str, Any]] = Field(
..., description="Conversation variables for the draft workflow"
)
class EnvironmentVariableUpdatePayload(BaseModel):
environment_variables: list[dict[str, Any]] = Field(..., description="Environment variables for the draft workflow")
console_ns.schema_model(
WorkflowDraftVariableListQuery.__name__,
WorkflowDraftVariableListQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
@ -51,6 +61,14 @@ console_ns.schema_model(
WorkflowDraftVariableUpdatePayload.__name__,
WorkflowDraftVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
ConversationVariableUpdatePayload.__name__,
ConversationVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
EnvironmentVariableUpdatePayload.__name__,
EnvironmentVariableUpdatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
def _convert_values_to_json_serializable_object(value: Segment):
@ -383,7 +401,7 @@ class VariableApi(Resource):
if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
raw_value = build_from_mappings(mappings=raw_value, tenant_id=app_model.tenant_id)
-new_value = build_segment_with_type(variable.value_type, raw_value)
+new_value = variable_factory.build_segment_with_type(variable.value_type, raw_value)
draft_var_srv.update_variable(variable, name=new_name, value=new_value)
db.session.commit()
return variable
@ -476,6 +494,34 @@ class ConversationVariableCollectionApi(Resource):
db.session.commit()
return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)
@console_ns.expect(console_ns.models[ConversationVariableUpdatePayload.__name__])
@console_ns.doc("update_conversation_variables")
@console_ns.doc(description="Update conversation variables for workflow draft")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Conversation variables updated successfully")
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_app_model(mode=AppMode.ADVANCED_CHAT)
def post(self, app_model: App):
payload = ConversationVariableUpdatePayload.model_validate(console_ns.payload or {})
workflow_service = WorkflowService()
conversation_variables_list = payload.conversation_variables
conversation_variables = [
variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
]
workflow_service.update_draft_workflow_conversation_variables(
app_model=app_model,
account=current_user,
conversation_variables=conversation_variables,
)
return {"result": "success"}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
@ -527,3 +573,31 @@ class EnvironmentVariableCollectionApi(Resource):
)
return {"items": env_vars_list}
@console_ns.expect(console_ns.models[EnvironmentVariableUpdatePayload.__name__])
@console_ns.doc("update_environment_variables")
@console_ns.doc(description="Update environment variables for workflow draft")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Environment variables updated successfully")
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
def post(self, app_model: App):
payload = EnvironmentVariableUpdatePayload.model_validate(console_ns.payload or {})
workflow_service = WorkflowService()
environment_variables_list = payload.environment_variables
environment_variables = [
variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
]
workflow_service.update_draft_workflow_environment_variables(
app_model=app_model,
account=current_user,
environment_variables=environment_variables,
)
return {"result": "success"}
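The two new POST handlers replace the draft workflow's conversation and environment variables wholesale. A hedged call sketch; the collection route (.../workflows/draft/environment-variables) and the exact variable mapping keys are assumptions based on the existing GET endpoints, which this hunk does not show:

import requests

BASE_URL = "http://localhost:5001/console/api"  # assumed console API prefix
HEADERS = {"Authorization": "Bearer <console-access-token>"}  # assumed auth
APP_ID = "<app-uuid>"

resp = requests.post(
    f"{BASE_URL}/apps/{APP_ID}/workflows/draft/environment-variables",  # assumed route
    json={
        "environment_variables": [
            # Illustrative mapping; real keys must match what
            # variable_factory.build_environment_variable_from_mapping expects.
            {"name": "API_BASE", "value": "https://example.com", "value_type": "string"},
        ]
    },
    headers=HEADERS,
)
resp.raise_for_status()
print(resp.json())  # {"result": "success"}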

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,108 @@
import logging
from collections.abc import Callable
from typing import cast
from flask import Request as FlaskRequest
from extensions.ext_socketio import sio
from libs.passport import PassportService
from libs.token import extract_access_token
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
from services.account_service import AccountService
from services.workflow_collaboration_service import WorkflowCollaborationService
repository = WorkflowCollaborationRepository()
collaboration_service = WorkflowCollaborationService(repository, sio)
def _sio_on(event: str) -> Callable[[Callable[..., object]], Callable[..., object]]:
return cast(Callable[[Callable[..., object]], Callable[..., object]], sio.on(event))
@_sio_on("connect")
def socket_connect(sid, environ, auth):
"""
WebSocket connect event, do authentication here.
"""
try:
request_environ = FlaskRequest(environ)
token = extract_access_token(request_environ)
except Exception:
logging.exception("Failed to extract token")
token = None
if not token:
logging.warning("Socket connect rejected: missing token (sid=%s)", sid)
return False
try:
decoded = PassportService().verify(token)
user_id = decoded.get("user_id")
if not user_id:
logging.warning("Socket connect rejected: missing user_id (sid=%s)", sid)
return False
with sio.app.app_context():
user = AccountService.load_logged_in_account(account_id=user_id)
if not user:
logging.warning("Socket connect rejected: user not found (user_id=%s, sid=%s)", user_id, sid)
return False
if not user.has_edit_permission:
logging.warning("Socket connect rejected: no edit permission (user_id=%s, sid=%s)", user_id, sid)
return False
collaboration_service.save_session(sid, user)
return True
except Exception:
logging.exception("Socket authentication failed")
return False
@_sio_on("user_connect")
def handle_user_connect(sid, data):
"""
Handle user connect event. Each session (tab) is treated as an independent collaborator.
"""
workflow_id = data.get("workflow_id")
if not workflow_id:
return {"msg": "workflow_id is required"}, 400
result = collaboration_service.register_session(workflow_id, sid)
if not result:
return {"msg": "unauthorized"}, 401
user_id, is_leader = result
return {"msg": "connected", "user_id": user_id, "sid": sid, "isLeader": is_leader}
@_sio_on("disconnect")
def handle_disconnect(sid):
"""
Handle session disconnect event. Remove the specific session from online users.
"""
collaboration_service.disconnect_session(sid)
@_sio_on("collaboration_event")
def handle_collaboration_event(sid, data):
"""
Handle general collaboration events, including:
1. mouse_move
2. vars_and_features_update
3. sync_request (ask leader to update graph)
4. app_state_update
5. mcp_server_update
6. workflow_update
7. comments_update
8. node_panel_presence
"""
return collaboration_service.relay_collaboration_event(sid, data)
@_sio_on("graph_event")
def handle_graph_event(sid, data):
"""
Handle graph events - simple broadcast relay.
"""
return collaboration_service.relay_graph_event(sid, data)
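For reference, a minimal client-side sketch (not part of this change) of how these handlers might be exercised with the python-socketio client. The URL, token, and workflow id are placeholders, and whether the token travels as a bearer header or a cookie depends on extract_access_token.

import socketio

client = socketio.Client()

@client.on("status")
def on_status(payload):
    # Emitted by the collaboration service to tell this session whether it is the leader.
    print("is leader:", payload["isLeader"])

@client.on("online_users")
def on_online_users(payload):
    # Broadcast to the workflow room whenever the collaborator list changes.
    print("online users:", [u["username"] for u in payload["users"]])

# Placeholder URL and token; socket_connect rejects the session if no valid
# access token can be extracted from the handshake request.
client.connect("http://localhost:5001", headers={"Authorization": "Bearer <token>"})

# Register this tab as a collaborator; the ack mirrors handle_user_connect.
ack = client.call("user_connect", {"workflow_id": "<workflow-id>"})
print(ack)

# Relay a cursor position to the other sessions in the room.
client.call("collaboration_event", {"type": "mouse_move", "data": {"x": 10, "y": 20}})

client.disconnect()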

@ -36,6 +36,7 @@ from controllers.console.wraps import (
only_edition_cloud,
setup_required,
)
from core.file import helpers as file_helpers
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.datetime_utils import naive_utc_now
@ -73,6 +74,10 @@ class AccountAvatarPayload(BaseModel):
avatar: str
class AccountAvatarQuery(BaseModel):
avatar: str = Field(..., description="Avatar file ID")
class AccountInterfaceLanguagePayload(BaseModel):
interface_language: str
@ -158,6 +163,7 @@ def reg(cls: type[BaseModel]):
reg(AccountInitPayload)
reg(AccountNamePayload)
reg(AccountAvatarPayload)
reg(AccountAvatarQuery)
reg(AccountInterfaceLanguagePayload)
reg(AccountInterfaceThemePayload)
reg(AccountTimezonePayload)
@ -248,6 +254,18 @@ class AccountNameApi(Resource):
@console_ns.route("/account/avatar")
class AccountAvatarApi(Resource):
@console_ns.expect(console_ns.models[AccountAvatarQuery.__name__])
@console_ns.doc("get_account_avatar")
@console_ns.doc(description="Get account avatar url")
@setup_required
@login_required
@account_initialization_required
def get(self):
args = AccountAvatarQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore
avatar_url = file_helpers.get_signed_file_url(args.avatar)
return {"avatar_url": avatar_url}
@console_ns.expect(console_ns.models[AccountAvatarPayload.__name__])
@setup_required
@login_required

@ -119,14 +119,16 @@ elif [[ "${MODE}" == "job" ]]; then
else
if [[ "${DEBUG}" == "true" ]]; then
- exec flask run --host=${DIFY_BIND_ADDRESS:-0.0.0.0} --port=${DIFY_PORT:-5001} --debug
+ export HOST=${DIFY_BIND_ADDRESS:-0.0.0.0}
+ export PORT=${DIFY_PORT:-5001}
+ exec python -m app
else
exec gunicorn \
--bind "${DIFY_BIND_ADDRESS:-0.0.0.0}:${DIFY_PORT:-5001}" \
--workers ${SERVER_WORKER_AMOUNT:-1} \
- --worker-class ${SERVER_WORKER_CLASS:-gevent} \
+ --worker-class ${SERVER_WORKER_CLASS:-geventwebsocket.gunicorn.workers.GeventWebSocketWorker} \
--worker-connections ${SERVER_WORKER_CONNECTIONS:-10} \
--timeout ${GUNICORN_TIMEOUT:-200} \
- app:app
+ app:socketio_app
fi
fi

@ -0,0 +1,5 @@
import socketio # type: ignore[reportMissingTypeStubs]
from configs import dify_config
sio = socketio.Server(async_mode="gevent", cors_allowed_origins=dify_config.CONSOLE_CORS_ALLOW_ORIGINS)
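How this Server instance ends up behind gunicorn is not shown in this hunk; the following is only a sketch of the usual python-socketio wiring, assuming the app factory returns both objects as the test fixture changes further down suggest.

import socketio
from flask import Flask

sio = socketio.Server(async_mode="gevent", cors_allowed_origins="*")  # origins placeholder
flask_app = Flask(__name__)

# socketio.WSGIApp routes /socket.io traffic to sio and everything else to the
# Flask app; gunicorn then serves this object (see app:socketio_app in the
# entrypoint change above).
socketio_app = socketio.WSGIApp(sio, flask_app)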

@ -0,0 +1,17 @@
from flask_restx import fields
online_user_partial_fields = {
"user_id": fields.String,
"username": fields.String,
"avatar": fields.String,
"sid": fields.String,
}
workflow_online_users_fields = {
"workflow_id": fields.String,
"users": fields.List(fields.Nested(online_user_partial_fields)),
}
online_user_list_fields = {
"data": fields.List(fields.Nested(workflow_online_users_fields)),
}

@ -0,0 +1,96 @@
from flask_restx import fields
from libs.helper import AvatarUrlField, TimestampField
# Basic account fields for comments
account_fields = {
"id": fields.String,
"name": fields.String,
"email": fields.String,
"avatar_url": AvatarUrlField,
}
# Comment mention fields
workflow_comment_mention_fields = {
"mentioned_user_id": fields.String,
"mentioned_user_account": fields.Nested(account_fields, allow_null=True),
"reply_id": fields.String,
}
# Comment reply fields
workflow_comment_reply_fields = {
"id": fields.String,
"content": fields.String,
"created_by": fields.String,
"created_by_account": fields.Nested(account_fields, allow_null=True),
"created_at": TimestampField,
}
# Basic comment fields (for list views)
workflow_comment_basic_fields = {
"id": fields.String,
"position_x": fields.Float,
"position_y": fields.Float,
"content": fields.String,
"created_by": fields.String,
"created_by_account": fields.Nested(account_fields, allow_null=True),
"created_at": TimestampField,
"updated_at": TimestampField,
"resolved": fields.Boolean,
"resolved_at": TimestampField,
"resolved_by": fields.String,
"resolved_by_account": fields.Nested(account_fields, allow_null=True),
"reply_count": fields.Integer,
"mention_count": fields.Integer,
"participants": fields.List(fields.Nested(account_fields)),
}
# Detailed comment fields (for single comment view)
workflow_comment_detail_fields = {
"id": fields.String,
"position_x": fields.Float,
"position_y": fields.Float,
"content": fields.String,
"created_by": fields.String,
"created_by_account": fields.Nested(account_fields, allow_null=True),
"created_at": TimestampField,
"updated_at": TimestampField,
"resolved": fields.Boolean,
"resolved_at": TimestampField,
"resolved_by": fields.String,
"resolved_by_account": fields.Nested(account_fields, allow_null=True),
"replies": fields.List(fields.Nested(workflow_comment_reply_fields)),
"mentions": fields.List(fields.Nested(workflow_comment_mention_fields)),
}
# Comment creation response fields (simplified)
workflow_comment_create_fields = {
"id": fields.String,
"created_at": TimestampField,
}
# Comment update response fields (simplified)
workflow_comment_update_fields = {
"id": fields.String,
"updated_at": TimestampField,
}
# Comment resolve response fields
workflow_comment_resolve_fields = {
"id": fields.String,
"resolved": fields.Boolean,
"resolved_at": TimestampField,
"resolved_by": fields.String,
}
# Reply creation response fields (simplified)
workflow_comment_reply_create_fields = {
"id": fields.String,
"created_at": TimestampField,
}
# Reply update response fields
workflow_comment_reply_update_fields = {
"id": fields.String,
"updated_at": TimestampField,
}
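These dicts are flask_restx field maps; a small sketch (placeholder data, not part of the diff) of how a controller might marshal service results with them:

from flask_restx import marshal

comments = []  # e.g. the result of WorkflowCommentService.get_comments(tenant_id, app_id)
items = [marshal(comment, workflow_comment_basic_fields) for comment in comments]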

@ -0,0 +1,90 @@
"""Add workflow comments table
Revision ID: 227822d22895
Revises: 9d77545f524e
Create Date: 2025-08-22 17:26:15.255980
"""
from alembic import op
import models as models
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '227822d22895'
down_revision = '9d77545f524e'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('workflow_comments',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('tenant_id', models.types.StringUUID(), nullable=False),
sa.Column('app_id', models.types.StringUUID(), nullable=False),
sa.Column('position_x', sa.Float(), nullable=False),
sa.Column('position_y', sa.Float(), nullable=False),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('resolved', sa.Boolean(), server_default=sa.text('false'), nullable=False),
sa.Column('resolved_at', sa.DateTime(), nullable=True),
sa.Column('resolved_by', models.types.StringUUID(), nullable=True),
sa.PrimaryKeyConstraint('id', name='workflow_comments_pkey')
)
with op.batch_alter_table('workflow_comments', schema=None) as batch_op:
batch_op.create_index('workflow_comments_app_idx', ['tenant_id', 'app_id'], unique=False)
batch_op.create_index('workflow_comments_created_at_idx', ['created_at'], unique=False)
op.create_table('workflow_comment_replies',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('comment_id', models.types.StringUUID(), nullable=False),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('created_by', models.types.StringUUID(), nullable=False),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.ForeignKeyConstraint(['comment_id'], ['workflow_comments.id'], name=op.f('workflow_comment_replies_comment_id_fkey'), ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='workflow_comment_replies_pkey')
)
with op.batch_alter_table('workflow_comment_replies', schema=None) as batch_op:
batch_op.create_index('comment_replies_comment_idx', ['comment_id'], unique=False)
batch_op.create_index('comment_replies_created_at_idx', ['created_at'], unique=False)
op.create_table('workflow_comment_mentions',
sa.Column('id', models.types.StringUUID(), server_default=sa.text('uuidv7()'), nullable=False),
sa.Column('comment_id', models.types.StringUUID(), nullable=False),
sa.Column('reply_id', models.types.StringUUID(), nullable=True),
sa.Column('mentioned_user_id', models.types.StringUUID(), nullable=False),
sa.ForeignKeyConstraint(['comment_id'], ['workflow_comments.id'], name=op.f('workflow_comment_mentions_comment_id_fkey'), ondelete='CASCADE'),
sa.ForeignKeyConstraint(['reply_id'], ['workflow_comment_replies.id'], name=op.f('workflow_comment_mentions_reply_id_fkey'), ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='workflow_comment_mentions_pkey')
)
with op.batch_alter_table('workflow_comment_mentions', schema=None) as batch_op:
batch_op.create_index('comment_mentions_comment_idx', ['comment_id'], unique=False)
batch_op.create_index('comment_mentions_reply_idx', ['reply_id'], unique=False)
batch_op.create_index('comment_mentions_user_idx', ['mentioned_user_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('workflow_comment_mentions', schema=None) as batch_op:
batch_op.drop_index('comment_mentions_user_idx')
batch_op.drop_index('comment_mentions_reply_idx')
batch_op.drop_index('comment_mentions_comment_idx')
op.drop_table('workflow_comment_mentions')
with op.batch_alter_table('workflow_comment_replies', schema=None) as batch_op:
batch_op.drop_index('comment_replies_created_at_idx')
batch_op.drop_index('comment_replies_comment_idx')
op.drop_table('workflow_comment_replies')
with op.batch_alter_table('workflow_comments', schema=None) as batch_op:
batch_op.drop_index('workflow_comments_created_at_idx')
batch_op.drop_index('workflow_comments_app_idx')
op.drop_table('workflow_comments')
# ### end Alembic commands ###
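A hedged sketch of applying or rolling back this revision programmatically with Alembic's command API; the config path is an assumption about the project layout, and the usual flask db upgrade flow works just as well.

from alembic import command
from alembic.config import Config

cfg = Config("migrations/alembic.ini")  # assumed location of the Alembic config
command.upgrade(cfg, "227822d22895")    # create the workflow comment tables
command.downgrade(cfg, "9d77545f524e")  # drop them again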

@ -9,6 +9,11 @@ from .account import (
TenantStatus,
)
from .api_based_extension import APIBasedExtension, APIBasedExtensionPoint
from .comment import (
WorkflowComment,
WorkflowCommentMention,
WorkflowCommentReply,
)
from .dataset import (
AppDatasetJoin,
Dataset,
@ -205,6 +210,9 @@ __all__ = [
"WorkflowAppLog",
"WorkflowAppLogCreatedFrom",
"WorkflowArchiveLog",
"WorkflowComment",
"WorkflowCommentMention",
"WorkflowCommentReply",
"WorkflowNodeExecutionModel",
"WorkflowNodeExecutionOffload",
"WorkflowNodeExecutionTriggeredFrom",

api/models/comment.py (new file, 210 lines)

@ -0,0 +1,210 @@
"""Workflow comment models."""
from datetime import datetime
from typing import Optional
from sqlalchemy import Index, func
from sqlalchemy.orm import Mapped, mapped_column, relationship
from .account import Account
from .base import Base
from .engine import db
from .types import StringUUID
class WorkflowComment(Base):
"""Workflow comment model for canvas commenting functionality.
Comments are associated with apps rather than specific workflow versions,
since an app has only one draft workflow at a time and comments should persist
across workflow version changes.
Attributes:
id: Comment ID
tenant_id: Workspace ID
app_id: App ID (primary association, comments belong to apps)
position_x: X coordinate on canvas
position_y: Y coordinate on canvas
content: Comment content
created_by: Creator account ID
created_at: Creation time
updated_at: Last update time
resolved: Whether comment is resolved
resolved_at: Resolution time
resolved_by: Resolver account ID
"""
__tablename__ = "workflow_comments"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="workflow_comments_pkey"),
Index("workflow_comments_app_idx", "tenant_id", "app_id"),
Index("workflow_comments_created_at_idx", "created_at"),
)
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
app_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
position_x: Mapped[float] = mapped_column(db.Float)
position_y: Mapped[float] = mapped_column(db.Float)
content: Mapped[str] = mapped_column(db.Text, nullable=False)
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at: Mapped[datetime] = mapped_column(
db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
)
resolved: Mapped[bool] = mapped_column(db.Boolean, nullable=False, server_default=db.text("false"))
resolved_at: Mapped[datetime | None] = mapped_column(db.DateTime)
resolved_by: Mapped[str | None] = mapped_column(StringUUID)
# Relationships
replies: Mapped[list["WorkflowCommentReply"]] = relationship(
"WorkflowCommentReply", back_populates="comment", cascade="all, delete-orphan"
)
mentions: Mapped[list["WorkflowCommentMention"]] = relationship(
"WorkflowCommentMention", back_populates="comment", cascade="all, delete-orphan"
)
@property
def created_by_account(self):
"""Get creator account."""
if hasattr(self, "_created_by_account_cache"):
return self._created_by_account_cache
return db.session.get(Account, self.created_by)
def cache_created_by_account(self, account: Account | None) -> None:
"""Cache creator account to avoid extra queries."""
self._created_by_account_cache = account
@property
def resolved_by_account(self):
"""Get resolver account."""
if hasattr(self, "_resolved_by_account_cache"):
return self._resolved_by_account_cache
if self.resolved_by:
return db.session.get(Account, self.resolved_by)
return None
def cache_resolved_by_account(self, account: Account | None) -> None:
"""Cache resolver account to avoid extra queries."""
self._resolved_by_account_cache = account
@property
def reply_count(self):
"""Get reply count."""
return len(self.replies)
@property
def mention_count(self):
"""Get mention count."""
return len(self.mentions)
@property
def participants(self):
"""Get all participants (creator + repliers + mentioned users)."""
participant_ids = set()
# Add comment creator
participant_ids.add(self.created_by)
# Add reply creators
participant_ids.update(reply.created_by for reply in self.replies)
# Add mentioned users
participant_ids.update(mention.mentioned_user_id for mention in self.mentions)
# Get account objects
participants = []
for user_id in participant_ids:
account = db.session.get(Account, user_id)
if account:
participants.append(account)
return participants
class WorkflowCommentReply(Base):
"""Workflow comment reply model.
Attributes:
id: Reply ID
comment_id: Parent comment ID
content: Reply content
created_by: Creator account ID
created_at: Creation time
"""
__tablename__ = "workflow_comment_replies"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="workflow_comment_replies_pkey"),
Index("comment_replies_comment_idx", "comment_id"),
Index("comment_replies_created_at_idx", "created_at"),
)
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
comment_id: Mapped[str] = mapped_column(
StringUUID, db.ForeignKey("workflow_comments.id", ondelete="CASCADE"), nullable=False
)
content: Mapped[str] = mapped_column(db.Text, nullable=False)
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(db.DateTime, nullable=False, server_default=func.current_timestamp())
updated_at: Mapped[datetime] = mapped_column(
db.DateTime, nullable=False, server_default=func.current_timestamp(), onupdate=func.current_timestamp()
)
# Relationships
comment: Mapped["WorkflowComment"] = relationship("WorkflowComment", back_populates="replies")
@property
def created_by_account(self):
"""Get creator account."""
if hasattr(self, "_created_by_account_cache"):
return self._created_by_account_cache
return db.session.get(Account, self.created_by)
def cache_created_by_account(self, account: Account | None) -> None:
"""Cache creator account to avoid extra queries."""
self._created_by_account_cache = account
class WorkflowCommentMention(Base):
"""Workflow comment mention model.
Mentions are only for internal accounts, since end users
cannot access the workflow canvas or commenting features.
Attributes:
id: Mention ID
comment_id: Parent comment ID
mentioned_user_id: Mentioned account ID
"""
__tablename__ = "workflow_comment_mentions"
__table_args__ = (
db.PrimaryKeyConstraint("id", name="workflow_comment_mentions_pkey"),
Index("comment_mentions_comment_idx", "comment_id"),
Index("comment_mentions_reply_idx", "reply_id"),
Index("comment_mentions_user_idx", "mentioned_user_id"),
)
id: Mapped[str] = mapped_column(StringUUID, server_default=db.text("uuidv7()"))
comment_id: Mapped[str] = mapped_column(
StringUUID, db.ForeignKey("workflow_comments.id", ondelete="CASCADE"), nullable=False
)
reply_id: Mapped[str | None] = mapped_column(
StringUUID, db.ForeignKey("workflow_comment_replies.id", ondelete="CASCADE"), nullable=True
)
mentioned_user_id: Mapped[str] = mapped_column(StringUUID, nullable=False)
# Relationships
comment: Mapped["WorkflowComment"] = relationship("WorkflowComment", back_populates="mentions")
reply: Mapped[Optional["WorkflowCommentReply"]] = relationship("WorkflowCommentReply")
@property
def mentioned_user_account(self):
"""Get mentioned account."""
if hasattr(self, "_mentioned_user_account_cache"):
return self._mentioned_user_account_cache
return db.session.get(Account, self.mentioned_user_id)
def cache_mentioned_user_account(self, account: Account | None) -> None:
"""Cache mentioned account to avoid extra queries."""
self._mentioned_user_account_cache = account
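A short usage sketch (placeholder id, assumes the application's database context) showing how the cache_* helpers avoid a per-object Account query when rendering comments:

from sqlalchemy.orm import Session

from extensions.ext_database import db
from models import WorkflowComment
from models.account import Account

with Session(db.engine) as session:
    comment = session.get(WorkflowComment, "comment-id")  # placeholder id
    if comment is not None:
        account = session.get(Account, comment.created_by)
        comment.cache_created_by_account(account)
        # created_by_account is now served from the cache instead of a new query
        assert comment.created_by_account is account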

@ -400,7 +400,7 @@ class Workflow(Base): # bug
:return: hash
"""
entity = {"graph": self.graph_dict, "features": self.features_dict}
entity = {"graph": self.graph_dict}
return helper.generate_text_hash(json.dumps(entity, sort_keys=True))

@ -21,6 +21,7 @@ dependencies = [
"flask-orjson~=2.0.0",
"flask-sqlalchemy~=3.1.1",
"gevent~=25.9.1",
"gevent-websocket~=0.10.1",
"gmpy2~=2.2.1",
"google-api-core==2.18.0",
"google-api-python-client==2.90.0",
@ -72,6 +73,7 @@ dependencies = [
"pypdfium2==5.2.0",
"python-docx~=1.1.0",
"python-dotenv==1.0.1",
"python-socketio~=5.13.0",
"pyyaml~=6.0.1",
"readabilipy~=0.3.0",
"redis[hiredis]~=6.1.0",

@ -0,0 +1,147 @@
from __future__ import annotations
import json
from typing import TypedDict
from extensions.ext_redis import redis_client
SESSION_STATE_TTL_SECONDS = 3600
WORKFLOW_ONLINE_USERS_PREFIX = "workflow_online_users:"
WORKFLOW_LEADER_PREFIX = "workflow_leader:"
WS_SID_MAP_PREFIX = "ws_sid_map:"
class WorkflowSessionInfo(TypedDict):
user_id: str
username: str
avatar: str | None
sid: str
connected_at: int
class SidMapping(TypedDict):
workflow_id: str
user_id: str
class WorkflowCollaborationRepository:
def __init__(self) -> None:
self._redis = redis_client
def __repr__(self) -> str:
return f"{self.__class__.__name__}(redis_client={self._redis})"
@staticmethod
def workflow_key(workflow_id: str) -> str:
return f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}"
@staticmethod
def leader_key(workflow_id: str) -> str:
return f"{WORKFLOW_LEADER_PREFIX}{workflow_id}"
@staticmethod
def sid_key(sid: str) -> str:
return f"{WS_SID_MAP_PREFIX}{sid}"
@staticmethod
def _decode(value: str | bytes | None) -> str | None:
if value is None:
return None
if isinstance(value, bytes):
return value.decode("utf-8")
return value
def refresh_session_state(self, workflow_id: str, sid: str) -> None:
workflow_key = self.workflow_key(workflow_id)
sid_key = self.sid_key(sid)
if self._redis.exists(workflow_key):
self._redis.expire(workflow_key, SESSION_STATE_TTL_SECONDS)
if self._redis.exists(sid_key):
self._redis.expire(sid_key, SESSION_STATE_TTL_SECONDS)
def set_session_info(self, workflow_id: str, session_info: WorkflowSessionInfo) -> None:
workflow_key = self.workflow_key(workflow_id)
self._redis.hset(workflow_key, session_info["sid"], json.dumps(session_info))
self._redis.set(
self.sid_key(session_info["sid"]),
json.dumps({"workflow_id": workflow_id, "user_id": session_info["user_id"]}),
ex=SESSION_STATE_TTL_SECONDS,
)
self.refresh_session_state(workflow_id, session_info["sid"])
def get_sid_mapping(self, sid: str) -> SidMapping | None:
raw = self._redis.get(self.sid_key(sid))
if not raw:
return None
value = self._decode(raw)
if not value:
return None
try:
return json.loads(value)
except (TypeError, json.JSONDecodeError):
return None
def delete_session(self, workflow_id: str, sid: str) -> None:
self._redis.hdel(self.workflow_key(workflow_id), sid)
self._redis.delete(self.sid_key(sid))
def session_exists(self, workflow_id: str, sid: str) -> bool:
return bool(self._redis.hexists(self.workflow_key(workflow_id), sid))
def sid_mapping_exists(self, sid: str) -> bool:
return bool(self._redis.exists(self.sid_key(sid)))
def get_session_sids(self, workflow_id: str) -> list[str]:
raw_sids = self._redis.hkeys(self.workflow_key(workflow_id))
decoded_sids: list[str] = []
for sid in raw_sids:
decoded = self._decode(sid)
if decoded:
decoded_sids.append(decoded)
return decoded_sids
def list_sessions(self, workflow_id: str) -> list[WorkflowSessionInfo]:
sessions_json = self._redis.hgetall(self.workflow_key(workflow_id))
users: list[WorkflowSessionInfo] = []
for session_info_json in sessions_json.values():
value = self._decode(session_info_json)
if not value:
continue
try:
session_info = json.loads(value)
except (TypeError, json.JSONDecodeError):
continue
if not isinstance(session_info, dict):
continue
if "user_id" not in session_info or "username" not in session_info or "sid" not in session_info:
continue
users.append(
{
"user_id": str(session_info["user_id"]),
"username": str(session_info["username"]),
"avatar": session_info.get("avatar"),
"sid": str(session_info["sid"]),
"connected_at": int(session_info.get("connected_at") or 0),
}
)
return users
def get_current_leader(self, workflow_id: str) -> str | None:
raw = self._redis.get(self.leader_key(workflow_id))
return self._decode(raw)
def set_leader_if_absent(self, workflow_id: str, sid: str) -> bool:
return bool(self._redis.set(self.leader_key(workflow_id), sid, nx=True, ex=SESSION_STATE_TTL_SECONDS))
def set_leader(self, workflow_id: str, sid: str) -> None:
self._redis.set(self.leader_key(workflow_id), sid, ex=SESSION_STATE_TTL_SECONDS)
def delete_leader(self, workflow_id: str) -> None:
self._redis.delete(self.leader_key(workflow_id))
def expire_leader(self, workflow_id: str) -> None:
self._redis.expire(self.leader_key(workflow_id), SESSION_STATE_TTL_SECONDS)
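A small usage sketch with placeholder ids, illustrating how the session hash and the leader key interact; the TTL keeps stale entries from lingering if a worker dies before cleanup.

from repositories.workflow_collaboration_repository import (
    WorkflowCollaborationRepository,
    WorkflowSessionInfo,
)

repo = WorkflowCollaborationRepository()
info: WorkflowSessionInfo = {
    "user_id": "u-1",
    "username": "Jane",
    "avatar": None,
    "sid": "sid-1",
    "connected_at": 0,
}
repo.set_session_info("wf-1", info)

# The first session to ask becomes leader; later sessions see the existing one.
repo.set_leader_if_absent("wf-1", "sid-1")  # True
repo.set_leader_if_absent("wf-1", "sid-2")  # False
print(repo.get_current_leader("wf-1"))      # "sid-1"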

@ -161,6 +161,7 @@ class SystemFeatureModel(BaseModel):
enable_email_code_login: bool = False
enable_email_password_login: bool = True
enable_social_oauth_login: bool = False
enable_collaboration_mode: bool = False
is_allow_register: bool = False
is_allow_create_workspace: bool = False
is_email_setup: bool = False
@ -224,6 +225,7 @@ class FeatureService:
system_features.enable_email_code_login = dify_config.ENABLE_EMAIL_CODE_LOGIN
system_features.enable_email_password_login = dify_config.ENABLE_EMAIL_PASSWORD_LOGIN
system_features.enable_social_oauth_login = dify_config.ENABLE_SOCIAL_OAUTH_LOGIN
system_features.enable_collaboration_mode = dify_config.ENABLE_COLLABORATION_MODE
system_features.is_allow_register = dify_config.ALLOW_REGISTER
system_features.is_allow_create_workspace = dify_config.ALLOW_CREATE_WORKSPACE
system_features.is_email_setup = dify_config.MAIL_TYPE is not None and dify_config.MAIL_TYPE != ""
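Hedged illustration only: assuming the existing FeatureService.get_system_features() entry point (not shown in this hunk), the new flag surfaces to the console alongside the other toggles.

from services.feature_service import FeatureService

features = FeatureService.get_system_features()
# Mirrors dify_config.ENABLE_COLLABORATION_MODE (default False per SystemFeatureModel)
print(features.enable_collaboration_mode)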

@ -0,0 +1,196 @@
from __future__ import annotations
import logging
import time
from collections.abc import Mapping
from models.account import Account
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository, WorkflowSessionInfo
class WorkflowCollaborationService:
def __init__(self, repository: WorkflowCollaborationRepository, socketio) -> None:
self._repository = repository
self._socketio = socketio
def __repr__(self) -> str:
return f"{self.__class__.__name__}(repository={self._repository})"
def save_session(self, sid: str, user: Account) -> None:
self._socketio.save_session(
sid,
{
"user_id": user.id,
"username": user.name,
"avatar": user.avatar,
},
)
def register_session(self, workflow_id: str, sid: str) -> tuple[str, bool] | None:
session = self._socketio.get_session(sid)
user_id = session.get("user_id")
if not user_id:
return None
session_info: WorkflowSessionInfo = {
"user_id": str(user_id),
"username": str(session.get("username", "Unknown")),
"avatar": session.get("avatar"),
"sid": sid,
"connected_at": int(time.time()),
}
self._repository.set_session_info(workflow_id, session_info)
leader_sid = self.get_or_set_leader(workflow_id, sid)
is_leader = leader_sid == sid
self._socketio.enter_room(sid, workflow_id)
self.broadcast_online_users(workflow_id)
self._socketio.emit("status", {"isLeader": is_leader}, room=sid)
return str(user_id), is_leader
def disconnect_session(self, sid: str) -> None:
mapping = self._repository.get_sid_mapping(sid)
if not mapping:
return
workflow_id = mapping["workflow_id"]
self._repository.delete_session(workflow_id, sid)
self.handle_leader_disconnect(workflow_id, sid)
self.broadcast_online_users(workflow_id)
def relay_collaboration_event(self, sid: str, data: Mapping[str, object]) -> tuple[dict[str, str], int]:
mapping = self._repository.get_sid_mapping(sid)
if not mapping:
return {"msg": "unauthorized"}, 401
workflow_id = mapping["workflow_id"]
user_id = mapping["user_id"]
self.refresh_session_state(workflow_id, sid)
event_type = data.get("type")
event_data = data.get("data")
timestamp = data.get("timestamp", int(time.time()))
if not event_type:
return {"msg": "invalid event type"}, 400
self._socketio.emit(
"collaboration_update",
{"type": event_type, "userId": user_id, "data": event_data, "timestamp": timestamp},
room=workflow_id,
skip_sid=sid,
)
return {"msg": "event_broadcasted"}, 200
def relay_graph_event(self, sid: str, data: object) -> tuple[dict[str, str], int]:
mapping = self._repository.get_sid_mapping(sid)
if not mapping:
return {"msg": "unauthorized"}, 401
workflow_id = mapping["workflow_id"]
self.refresh_session_state(workflow_id, sid)
self._socketio.emit("graph_update", data, room=workflow_id, skip_sid=sid)
return {"msg": "graph_update_broadcasted"}, 200
def get_or_set_leader(self, workflow_id: str, sid: str) -> str:
current_leader = self._repository.get_current_leader(workflow_id)
if current_leader:
if self.is_session_active(workflow_id, current_leader):
return current_leader
self._repository.delete_session(workflow_id, current_leader)
self._repository.delete_leader(workflow_id)
was_set = self._repository.set_leader_if_absent(workflow_id, sid)
if was_set:
if current_leader:
self.broadcast_leader_change(workflow_id, sid)
return sid
current_leader = self._repository.get_current_leader(workflow_id)
if current_leader:
return current_leader
return sid
def handle_leader_disconnect(self, workflow_id: str, disconnected_sid: str) -> None:
current_leader = self._repository.get_current_leader(workflow_id)
if not current_leader:
return
if current_leader != disconnected_sid:
return
session_sids = self._repository.get_session_sids(workflow_id)
if session_sids:
new_leader_sid = session_sids[0]
self._repository.set_leader(workflow_id, new_leader_sid)
self.broadcast_leader_change(workflow_id, new_leader_sid)
else:
self._repository.delete_leader(workflow_id)
def broadcast_leader_change(self, workflow_id: str, new_leader_sid: str) -> None:
for sid in self._repository.get_session_sids(workflow_id):
try:
is_leader = sid == new_leader_sid
self._socketio.emit("status", {"isLeader": is_leader}, room=sid)
except Exception:
logging.exception("Failed to emit leader status to session %s", sid)
def get_current_leader(self, workflow_id: str) -> str | None:
return self._repository.get_current_leader(workflow_id)
def broadcast_online_users(self, workflow_id: str) -> None:
users = self._repository.list_sessions(workflow_id)
users.sort(key=lambda x: x.get("connected_at") or 0)
leader_sid = self.get_current_leader(workflow_id)
self._socketio.emit(
"online_users",
{"workflow_id": workflow_id, "users": users, "leader": leader_sid},
room=workflow_id,
)
def refresh_session_state(self, workflow_id: str, sid: str) -> None:
self._repository.refresh_session_state(workflow_id, sid)
self._ensure_leader(workflow_id, sid)
def _ensure_leader(self, workflow_id: str, sid: str) -> None:
current_leader = self._repository.get_current_leader(workflow_id)
if current_leader and self.is_session_active(workflow_id, current_leader):
self._repository.expire_leader(workflow_id)
return
if current_leader:
self._repository.delete_leader(workflow_id)
self._repository.set_leader(workflow_id, sid)
self.broadcast_leader_change(workflow_id, sid)
def is_session_active(self, workflow_id: str, sid: str) -> bool:
if not sid:
return False
try:
if not self._socketio.manager.is_connected(sid, "/"):
return False
except AttributeError:
return False
if not self._repository.session_exists(workflow_id, sid):
return False
if not self._repository.sid_mapping_exists(sid):
return False
return True
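A wiring sketch with placeholder ids, assuming socket_connect has already saved a session for the sid (as the handlers above do); it shows the register/disconnect cycle driving leader election.

from extensions.ext_socketio import sio
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
from services.workflow_collaboration_service import WorkflowCollaborationService

service = WorkflowCollaborationService(WorkflowCollaborationRepository(), sio)

# user_connect registers the session; the first session in the room becomes leader.
result = service.register_session("wf-1", "sid-1")
if result:
    user_id, is_leader = result

# On disconnect the session is removed; if it was the leader, the oldest
# remaining session is promoted and notified via a "status" emit.
service.disconnect_session("sid-1")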

@ -0,0 +1,345 @@
import logging
from collections.abc import Sequence
from sqlalchemy import desc, select
from sqlalchemy.orm import Session, selectinload
from werkzeug.exceptions import Forbidden, NotFound
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import uuid_value
from models import WorkflowComment, WorkflowCommentMention, WorkflowCommentReply
from models.account import Account
logger = logging.getLogger(__name__)
class WorkflowCommentService:
"""Service for managing workflow comments."""
@staticmethod
def _validate_content(content: str) -> None:
if len(content.strip()) == 0:
raise ValueError("Comment content cannot be empty")
if len(content) > 1000:
raise ValueError("Comment content cannot exceed 1000 characters")
@staticmethod
def get_comments(tenant_id: str, app_id: str) -> Sequence[WorkflowComment]:
"""Get all comments for a workflow."""
with Session(db.engine) as session:
# Get all comments with eager loading
stmt = (
select(WorkflowComment)
.options(selectinload(WorkflowComment.replies), selectinload(WorkflowComment.mentions))
.where(WorkflowComment.tenant_id == tenant_id, WorkflowComment.app_id == app_id)
.order_by(desc(WorkflowComment.created_at))
)
comments = session.scalars(stmt).all()
# Batch preload all Account objects to avoid N+1 queries
WorkflowCommentService._preload_accounts(session, comments)
return comments
@staticmethod
def _preload_accounts(session: Session, comments: Sequence[WorkflowComment]) -> None:
"""Batch preload Account objects for comments, replies, and mentions."""
# Collect all user IDs
user_ids: set[str] = set()
for comment in comments:
user_ids.add(comment.created_by)
if comment.resolved_by:
user_ids.add(comment.resolved_by)
user_ids.update(reply.created_by for reply in comment.replies)
user_ids.update(mention.mentioned_user_id for mention in comment.mentions)
if not user_ids:
return
# Batch query all accounts
accounts = session.scalars(select(Account).where(Account.id.in_(user_ids))).all()
account_map = {str(account.id): account for account in accounts}
# Cache accounts on objects
for comment in comments:
comment.cache_created_by_account(account_map.get(comment.created_by))
comment.cache_resolved_by_account(account_map.get(comment.resolved_by) if comment.resolved_by else None)
for reply in comment.replies:
reply.cache_created_by_account(account_map.get(reply.created_by))
for mention in comment.mentions:
mention.cache_mentioned_user_account(account_map.get(mention.mentioned_user_id))
@staticmethod
def get_comment(tenant_id: str, app_id: str, comment_id: str, session: Session | None = None) -> WorkflowComment:
"""Get a specific comment."""
def _get_comment(session: Session) -> WorkflowComment:
stmt = (
select(WorkflowComment)
.options(selectinload(WorkflowComment.replies), selectinload(WorkflowComment.mentions))
.where(
WorkflowComment.id == comment_id,
WorkflowComment.tenant_id == tenant_id,
WorkflowComment.app_id == app_id,
)
)
comment = session.scalar(stmt)
if not comment:
raise NotFound("Comment not found")
# Preload accounts to avoid N+1 queries
WorkflowCommentService._preload_accounts(session, [comment])
return comment
if session is not None:
return _get_comment(session)
else:
with Session(db.engine, expire_on_commit=False) as session:
return _get_comment(session)
@staticmethod
def create_comment(
tenant_id: str,
app_id: str,
created_by: str,
content: str,
position_x: float,
position_y: float,
mentioned_user_ids: list[str] | None = None,
) -> dict:
"""Create a new workflow comment."""
WorkflowCommentService._validate_content(content)
with Session(db.engine) as session:
comment = WorkflowComment(
tenant_id=tenant_id,
app_id=app_id,
position_x=position_x,
position_y=position_y,
content=content,
created_by=created_by,
)
session.add(comment)
session.flush() # Get the comment ID for mentions
# Create mentions if specified
mentioned_user_ids = mentioned_user_ids or []
for user_id in mentioned_user_ids:
if isinstance(user_id, str) and uuid_value(user_id):
mention = WorkflowCommentMention(
comment_id=comment.id,
reply_id=None,  # This is a comment mention, not a reply mention
mentioned_user_id=user_id,
)
session.add(mention)
session.commit()
# Return only what we need - id and created_at
return {"id": comment.id, "created_at": comment.created_at}
@staticmethod
def update_comment(
tenant_id: str,
app_id: str,
comment_id: str,
user_id: str,
content: str,
position_x: float | None = None,
position_y: float | None = None,
mentioned_user_ids: list[str] | None = None,
) -> dict:
"""Update a workflow comment."""
WorkflowCommentService._validate_content(content)
with Session(db.engine, expire_on_commit=False) as session:
# Get comment with validation
stmt = select(WorkflowComment).where(
WorkflowComment.id == comment_id,
WorkflowComment.tenant_id == tenant_id,
WorkflowComment.app_id == app_id,
)
comment = session.scalar(stmt)
if not comment:
raise NotFound("Comment not found")
# Only the creator can update the comment
if comment.created_by != user_id:
raise Forbidden("Only the comment creator can update it")
# Update comment fields
comment.content = content
if position_x is not None:
comment.position_x = position_x
if position_y is not None:
comment.position_y = position_y
# Update mentions - first remove existing mentions for this comment only (not replies)
existing_mentions = session.scalars(
select(WorkflowCommentMention).where(
WorkflowCommentMention.comment_id == comment.id,
WorkflowCommentMention.reply_id.is_(None), # Only comment mentions, not reply mentions
)
).all()
for mention in existing_mentions:
session.delete(mention)
# Add new mentions
mentioned_user_ids = mentioned_user_ids or []
for user_id_str in mentioned_user_ids:
if isinstance(user_id_str, str) and uuid_value(user_id_str):
mention = WorkflowCommentMention(
comment_id=comment.id,
reply_id=None, # This is a comment mention
mentioned_user_id=user_id_str,
)
session.add(mention)
session.commit()
return {"id": comment.id, "updated_at": comment.updated_at}
@staticmethod
def delete_comment(tenant_id: str, app_id: str, comment_id: str, user_id: str) -> None:
"""Delete a workflow comment."""
with Session(db.engine, expire_on_commit=False) as session:
comment = WorkflowCommentService.get_comment(tenant_id, app_id, comment_id, session)
# Only the creator can delete the comment
if comment.created_by != user_id:
raise Forbidden("Only the comment creator can delete it")
# Delete associated mentions (both comment and reply mentions)
mentions = session.scalars(
select(WorkflowCommentMention).where(WorkflowCommentMention.comment_id == comment_id)
).all()
for mention in mentions:
session.delete(mention)
# Delete associated replies
replies = session.scalars(
select(WorkflowCommentReply).where(WorkflowCommentReply.comment_id == comment_id)
).all()
for reply in replies:
session.delete(reply)
session.delete(comment)
session.commit()
@staticmethod
def resolve_comment(tenant_id: str, app_id: str, comment_id: str, user_id: str) -> WorkflowComment:
"""Resolve a workflow comment."""
with Session(db.engine, expire_on_commit=False) as session:
comment = WorkflowCommentService.get_comment(tenant_id, app_id, comment_id, session)
if comment.resolved:
return comment
comment.resolved = True
comment.resolved_at = naive_utc_now()
comment.resolved_by = user_id
session.commit()
return comment
@staticmethod
def create_reply(
comment_id: str, content: str, created_by: str, mentioned_user_ids: list[str] | None = None
) -> dict:
"""Add a reply to a workflow comment."""
WorkflowCommentService._validate_content(content)
with Session(db.engine, expire_on_commit=False) as session:
# Check if comment exists
comment = session.get(WorkflowComment, comment_id)
if not comment:
raise NotFound("Comment not found")
reply = WorkflowCommentReply(comment_id=comment_id, content=content, created_by=created_by)
session.add(reply)
session.flush() # Get the reply ID for mentions
# Create mentions if specified
mentioned_user_ids = mentioned_user_ids or []
for user_id in mentioned_user_ids:
if isinstance(user_id, str) and uuid_value(user_id):
# Create mention linking to specific reply
mention = WorkflowCommentMention(
comment_id=comment_id, reply_id=reply.id, mentioned_user_id=user_id
)
session.add(mention)
session.commit()
return {"id": reply.id, "created_at": reply.created_at}
@staticmethod
def update_reply(reply_id: str, user_id: str, content: str, mentioned_user_ids: list[str] | None = None) -> dict:
"""Update a comment reply."""
WorkflowCommentService._validate_content(content)
with Session(db.engine, expire_on_commit=False) as session:
reply = session.get(WorkflowCommentReply, reply_id)
if not reply:
raise NotFound("Reply not found")
# Only the creator can update the reply
if reply.created_by != user_id:
raise Forbidden("Only the reply creator can update it")
reply.content = content
# Update mentions - first remove existing mentions for this reply
existing_mentions = session.scalars(
select(WorkflowCommentMention).where(WorkflowCommentMention.reply_id == reply.id)
).all()
for mention in existing_mentions:
session.delete(mention)
# Add mentions
mentioned_user_ids = mentioned_user_ids or []
for user_id_str in mentioned_user_ids:
if isinstance(user_id_str, str) and uuid_value(user_id_str):
mention = WorkflowCommentMention(
comment_id=reply.comment_id, reply_id=reply.id, mentioned_user_id=user_id_str
)
session.add(mention)
session.commit()
session.refresh(reply) # Refresh to get updated timestamp
return {"id": reply.id, "updated_at": reply.updated_at}
@staticmethod
def delete_reply(reply_id: str, user_id: str) -> None:
"""Delete a comment reply."""
with Session(db.engine, expire_on_commit=False) as session:
reply = session.get(WorkflowCommentReply, reply_id)
if not reply:
raise NotFound("Reply not found")
# Only the creator can delete the reply
if reply.created_by != user_id:
raise Forbidden("Only the reply creator can delete it")
# Delete associated mentions first
mentions = session.scalars(
select(WorkflowCommentMention).where(WorkflowCommentMention.reply_id == reply_id)
).all()
for mention in mentions:
session.delete(mention)
session.delete(reply)
session.commit()
@staticmethod
def validate_comment_access(comment_id: str, tenant_id: str, app_id: str) -> WorkflowComment:
"""Validate that a comment belongs to the specified tenant and app."""
return WorkflowCommentService.get_comment(tenant_id, app_id, comment_id)
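A short usage sketch with placeholder ids (run inside the application's database context): create a comment with a mention, reply to it, then resolve it.

from services.workflow_comment_service import WorkflowCommentService

created = WorkflowCommentService.create_comment(
    tenant_id="tenant-1",
    app_id="app-1",
    created_by="user-1",
    content="Should this branch handle empty input?",
    position_x=120.0,
    position_y=80.0,
    mentioned_user_ids=["user-2"],
)
WorkflowCommentService.create_reply(
    comment_id=created["id"],
    content="Good catch, updated.",
    created_by="user-2",
)
WorkflowCommentService.resolve_comment("tenant-1", "app-1", created["id"], "user-2")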

@ -249,6 +249,78 @@ class WorkflowService:
# return draft workflow
return workflow
def update_draft_workflow_environment_variables(
self,
*,
app_model: App,
environment_variables: Sequence[VariableBase],
account: Account,
):
"""
Update draft workflow environment variables
"""
# fetch draft workflow by app_model
workflow = self.get_draft_workflow(app_model=app_model)
if not workflow:
raise ValueError("No draft workflow found.")
workflow.environment_variables = environment_variables
workflow.updated_by = account.id
workflow.updated_at = naive_utc_now()
# commit db session changes
db.session.commit()
def update_draft_workflow_conversation_variables(
self,
*,
app_model: App,
conversation_variables: Sequence[VariableBase],
account: Account,
):
"""
Update draft workflow conversation variables
"""
# fetch draft workflow by app_model
workflow = self.get_draft_workflow(app_model=app_model)
if not workflow:
raise ValueError("No draft workflow found.")
workflow.conversation_variables = conversation_variables
workflow.updated_by = account.id
workflow.updated_at = naive_utc_now()
# commit db session changes
db.session.commit()
def update_draft_workflow_features(
self,
*,
app_model: App,
features: dict,
account: Account,
):
"""
Update draft workflow features
"""
# fetch draft workflow by app_model
workflow = self.get_draft_workflow(app_model=app_model)
if not workflow:
raise ValueError("No draft workflow found.")
# validate features structure
self.validate_features_structure(app_model=app_model, features=features)
workflow.features = json.dumps(features)
workflow.updated_by = account.id
workflow.updated_at = naive_utc_now()
# commit db session changes
db.session.commit()
def publish_workflow(
self,
*,

@ -38,7 +38,7 @@ os.environ["OPENDAL_FS_ROOT"] = "/tmp/dify-storage"
os.environ.setdefault("STORAGE_TYPE", "opendal")
os.environ.setdefault("OPENDAL_SCHEME", "fs")
- _CACHED_APP = create_app()
+ _SIO_APP, _CACHED_APP = create_app()
@pytest.fixture

@ -364,7 +364,7 @@ def _create_app_with_containers() -> Flask:
# Create and configure the Flask application
logger.info("Initializing Flask application...")
- app = create_app()
+ sio_app, app = create_app()
logger.info("Flask application created successfully")
# Initialize database schema

@ -274,6 +274,7 @@ class TestFeatureService:
mock_config.ENABLE_EMAIL_CODE_LOGIN = True
mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
mock_config.ENABLE_COLLABORATION_MODE = True
mock_config.ALLOW_REGISTER = False
mock_config.ALLOW_CREATE_WORKSPACE = False
mock_config.MAIL_TYPE = "smtp"
@ -298,6 +299,7 @@ class TestFeatureService:
# Verify authentication settings
assert result.enable_email_code_login is True
assert result.enable_email_password_login is False
assert result.enable_collaboration_mode is True
assert result.is_allow_register is False
assert result.is_allow_create_workspace is False
@ -402,6 +404,7 @@ class TestFeatureService:
mock_config.ENABLE_EMAIL_CODE_LOGIN = True
mock_config.ENABLE_EMAIL_PASSWORD_LOGIN = True
mock_config.ENABLE_SOCIAL_OAUTH_LOGIN = False
mock_config.ENABLE_COLLABORATION_MODE = False
mock_config.ALLOW_REGISTER = True
mock_config.ALLOW_CREATE_WORKSPACE = True
mock_config.MAIL_TYPE = "smtp"
@ -423,6 +426,7 @@ class TestFeatureService:
assert result.enable_email_code_login is True
assert result.enable_email_password_login is True
assert result.enable_social_oauth_login is False
assert result.enable_collaboration_mode is False
assert result.is_allow_register is True
assert result.is_allow_create_workspace is True
assert result.is_email_setup is True

@ -0,0 +1,121 @@
import json
from unittest.mock import Mock
import pytest
from repositories import workflow_collaboration_repository as repo_module
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
class TestWorkflowCollaborationRepository:
@pytest.fixture
def mock_redis(self, monkeypatch: pytest.MonkeyPatch) -> Mock:
mock_redis = Mock()
monkeypatch.setattr(repo_module, "redis_client", mock_redis)
return mock_redis
def test_get_sid_mapping_returns_mapping(self, mock_redis: Mock) -> None:
# Arrange
mock_redis.get.return_value = b'{"workflow_id":"wf-1","user_id":"u-1"}'
repository = WorkflowCollaborationRepository()
# Act
result = repository.get_sid_mapping("sid-1")
# Assert
assert result == {"workflow_id": "wf-1", "user_id": "u-1"}
def test_list_sessions_filters_invalid_entries(self, mock_redis: Mock) -> None:
# Arrange
mock_redis.hgetall.return_value = {
b"sid-1": b'{"user_id":"u-1","username":"Jane","sid":"sid-1","connected_at":2}',
b"sid-2": b'{"username":"Missing","sid":"sid-2"}',
b"sid-3": b"not-json",
}
repository = WorkflowCollaborationRepository()
# Act
result = repository.list_sessions("wf-1")
# Assert
assert result == [
{
"user_id": "u-1",
"username": "Jane",
"avatar": None,
"sid": "sid-1",
"connected_at": 2,
}
]
def test_set_session_info_persists_payload(self, mock_redis: Mock) -> None:
# Arrange
mock_redis.exists.return_value = True
repository = WorkflowCollaborationRepository()
payload = {
"user_id": "u-1",
"username": "Jane",
"avatar": None,
"sid": "sid-1",
"connected_at": 1,
}
# Act
repository.set_session_info("wf-1", payload)
# Assert
assert mock_redis.hset.called
workflow_key, sid, session_json = mock_redis.hset.call_args.args
assert workflow_key == "workflow_online_users:wf-1"
assert sid == "sid-1"
assert json.loads(session_json)["user_id"] == "u-1"
assert mock_redis.set.called
def test_refresh_session_state_expires_keys(self, mock_redis: Mock) -> None:
# Arrange
mock_redis.exists.return_value = True
repository = WorkflowCollaborationRepository()
# Act
repository.refresh_session_state("wf-1", "sid-1")
# Assert
assert mock_redis.expire.call_count == 2
def test_get_current_leader_decodes_bytes(self, mock_redis: Mock) -> None:
# Arrange
mock_redis.get.return_value = b"sid-1"
repository = WorkflowCollaborationRepository()
# Act
result = repository.get_current_leader("wf-1")
# Assert
assert result == "sid-1"
def test_set_leader_if_absent_uses_nx(self, mock_redis: Mock) -> None:
# Arrange
mock_redis.set.return_value = True
repository = WorkflowCollaborationRepository()
# Act
result = repository.set_leader_if_absent("wf-1", "sid-1")
# Assert
assert result is True
_key, _value = mock_redis.set.call_args.args
assert _key == "workflow_leader:wf-1"
assert _value == "sid-1"
assert mock_redis.set.call_args.kwargs["nx"] is True
assert "ex" in mock_redis.set.call_args.kwargs
def test_get_session_sids_decodes(self, mock_redis: Mock) -> None:
# Arrange
mock_redis.hkeys.return_value = [b"sid-1", "sid-2"]
repository = WorkflowCollaborationRepository()
# Act
result = repository.get_session_sids("wf-1")
# Assert
assert result == ["sid-1", "sid-2"]

@ -0,0 +1,271 @@
from unittest.mock import Mock, patch
import pytest
from repositories.workflow_collaboration_repository import WorkflowCollaborationRepository
from services.workflow_collaboration_service import WorkflowCollaborationService
class TestWorkflowCollaborationService:
@pytest.fixture
def service(self) -> tuple[WorkflowCollaborationService, Mock, Mock]:
repository = Mock(spec=WorkflowCollaborationRepository)
socketio = Mock()
return WorkflowCollaborationService(repository, socketio), repository, socketio
def test_register_session_returns_leader_status(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, socketio = service
socketio.get_session.return_value = {"user_id": "u-1", "username": "Jane", "avatar": None}
with (
patch.object(collaboration_service, "get_or_set_leader", return_value="sid-1"),
patch.object(collaboration_service, "broadcast_online_users"),
):
# Act
result = collaboration_service.register_session("wf-1", "sid-1")
# Assert
assert result == ("u-1", True)
repository.set_session_info.assert_called_once()
socketio.enter_room.assert_called_once_with("sid-1", "wf-1")
socketio.emit.assert_called_once_with("status", {"isLeader": True}, room="sid-1")
def test_register_session_returns_none_when_missing_user(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, _repository, socketio = service
socketio.get_session.return_value = {}
# Act
result = collaboration_service.register_session("wf-1", "sid-1")
# Assert
assert result is None
def test_relay_collaboration_event_unauthorized(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_sid_mapping.return_value = None
# Act
result = collaboration_service.relay_collaboration_event("sid-1", {})
# Assert
assert result == ({"msg": "unauthorized"}, 401)
def test_relay_collaboration_event_emits_update(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, socketio = service
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
payload = {"type": "mouse_move", "data": {"x": 1}, "timestamp": 123}
# Act
result = collaboration_service.relay_collaboration_event("sid-1", payload)
# Assert
assert result == ({"msg": "event_broadcasted"}, 200)
socketio.emit.assert_called_once_with(
"collaboration_update",
{"type": "mouse_move", "userId": "u-1", "data": {"x": 1}, "timestamp": 123},
room="wf-1",
skip_sid="sid-1",
)
def test_relay_graph_event_unauthorized(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_sid_mapping.return_value = None
# Act
result = collaboration_service.relay_graph_event("sid-1", {"nodes": []})
# Assert
assert result == ({"msg": "unauthorized"}, 401)
def test_disconnect_session_no_mapping(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_sid_mapping.return_value = None
# Act
collaboration_service.disconnect_session("sid-1")
# Assert
repository.delete_session.assert_not_called()
def test_disconnect_session_cleans_up(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
with (
patch.object(collaboration_service, "handle_leader_disconnect") as handle_leader_disconnect,
patch.object(collaboration_service, "broadcast_online_users") as broadcast_online_users,
):
# Act
collaboration_service.disconnect_session("sid-1")
# Assert
repository.delete_session.assert_called_once_with("wf-1", "sid-1")
handle_leader_disconnect.assert_called_once_with("wf-1", "sid-1")
broadcast_online_users.assert_called_once_with("wf-1")
def test_get_or_set_leader_returns_active_leader(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_current_leader.return_value = "sid-1"
with patch.object(collaboration_service, "is_session_active", return_value=True):
# Act
result = collaboration_service.get_or_set_leader("wf-1", "sid-2")
# Assert
assert result == "sid-1"
repository.set_leader_if_absent.assert_not_called()
def test_get_or_set_leader_replaces_dead_leader(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_current_leader.return_value = "sid-1"
repository.set_leader_if_absent.return_value = True
with (
patch.object(collaboration_service, "is_session_active", return_value=False),
patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change,
):
# Act
result = collaboration_service.get_or_set_leader("wf-1", "sid-2")
# Assert
assert result == "sid-2"
repository.delete_session.assert_called_once_with("wf-1", "sid-1")
repository.delete_leader.assert_called_once_with("wf-1")
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
def test_get_or_set_leader_falls_back_to_existing(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_current_leader.side_effect = [None, "sid-3"]
repository.set_leader_if_absent.return_value = False
# Act
result = collaboration_service.get_or_set_leader("wf-1", "sid-2")
# Assert
assert result == "sid-3"
def test_handle_leader_disconnect_elects_new(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_current_leader.return_value = "sid-1"
repository.get_session_sids.return_value = ["sid-2"]
with patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change:
# Act
collaboration_service.handle_leader_disconnect("wf-1", "sid-1")
# Assert
repository.set_leader.assert_called_once_with("wf-1", "sid-2")
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
def test_handle_leader_disconnect_clears_when_empty(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_current_leader.return_value = "sid-1"
repository.get_session_sids.return_value = []
# Act
collaboration_service.handle_leader_disconnect("wf-1", "sid-1")
# Assert
repository.delete_leader.assert_called_once_with("wf-1")
def test_broadcast_online_users_sorts_and_emits(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, socketio = service
repository.list_sessions.return_value = [
{"user_id": "u-1", "username": "A", "avatar": None, "sid": "sid-1", "connected_at": 3},
{"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1},
]
repository.get_current_leader.return_value = "sid-1"
# Act
collaboration_service.broadcast_online_users("wf-1")
# Assert
socketio.emit.assert_called_once_with(
"online_users",
{
"workflow_id": "wf-1",
"users": [
{"user_id": "u-2", "username": "B", "avatar": None, "sid": "sid-2", "connected_at": 1},
{"user_id": "u-1", "username": "A", "avatar": None, "sid": "sid-1", "connected_at": 3},
],
"leader": "sid-1",
},
room="wf-1",
)
def test_refresh_session_state_expires_active_leader(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_current_leader.return_value = "sid-1"
with patch.object(collaboration_service, "is_session_active", return_value=True):
# Act
collaboration_service.refresh_session_state("wf-1", "sid-1")
# Assert
repository.refresh_session_state.assert_called_once_with("wf-1", "sid-1")
repository.expire_leader.assert_called_once_with("wf-1")
repository.set_leader.assert_not_called()
def test_refresh_session_state_sets_leader_when_missing(
self, service: tuple[WorkflowCollaborationService, Mock, Mock]
) -> None:
# Arrange
collaboration_service, repository, _socketio = service
repository.get_current_leader.return_value = None
with patch.object(collaboration_service, "broadcast_leader_change") as broadcast_leader_change:
# Act
collaboration_service.refresh_session_state("wf-1", "sid-2")
# Assert
repository.set_leader.assert_called_once_with("wf-1", "sid-2")
broadcast_leader_change.assert_called_once_with("wf-1", "sid-2")
def test_relay_graph_event_emits_update(self, service: tuple[WorkflowCollaborationService, Mock, Mock]) -> None:
# Arrange
collaboration_service, repository, socketio = service
repository.get_sid_mapping.return_value = {"workflow_id": "wf-1", "user_id": "u-1"}
# Act
result = collaboration_service.relay_graph_event("sid-1", {"nodes": []})
# Assert
assert result == ({"msg": "graph_update_broadcasted"}, 200)
repository.refresh_session_state.assert_called_once_with("wf-1", "sid-1")
socketio.emit.assert_called_once_with("graph_update", {"nodes": []}, room="wf-1", skip_sid="sid-1")

@ -0,0 +1,245 @@
from unittest.mock import MagicMock, Mock, patch
import pytest
from werkzeug.exceptions import Forbidden, NotFound
from services import workflow_comment_service as service_module
from services.workflow_comment_service import WorkflowCommentService
@pytest.fixture
def mock_session(monkeypatch: pytest.MonkeyPatch) -> Mock:
session = Mock()
context_manager = MagicMock()
context_manager.__enter__.return_value = session
context_manager.__exit__.return_value = False
mock_db = MagicMock()
mock_db.engine = Mock()
monkeypatch.setattr(service_module, "Session", Mock(return_value=context_manager))
monkeypatch.setattr(service_module, "db", mock_db)
return session
def _mock_scalars(result_list: list[object]) -> Mock:
scalars = Mock()
scalars.all.return_value = result_list
return scalars
class TestWorkflowCommentService:
def test_validate_content_rejects_empty(self) -> None:
with pytest.raises(ValueError):
WorkflowCommentService._validate_content(" ")
def test_validate_content_rejects_too_long(self) -> None:
with pytest.raises(ValueError):
WorkflowCommentService._validate_content("a" * 1001)
def test_create_comment_creates_mentions(self, mock_session: Mock) -> None:
comment = Mock()
comment.id = "comment-1"
comment.created_at = "ts"
with (
patch.object(service_module, "WorkflowComment", return_value=comment),
patch.object(service_module, "WorkflowCommentMention", return_value=Mock()),
patch.object(service_module, "uuid_value", side_effect=[True, False]),
):
result = WorkflowCommentService.create_comment(
tenant_id="tenant-1",
app_id="app-1",
created_by="user-1",
content="hello",
position_x=1.0,
position_y=2.0,
mentioned_user_ids=["user-2", "bad-id"],
)
assert result == {"id": "comment-1", "created_at": "ts"}
assert mock_session.add.call_args_list[0].args[0] is comment
assert mock_session.add.call_count == 2
mock_session.commit.assert_called_once()
def test_update_comment_raises_not_found(self, mock_session: Mock) -> None:
mock_session.scalar.return_value = None
with pytest.raises(NotFound):
WorkflowCommentService.update_comment(
tenant_id="tenant-1",
app_id="app-1",
comment_id="comment-1",
user_id="user-1",
content="hello",
)
def test_update_comment_raises_forbidden(self, mock_session: Mock) -> None:
comment = Mock()
comment.created_by = "owner"
mock_session.scalar.return_value = comment
with pytest.raises(Forbidden):
WorkflowCommentService.update_comment(
tenant_id="tenant-1",
app_id="app-1",
comment_id="comment-1",
user_id="intruder",
content="hello",
)
def test_update_comment_replaces_mentions(self, mock_session: Mock) -> None:
comment = Mock()
comment.id = "comment-1"
comment.created_by = "owner"
mock_session.scalar.return_value = comment
existing_mentions = [Mock(), Mock()]
mock_session.scalars.return_value = _mock_scalars(existing_mentions)
with patch.object(service_module, "uuid_value", side_effect=[True, False]):
result = WorkflowCommentService.update_comment(
tenant_id="tenant-1",
app_id="app-1",
comment_id="comment-1",
user_id="owner",
content="updated",
mentioned_user_ids=["user-2", "bad-id"],
)
assert result == {"id": "comment-1", "updated_at": comment.updated_at}
assert mock_session.delete.call_count == 2
assert mock_session.add.call_count == 1
mock_session.commit.assert_called_once()
def test_delete_comment_raises_forbidden(self, mock_session: Mock) -> None:
comment = Mock()
comment.created_by = "owner"
with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
with pytest.raises(Forbidden):
WorkflowCommentService.delete_comment("tenant-1", "app-1", "comment-1", "intruder")
def test_delete_comment_removes_related_entities(self, mock_session: Mock) -> None:
comment = Mock()
comment.created_by = "owner"
mentions = [Mock(), Mock()]
replies = [Mock()]
mock_session.scalars.side_effect = [_mock_scalars(mentions), _mock_scalars(replies)]
with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
WorkflowCommentService.delete_comment("tenant-1", "app-1", "comment-1", "owner")
assert mock_session.delete.call_count == 4
mock_session.commit.assert_called_once()
def test_resolve_comment_sets_fields(self, mock_session: Mock) -> None:
comment = Mock()
comment.resolved = False
comment.resolved_at = None
comment.resolved_by = None
with (
patch.object(WorkflowCommentService, "get_comment", return_value=comment),
patch.object(service_module, "naive_utc_now", return_value="now"),
):
result = WorkflowCommentService.resolve_comment("tenant-1", "app-1", "comment-1", "user-1")
assert result is comment
assert comment.resolved is True
assert comment.resolved_at == "now"
assert comment.resolved_by == "user-1"
mock_session.commit.assert_called_once()
def test_resolve_comment_noop_when_already_resolved(self, mock_session: Mock) -> None:
comment = Mock()
comment.resolved = True
with patch.object(WorkflowCommentService, "get_comment", return_value=comment):
result = WorkflowCommentService.resolve_comment("tenant-1", "app-1", "comment-1", "user-1")
assert result is comment
mock_session.commit.assert_not_called()
def test_create_reply_requires_comment(self, mock_session: Mock) -> None:
mock_session.get.return_value = None
with pytest.raises(NotFound):
WorkflowCommentService.create_reply("comment-1", "hello", "user-1")
def test_create_reply_creates_mentions(self, mock_session: Mock) -> None:
mock_session.get.return_value = Mock()
reply = Mock()
reply.id = "reply-1"
reply.created_at = "ts"
with (
patch.object(service_module, "WorkflowCommentReply", return_value=reply),
patch.object(service_module, "WorkflowCommentMention", return_value=Mock()),
patch.object(service_module, "uuid_value", side_effect=[True, False]),
):
result = WorkflowCommentService.create_reply(
comment_id="comment-1",
content="hello",
created_by="user-1",
mentioned_user_ids=["user-2", "bad-id"],
)
assert result == {"id": "reply-1", "created_at": "ts"}
assert mock_session.add.call_count == 2
mock_session.commit.assert_called_once()
def test_update_reply_raises_not_found(self, mock_session: Mock) -> None:
mock_session.get.return_value = None
with pytest.raises(NotFound):
WorkflowCommentService.update_reply("reply-1", "user-1", "hello")
def test_update_reply_raises_forbidden(self, mock_session: Mock) -> None:
reply = Mock()
reply.created_by = "owner"
mock_session.get.return_value = reply
with pytest.raises(Forbidden):
WorkflowCommentService.update_reply("reply-1", "intruder", "hello")
def test_update_reply_replaces_mentions(self, mock_session: Mock) -> None:
reply = Mock()
reply.id = "reply-1"
reply.comment_id = "comment-1"
reply.created_by = "owner"
reply.updated_at = "updated"
mock_session.get.return_value = reply
mock_session.scalars.return_value = _mock_scalars([Mock()])
with patch.object(service_module, "uuid_value", side_effect=[True, False]):
result = WorkflowCommentService.update_reply(
reply_id="reply-1",
user_id="owner",
content="new",
mentioned_user_ids=["user-2", "bad-id"],
)
assert result == {"id": "reply-1", "updated_at": "updated"}
assert mock_session.delete.call_count == 1
assert mock_session.add.call_count == 1
mock_session.commit.assert_called_once()
mock_session.refresh.assert_called_once_with(reply)
def test_delete_reply_raises_forbidden(self, mock_session: Mock) -> None:
reply = Mock()
reply.created_by = "owner"
mock_session.get.return_value = reply
with pytest.raises(Forbidden):
WorkflowCommentService.delete_reply("reply-1", "intruder")
def test_delete_reply_removes_mentions(self, mock_session: Mock) -> None:
reply = Mock()
reply.created_by = "owner"
mock_session.get.return_value = reply
mock_session.scalars.return_value = _mock_scalars([Mock(), Mock()])
WorkflowCommentService.delete_reply("reply-1", "owner")
assert mock_session.delete.call_count == 3
mock_session.commit.assert_called_once()
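The delete tests above assert four and three `session.delete()` calls respectively because the service is expected to cascade over mentions and replies before removing the comment or reply itself. A hedged standalone sketch of that cascade (helper name and parameters are illustrative, not the service's actual code):
def delete_comment_cascade(session, comment, mentions, replies):
    # Remove dependent rows first, then the comment: with 2 mentions and 1 reply
    # this yields the 2 + 1 + 1 = 4 delete calls asserted above.
    for mention in mentions:
        session.delete(mention)
    for reply in replies:
        session.delete(reply)
    session.delete(comment)
    session.commit()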

View File

@ -10,7 +10,7 @@ This test suite covers:
"""
import json
from unittest.mock import MagicMock, patch
from unittest.mock import MagicMock, Mock, patch
import pytest
@ -630,6 +630,79 @@ class TestWorkflowService:
with pytest.raises(ValueError, match="Invalid app mode"):
workflow_service.validate_features_structure(app, features)
# ==================== Draft Workflow Variable Update Tests ====================
# These tests verify updating draft workflow environment/conversation variables
def test_update_draft_workflow_environment_variables_updates_workflow(self, workflow_service, mock_db_session):
"""Test update_draft_workflow_environment_variables updates draft fields."""
app = TestWorkflowAssociatedDataFactory.create_app_mock()
account = TestWorkflowAssociatedDataFactory.create_account_mock()
workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
variables = [Mock()]
with (
patch.object(workflow_service, "get_draft_workflow", return_value=workflow),
patch("services.workflow_service.naive_utc_now", return_value="now"),
):
workflow_service.update_draft_workflow_environment_variables(
app_model=app,
environment_variables=variables,
account=account,
)
assert workflow.environment_variables == variables
assert workflow.updated_by == account.id
assert workflow.updated_at == "now"
mock_db_session.session.commit.assert_called_once()
def test_update_draft_workflow_environment_variables_raises_when_missing(self, workflow_service):
"""Test update_draft_workflow_environment_variables raises when draft missing."""
app = TestWorkflowAssociatedDataFactory.create_app_mock()
account = TestWorkflowAssociatedDataFactory.create_account_mock()
with patch.object(workflow_service, "get_draft_workflow", return_value=None):
with pytest.raises(ValueError, match="No draft workflow found."):
workflow_service.update_draft_workflow_environment_variables(
app_model=app,
environment_variables=[],
account=account,
)
def test_update_draft_workflow_conversation_variables_updates_workflow(self, workflow_service, mock_db_session):
"""Test update_draft_workflow_conversation_variables updates draft fields."""
app = TestWorkflowAssociatedDataFactory.create_app_mock()
account = TestWorkflowAssociatedDataFactory.create_account_mock()
workflow = TestWorkflowAssociatedDataFactory.create_workflow_mock()
variables = [Mock()]
with (
patch.object(workflow_service, "get_draft_workflow", return_value=workflow),
patch("services.workflow_service.naive_utc_now", return_value="now"),
):
workflow_service.update_draft_workflow_conversation_variables(
app_model=app,
conversation_variables=variables,
account=account,
)
assert workflow.conversation_variables == variables
assert workflow.updated_by == account.id
assert workflow.updated_at == "now"
mock_db_session.session.commit.assert_called_once()
def test_update_draft_workflow_conversation_variables_raises_when_missing(self, workflow_service):
"""Test update_draft_workflow_conversation_variables raises when draft missing."""
app = TestWorkflowAssociatedDataFactory.create_app_mock()
account = TestWorkflowAssociatedDataFactory.create_account_mock()
with patch.object(workflow_service, "get_draft_workflow", return_value=None):
with pytest.raises(ValueError, match="No draft workflow found."):
workflow_service.update_draft_workflow_conversation_variables(
app_model=app,
conversation_variables=[],
account=account,
)
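These tests pin down the expected behavior of the draft-variable update methods: copy the new variables onto the draft, stamp `updated_by`/`updated_at`, commit, and raise `ValueError("No draft workflow found.")` when no draft exists. A minimal standalone sketch of that behavior, with hypothetical parameter names, inferred only from the assertions above:
def update_draft_variables(workflow, variables, account, now, session, *, field="environment_variables"):
    # Hypothetical helper mirroring what the tests assert; the real methods live
    # in services.workflow_service and may differ in detail.
    if workflow is None:
        raise ValueError("No draft workflow found.")
    setattr(workflow, field, variables)
    workflow.updated_by = account.id
    workflow.updated_at = now
    session.commit()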
# ==================== Publish Workflow Tests ====================
# These tests verify creating published versions from draft workflows

api/uv.lock generated
View File

@ -583,6 +583,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/57/f4/a69c20ee4f660081a7dedb1ac57f29be9378e04edfcb90c526b923d4bebc/beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a", size = 142979, upload-time = "2023-04-07T15:02:50.77Z" },
]
[[package]]
name = "bidict"
version = "0.23.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9a/6e/026678aa5a830e07cd9498a05d3e7e650a4f56a42f267a53d22bcda1bdc9/bidict-0.23.1.tar.gz", hash = "sha256:03069d763bc387bbd20e7d49914e75fc4132a41937fa3405417e1a5a2d006d71", size = 29093, upload-time = "2024-02-18T19:09:05.748Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/37/e8730c3587a65eb5645d4aba2d27aae48e8003614d6aaf15dda67f702f1f/bidict-0.23.1-py3-none-any.whl", hash = "sha256:5dae8d4d79b552a71cbabc7deb25dfe8ce710b17ff41711e13010ead2abfc3e5", size = 32764, upload-time = "2024-02-18T19:09:04.156Z" },
]
[[package]]
name = "billiard"
version = "4.2.3"
@ -1392,6 +1401,7 @@ dependencies = [
{ name = "flask-restx" },
{ name = "flask-sqlalchemy" },
{ name = "gevent" },
{ name = "gevent-websocket" },
{ name = "gmpy2" },
{ name = "google-api-core" },
{ name = "google-api-python-client" },
@ -1442,6 +1452,7 @@ dependencies = [
{ name = "pypdfium2" },
{ name = "python-docx" },
{ name = "python-dotenv" },
{ name = "python-socketio" },
{ name = "pyyaml" },
{ name = "readabilipy" },
{ name = "redis", extra = ["hiredis"] },
@ -1591,6 +1602,7 @@ requires-dist = [
{ name = "flask-restx", specifier = "~=1.3.0" },
{ name = "flask-sqlalchemy", specifier = "~=3.1.1" },
{ name = "gevent", specifier = "~=25.9.1" },
{ name = "gevent-websocket", specifier = "~=0.10.1" },
{ name = "gmpy2", specifier = "~=2.2.1" },
{ name = "google-api-core", specifier = "==2.18.0" },
{ name = "google-api-python-client", specifier = "==2.90.0" },
@ -1641,6 +1653,7 @@ requires-dist = [
{ name = "pypdfium2", specifier = "==5.2.0" },
{ name = "python-docx", specifier = "~=1.1.0" },
{ name = "python-dotenv", specifier = "==1.0.1" },
{ name = "python-socketio", specifier = "~=5.13.0" },
{ name = "pyyaml", specifier = "~=6.0.1" },
{ name = "readabilipy", specifier = "~=0.3.0" },
{ name = "redis", extras = ["hiredis"], specifier = "~=6.1.0" },
@ -2213,6 +2226,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d5/98/caf06d5d22a7c129c1fb2fc1477306902a2c8ddfd399cd26bbbd4caf2141/gevent-25.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:4acd6bcd5feabf22c7c5174bd3b9535ee9f088d2bbce789f740ad8d6554b18f3", size = 1682837, upload-time = "2025-09-17T19:48:47.318Z" },
]
[[package]]
name = "gevent-websocket"
version = "0.10.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "gevent" },
]
sdist = { url = "https://files.pythonhosted.org/packages/98/d2/6fa19239ff1ab072af40ebf339acd91fb97f34617c2ee625b8e34bf42393/gevent-websocket-0.10.1.tar.gz", hash = "sha256:7eaef32968290c9121f7c35b973e2cc302ffb076d018c9068d2f5ca8b2d85fb0", size = 18366, upload-time = "2017-03-12T22:46:05.68Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7b/84/2dc373eb6493e00c884cc11e6c059ec97abae2678d42f06bf780570b0193/gevent_websocket-0.10.1-py3-none-any.whl", hash = "sha256:17b67d91282f8f4c973eba0551183fc84f56f1c90c8f6b6b30256f31f66f5242", size = 22987, upload-time = "2017-03-12T22:46:03.611Z" },
]
[[package]]
name = "gitdb"
version = "4.0.12"
@ -5218,6 +5243,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" },
]
[[package]]
name = "python-engineio"
version = "4.12.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "simple-websocket" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/d8/63e5535ab21dc4998ba1cfe13690ccf122883a38f025dca24d6e56c05eba/python_engineio-4.12.3.tar.gz", hash = "sha256:35633e55ec30915e7fc8f7e34ca8d73ee0c080cec8a8cd04faf2d7396f0a7a7a", size = 91910, upload-time = "2025-09-28T06:31:36.765Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/f0/c5aa0a69fd9326f013110653543f36ece4913c17921f3e1dbd78e1b423ee/python_engineio-4.12.3-py3-none-any.whl", hash = "sha256:7c099abb2a27ea7ab429c04da86ab2d82698cdd6c52406cb73766fe454feb7e1", size = 59637, upload-time = "2025-09-28T06:31:35.354Z" },
]
[[package]]
name = "python-http-client"
version = "3.3.7"
@ -5274,6 +5311,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d9/4f/00be2196329ebbff56ce564aa94efb0fbc828d00de250b1980de1a34ab49/python_pptx-1.0.2-py3-none-any.whl", hash = "sha256:160838e0b8565a8b1f67947675886e9fea18aa5e795db7ae531606d68e785cba", size = 472788, upload-time = "2024-08-07T17:33:28.192Z" },
]
[[package]]
name = "python-socketio"
version = "5.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "bidict" },
{ name = "python-engineio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/21/1a/396d50ccf06ee539fa758ce5623b59a9cb27637fc4b2dc07ed08bf495e77/python_socketio-5.13.0.tar.gz", hash = "sha256:ac4e19a0302ae812e23b712ec8b6427ca0521f7c582d6abb096e36e24a263029", size = 121125, upload-time = "2025-04-12T15:46:59.933Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/32/b4fb8585d1be0f68bde7e110dffbcf354915f77ad8c778563f0ad9655c02/python_socketio-5.13.0-py3-none-any.whl", hash = "sha256:51f68d6499f2df8524668c24bcec13ba1414117cfb3a90115c559b601ab10caf", size = 77800, upload-time = "2025-04-12T15:46:58.412Z" },
]
[[package]]
name = "pytz"
version = "2025.2"
@ -5773,6 +5823,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" },
]
[[package]]
name = "simple-websocket"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "wsproto" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b0/d4/bfa032f961103eba93de583b161f0e6a5b63cebb8f2c7d0c6e6efe1e3d2e/simple_websocket-1.1.0.tar.gz", hash = "sha256:7939234e7aa067c534abdab3a9ed933ec9ce4691b0713c78acb195560aa52ae4", size = 17300, upload-time = "2024-10-10T22:39:31.412Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/52/59/0782e51887ac6b07ffd1570e0364cf901ebc36345fea669969d2084baebb/simple_websocket-1.1.0-py3-none-any.whl", hash = "sha256:4af6069630a38ed6c561010f0e11a5bc0d4ca569b36306eb257cd9a192497c8c", size = 13842, upload-time = "2024-10-10T22:39:29.645Z" },
]
[[package]]
name = "six"
version = "1.17.0"
@ -7193,6 +7255,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ff/21/abdedb4cdf6ff41ebf01a74087740a709e2edb146490e4d9beea054b0b7a/wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", size = 23362, upload-time = "2023-11-09T06:33:28.271Z" },
]
[[package]]
name = "wsproto"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "h11" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/4a/44d3c295350d776427904d73c189e10aeae66d7f555bb2feee16d1e4ba5a/wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065", size = 53425, upload-time = "2022-08-23T19:58:21.447Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/78/58/e860788190eba3bcce367f74d29c4675466ce8dddfba85f7827588416f01/wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736", size = 24226, upload-time = "2022-08-23T19:58:19.96Z" },
]
[[package]]
name = "xinference-client"
version = "1.2.2"

View File

@ -129,6 +129,10 @@ MIGRATION_ENABLED=true
# The default value is 300 seconds.
FILES_ACCESS_TIMEOUT=300
# Collaboration mode toggle
# To enable collaboration features, you also need to set SERVER_WORKER_CLASS=geventwebsocket.gunicorn.workers.GeventWebSocketWorker
ENABLE_COLLABORATION_MODE=false
# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60
@ -164,6 +168,7 @@ SERVER_WORKER_AMOUNT=1
# Modifying it may also decrease throughput.
#
# It is strongly discouraged to change this parameter.
# If collaboration mode is enabled, this must be set to geventwebsocket.gunicorn.workers.GeventWebSocketWorker
SERVER_WORKER_CLASS=gevent
# Default number of worker connections, the default is 10.
@ -401,6 +406,8 @@ CONSOLE_CORS_ALLOW_ORIGINS=*
COOKIE_DOMAIN=
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost
NEXT_PUBLIC_BATCH_CONCURRENCY=5
# ------------------------------

View File

@ -139,6 +139,7 @@ services:
APP_API_URL: ${APP_API_URL:-}
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}

View File

@ -33,6 +33,7 @@ x-shared-env: &shared-api-worker-env
OPENAI_API_BASE: ${OPENAI_API_BASE:-https://api.openai.com/v1}
MIGRATION_ENABLED: ${MIGRATION_ENABLED:-true}
FILES_ACCESS_TIMEOUT: ${FILES_ACCESS_TIMEOUT:-300}
ENABLE_COLLABORATION_MODE: ${ENABLE_COLLABORATION_MODE:-false}
ACCESS_TOKEN_EXPIRE_MINUTES: ${ACCESS_TOKEN_EXPIRE_MINUTES:-60}
REFRESH_TOKEN_EXPIRE_DAYS: ${REFRESH_TOKEN_EXPIRE_DAYS:-30}
APP_DEFAULT_ACTIVE_REQUESTS: ${APP_DEFAULT_ACTIVE_REQUESTS:-0}
@ -109,6 +110,7 @@ x-shared-env: &shared-api-worker-env
CONSOLE_CORS_ALLOW_ORIGINS: ${CONSOLE_CORS_ALLOW_ORIGINS:-*}
COOKIE_DOMAIN: ${COOKIE_DOMAIN:-}
NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
NEXT_PUBLIC_BATCH_CONCURRENCY: ${NEXT_PUBLIC_BATCH_CONCURRENCY:-5}
STORAGE_TYPE: ${STORAGE_TYPE:-opendal}
OPENDAL_SCHEME: ${OPENDAL_SCHEME:-fs}
@ -824,6 +826,7 @@ services:
APP_API_URL: ${APP_API_URL:-}
AMPLITUDE_API_KEY: ${AMPLITUDE_API_KEY:-}
NEXT_PUBLIC_COOKIE_DOMAIN: ${NEXT_PUBLIC_COOKIE_DOMAIN:-}
NEXT_PUBLIC_SOCKET_URL: ${NEXT_PUBLIC_SOCKET_URL:-ws://localhost}
SENTRY_DSN: ${WEB_SENTRY_DSN:-}
NEXT_TELEMETRY_DISABLED: ${NEXT_TELEMETRY_DISABLED:-0}
TEXT_GENERATION_TIMEOUT_MS: ${TEXT_GENERATION_TIMEOUT_MS:-60000}

View File

@ -14,6 +14,14 @@ server {
include proxy.conf;
}
location /socket.io/ {
proxy_pass http://api:5001;
include proxy.conf;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_cache_bypass $http_upgrade;
}
location /v1 {
proxy_pass http://api:5001;
include proxy.conf;

View File

@ -5,7 +5,7 @@ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Port $server_port;
proxy_http_version 1.1;
proxy_set_header Connection "";
# proxy_set_header Connection "";
proxy_buffering off;
proxy_read_timeout ${NGINX_PROXY_READ_TIMEOUT};
proxy_send_timeout ${NGINX_PROXY_SEND_TIMEOUT};

View File

@ -14,6 +14,8 @@ NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_PUBLIC_API_PREFIX=http://localhost:5001/api
# When the frontend and backend run on different subdomains, set NEXT_PUBLIC_COOKIE_DOMAIN=1.
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost:5001
# The API PREFIX for MARKETPLACE
NEXT_PUBLIC_MARKETPLACE_API_PREFIX=https://marketplace.dify.ai/api/v1

View File

@ -43,6 +43,8 @@ NEXT_PUBLIC_EDITION=SELF_HOSTED
# example: http://cloud.dify.ai/console/api
NEXT_PUBLIC_API_PREFIX=http://localhost:5001/console/api
NEXT_PUBLIC_COOKIE_DOMAIN=
# WebSocket server URL.
NEXT_PUBLIC_SOCKET_URL=ws://localhost:5001
# The URL for Web APP, refers to the Web App base URL of WEB service if web app domain is different from
# console or api domain.
# example: http://udify.app/api

View File

@ -5,7 +5,8 @@ import type { BlockEnum } from '@/app/components/workflow/types'
import type { UpdateAppSiteCodeResponse } from '@/models/app'
import type { App } from '@/types/app'
import type { I18nKeysByPrefix } from '@/types/i18n'
import { useCallback, useMemo } from 'react'
import * as React from 'react'
import { useCallback, useEffect, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import AppCard from '@/app/components/app/overview/app-card'
@ -14,6 +15,8 @@ import { useStore as useAppStore } from '@/app/components/app/store'
import Loading from '@/app/components/base/loading'
import { ToastContext } from '@/app/components/base/toast'
import MCPServiceCard from '@/app/components/tools/mcp/mcp-service-card'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { isTriggerNode } from '@/app/components/workflow/types'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import {
@ -74,28 +77,59 @@ const CardView: FC<ICardViewProps> = ({ appId, isInPanel, className }) => {
? buildTriggerModeMessage(t('mcp.server.title', { ns: 'tools' }))
: null
const updateAppDetail = async () => {
const updateAppDetail = useCallback(async () => {
try {
const res = await fetchAppDetail({ url: '/apps', id: appId })
setAppDetail({ ...res })
}
catch (error) { console.error(error) }
}
catch (error) {
console.error(error)
}
}, [appId, setAppDetail])
const handleCallbackResult = (err: Error | null, message?: I18nKeysByPrefix<'common', 'actionMsg.'>) => {
const type = err ? 'error' : 'success'
message ||= (type === 'success' ? 'modifiedSuccessfully' : 'modifiedUnsuccessfully')
if (type === 'success')
if (type === 'success') {
updateAppDetail()
// Emit collaboration event to notify other clients of app state changes
const socket = webSocketClient.getSocket(appId)
if (socket) {
socket.emit('collaboration_event', {
type: 'app_state_update',
data: { timestamp: Date.now() },
timestamp: Date.now(),
})
}
}
notify({
type,
message: t(`actionMsg.${message}`, { ns: 'common' }) as string,
})
}
// Listen for collaborative app state updates from other clients
useEffect(() => {
if (!appId)
return
const unsubscribe = collaborationManager.onAppStateUpdate(async () => {
try {
// Update app detail when other clients modify app state
await updateAppDetail()
}
catch (error) {
console.error('app state update failed:', error)
}
})
return unsubscribe
}, [appId, updateAppDetail])
const onChangeSiteStatus = async (value: boolean) => {
const [err] = await asyncRunSafe<App>(
updateAppSiteStatus({

View File

@ -14,7 +14,7 @@ import {
import dynamic from 'next/dynamic'
import { useRouter } from 'next/navigation'
import * as React from 'react'
import { useCallback, useState } from 'react'
import { useCallback, useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import CardView from '@/app/(commonLayout)/app/(appDetailLayout)/[appId]/overview/card-view'
@ -22,10 +22,12 @@ import { useStore as useAppStore } from '@/app/components/app/store'
import Button from '@/app/components/base/button'
import ContentDialog from '@/app/components/base/content-dialog'
import { ToastContext } from '@/app/components/base/toast'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useAppContext } from '@/context/app-context'
import { useProviderContext } from '@/context/provider-context'
import { copyApp, deleteApp, exportAppConfig, updateAppInfo } from '@/service/apps'
import { copyApp, deleteApp, exportAppConfig, fetchAppDetail, updateAppInfo } from '@/service/apps'
import { useInvalidateAppList } from '@/service/use-apps'
import { fetchWorkflowDraft } from '@/service/workflow'
import { AppModeEnum } from '@/types/app'
@ -77,6 +79,19 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
const [secretEnvList, setSecretEnvList] = useState<EnvironmentVariable[]>([])
const [showExportWarning, setShowExportWarning] = useState(false)
const emitAppMetaUpdate = useCallback(() => {
if (!appDetail?.id)
return
const socket = webSocketClient.getSocket(appDetail.id)
if (socket) {
socket.emit('collaboration_event', {
type: 'app_meta_update',
data: { timestamp: Date.now() },
timestamp: Date.now(),
})
}
}, [appDetail])
const onEdit: CreateAppModalProps['onConfirm'] = useCallback(async ({
name,
icon_type,
@ -105,11 +120,12 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
message: t('editDone', { ns: 'app' }),
})
setAppDetail(app)
emitAppMetaUpdate()
}
catch {
notify({ type: 'error', message: t('editFailed', { ns: 'app' }) })
}
}, [appDetail, notify, setAppDetail, t])
}, [appDetail, notify, setAppDetail, t, emitAppMetaUpdate])
const onCopy: DuplicateAppModalProps['onConfirm'] = async ({ name, icon_type, icon, icon_background }) => {
if (!appDetail)
@ -207,6 +223,23 @@ const AppInfo = ({ expand, onlyShowDetail = false, openState = false, onDetailEx
setShowConfirmDelete(false)
}, [appDetail, invalidateAppList, notify, onPlanInfoChanged, replace, setAppDetail, t])
useEffect(() => {
if (!appDetail?.id)
return
const unsubscribe = collaborationManager.onAppMetaUpdate(async () => {
try {
const res = await fetchAppDetail({ url: '/apps', id: appDetail.id })
setAppDetail({ ...res })
}
catch (error) {
console.error('failed to refresh app detail from collaboration update:', error)
}
})
return unsubscribe
}, [appDetail?.id, setAppDetail])
const { isCurrentWorkspaceEditor } = useAppContext()
if (!appDetail)

View File

@ -1,6 +1,7 @@
import type { AppPublisherProps } from '@/app/components/app/app-publisher'
import type { ModelAndParameter } from '@/app/components/app/configuration/debug/types'
import type { FileUpload } from '@/app/components/base/features/types'
import type { PublishWorkflowParams } from '@/types/workflow'
import { produce } from 'immer'
import * as React from 'react'
import { useCallback, useState } from 'react'
@ -13,7 +14,7 @@ import { SupportUploadFileTypes } from '@/app/components/workflow/types'
import { Resolution } from '@/types/app'
type Props = Omit<AppPublisherProps, 'onPublish'> & {
onPublish?: (modelAndParameter?: ModelAndParameter, features?: any) => Promise<any> | any
onPublish?: (params?: ModelAndParameter | PublishWorkflowParams, features?: any) => Promise<any> | any
publishedConfig?: any
resetAppConfig?: () => void
}
@ -62,8 +63,8 @@ const FeaturesWrappedAppPublisher = (props: Props) => {
setRestoreConfirmOpen(false)
}, [featuresStore, props])
const handlePublish = useCallback((modelAndParameter?: ModelAndParameter) => {
return props.onPublish?.(modelAndParameter, features)
const handlePublish = useCallback((params?: ModelAndParameter | PublishWorkflowParams) => {
return props.onPublish?.(params, features)
}, [features, props])
return (

View File

@ -1,5 +1,7 @@
import type { ModelAndParameter } from '../configuration/debug/types'
import type { CollaborationUpdate } from '@/app/components/workflow/collaboration/types/collaboration'
import type { InputVar, Variable } from '@/app/components/workflow/types'
import type { InstalledApp } from '@/models/explore'
import type { I18nKeysByPrefix } from '@/types/i18n'
import type { PublishWorkflowParams } from '@/types/workflow'
import {
@ -18,6 +20,7 @@ import { useKeyPress } from 'ahooks'
import {
memo,
useCallback,
useContext,
useEffect,
useMemo,
useState,
@ -35,6 +38,9 @@ import {
} from '@/app/components/base/portal-to-follow-elem'
import UpgradeBtn from '@/app/components/billing/upgrade-btn'
import WorkflowToolConfigureButton from '@/app/components/tools/workflow-tool/configure-button'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { WorkflowContext } from '@/app/components/workflow/context'
import { appDefaultIconBackground } from '@/config'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { useAsyncWindowOpen } from '@/hooks/use-async-window-open'
@ -43,6 +49,8 @@ import { AccessMode } from '@/models/access-control'
import { useAppWhiteListSubjects, useGetUserCanAccessApp } from '@/service/access-control'
import { fetchAppDetailDirect } from '@/service/apps'
import { fetchInstalledAppList } from '@/service/explore'
import { useInvalidateAppWorkflow } from '@/service/use-workflow'
import { fetchPublishedWorkflow } from '@/service/workflow'
import { AppModeEnum } from '@/types/app'
import { basePath } from '@/utils/var'
import Divider from '../../base/divider'
@ -56,6 +64,10 @@ import SuggestedAction from './suggested-action'
type AccessModeLabel = I18nKeysByPrefix<'app', 'accessControlDialog.accessItems.'>
type InstalledAppsResponse = {
installed_apps?: InstalledApp[]
}
const ACCESS_MODE_MAP: Record<AccessMode, { label: AccessModeLabel, icon: React.ElementType }> = {
[AccessMode.ORGANIZATION]: {
label: 'organization',
@ -102,8 +114,8 @@ export type AppPublisherProps = {
debugWithMultipleModel?: boolean
multipleModelConfigs?: ModelAndParameter[]
/** modelAndParameter is passed when debugWithMultipleModel is true */
onPublish?: (params?: any) => Promise<any> | any
onRestore?: () => Promise<any> | any
onPublish?: (params?: ModelAndParameter | PublishWorkflowParams) => Promise<void> | void
onRestore?: () => Promise<void> | void
onToggle?: (state: boolean) => void
crossAxisOffset?: number
toolPublished?: boolean
@ -146,6 +158,7 @@ const AppPublisher = ({
const [isAppAccessSet, setIsAppAccessSet] = useState(true)
const [embeddingModalOpen, setEmbeddingModalOpen] = useState(false)
const workflowStore = useContext(WorkflowContext)
const appDetail = useAppStore(state => state.appDetail)
const setAppDetail = useAppStore(s => s.setAppDetail)
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
@ -158,6 +171,7 @@ const AppPublisher = ({
const { data: userCanAccessApp, isLoading: isGettingUserCanAccessApp, refetch } = useGetUserCanAccessApp({ appId: appDetail?.id, enabled: false })
const { data: appAccessSubjects, isLoading: isGettingAppWhiteListSubjects } = useAppWhiteListSubjects(appDetail?.id, open && systemFeatures.webapp_auth.enabled && appDetail?.access_mode === AccessMode.SPECIFIC_GROUPS_MEMBERS)
const invalidateAppWorkflow = useInvalidateAppWorkflow()
const openAsyncWindow = useAsyncWindowOpen()
const noAccessPermission = useMemo(() => systemFeatures.webapp_auth.enabled && appDetail && appDetail.access_mode !== AccessMode.EXTERNAL_MEMBERS && !userCanAccessApp?.result, [systemFeatures, appDetail, userCanAccessApp])
@ -193,12 +207,39 @@ const AppPublisher = ({
try {
await onPublish?.(params)
setPublished(true)
const appId = appDetail?.id
const socket = appId ? webSocketClient.getSocket(appId) : null
console.warn('[app-publisher] publish success', {
appId,
hasSocket: Boolean(socket),
})
if (appId)
invalidateAppWorkflow(appId)
else
console.warn('[app-publisher] missing appId, skip workflow invalidate and socket emit')
if (socket) {
const timestamp = Date.now()
socket.emit('collaboration_event', {
type: 'app_publish_update',
data: {
action: 'published',
timestamp,
},
timestamp,
})
}
else if (appId) {
console.warn('[app-publisher] socket not ready, skip collaboration_event emit', { appId })
}
trackEvent('app_published_time', { action_mode: 'app', app_id: appDetail?.id, app_name: appDetail?.name })
}
catch {
catch (error) {
console.warn('[app-publisher] publish failed', error)
setPublished(false)
}
}, [appDetail, onPublish])
}, [appDetail, onPublish, invalidateAppWorkflow])
const handleRestore = useCallback(async () => {
try {
@ -227,9 +268,10 @@ const AppPublisher = ({
await openAsyncWindow(async () => {
if (!appDetail?.id)
throw new Error('App not found')
const { installed_apps }: any = await fetchInstalledAppList(appDetail?.id) || {}
if (installed_apps?.length > 0)
return `${basePath}/explore/installed/${installed_apps[0].id}`
const response = (await fetchInstalledAppList(appDetail?.id)) as InstalledAppsResponse
const installedApps = response?.installed_apps
if (installedApps?.length)
return `${basePath}/explore/installed/${installedApps[0].id}`
throw new Error('No app found in Explore')
}, {
onError: (err) => {
@ -257,6 +299,29 @@ const AppPublisher = ({
handlePublish()
}, { exactMatch: true, useCapture: true })
useEffect(() => {
const appId = appDetail?.id
if (!appId)
return
const unsubscribe = collaborationManager.onAppPublishUpdate((update: CollaborationUpdate) => {
const action = typeof update.data.action === 'string' ? update.data.action : undefined
if (action === 'published') {
invalidateAppWorkflow(appId)
fetchPublishedWorkflow(`/apps/${appId}/workflows/publish`)
.then((publishedWorkflow) => {
if (publishedWorkflow?.created_at)
workflowStore?.getState().setPublishedAt(publishedWorkflow.created_at)
})
.catch((error) => {
console.warn('[app-publisher] refresh published workflow failed', error)
})
}
})
return unsubscribe
}, [appDetail?.id, invalidateAppWorkflow, workflowStore])
const hasPublishedVersion = !!publishedAt
const workflowToolDisabled = !hasPublishedVersion || !workflowToolAvailable
const workflowToolMessage = workflowToolDisabled ? t('common.workflowAsToolDisabledHint', { ns: 'workflow' }) : undefined

View File

@ -18,6 +18,7 @@ import type {
TextToSpeechConfig,
} from '@/models/debug'
import type { ModelConfig as BackendModelConfig, UserInputFormItem, VisionSettings } from '@/types/app'
import type { PublishWorkflowParams } from '@/types/workflow'
import { CodeBracketIcon } from '@heroicons/react/20/solid'
import { useBoolean, useGetState } from 'ahooks'
import { clone } from 'es-toolkit/object'
@ -760,7 +761,8 @@ const Configuration: FC = () => {
else { return promptEmpty }
})()
const contextVarEmpty = mode === AppModeEnum.COMPLETION && dataSets.length > 0 && !hasSetContextVar
const onPublish = async (modelAndParameter?: ModelAndParameter, features?: FeaturesData) => {
const onPublish = async (params?: ModelAndParameter | PublishWorkflowParams, features?: FeaturesData) => {
const modelAndParameter = params && 'model' in params ? params : undefined
const modelId = modelAndParameter?.model || modelConfig.model_id
const promptTemplate = modelConfig.configs.prompt_template
const promptVariables = modelConfig.configs.prompt_variables

View File

@ -5,6 +5,7 @@ import type { HtmlContentProps } from '@/app/components/base/popover'
import type { Tag } from '@/app/components/base/tag-management/constant'
import type { CreateAppModalProps } from '@/app/components/explore/create-app-modal'
import type { EnvironmentVariable } from '@/app/components/workflow/types'
import type { WorkflowOnlineUser } from '@/models/app'
import type { App } from '@/types/app'
import { RiBuildingLine, RiGlobalLine, RiLockLine, RiMoreFill, RiVerifiedBadgeLine } from '@remixicon/react'
import dynamic from 'next/dynamic'
@ -20,6 +21,7 @@ import CustomPopover from '@/app/components/base/popover'
import TagSelector from '@/app/components/base/tag-management/selector'
import Toast, { ToastContext } from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { UserAvatarList } from '@/app/components/base/user-avatar-list'
import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
@ -58,9 +60,10 @@ const AccessControl = dynamic(() => import('@/app/components/app/app-access-cont
export type AppCardProps = {
app: App
onRefresh?: () => void
onlineUsers?: WorkflowOnlineUser[]
}
const AppCard = ({ app, onRefresh }: AppCardProps) => {
const AppCard = ({ app, onRefresh, onlineUsers = [] }: AppCardProps) => {
const { t } = useTranslation()
const { notify } = useContext(ToastContext)
const systemFeatures = useGlobalPublicStore(s => s.systemFeatures)
@ -348,6 +351,19 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
return `${t('segment.editedAt', { ns: 'datasetDocuments' })} ${timeText}`
}, [app.updated_at, app.created_at])
const onlineUserAvatars = useMemo(() => {
if (!onlineUsers.length)
return []
return onlineUsers
.map(user => ({
id: user.user_id || user.sid || '',
name: user.username || 'User',
avatar_url: user.avatar || undefined,
}))
.filter(user => !!user.id)
}, [onlineUsers])
return (
<>
<div
@ -400,6 +416,11 @@ const AppCard = ({ app, onRefresh }: AppCardProps) => {
</Tooltip>
)}
</div>
<div>
{onlineUserAvatars.length > 0 && (
<UserAvatarList users={onlineUserAvatars} maxVisible={3} size={20} />
)}
</div>
</div>
<div className="title-wrapper h-[90px] px-[14px] text-xs leading-normal text-text-tertiary">
<div

View File

@ -1,3 +1,4 @@
import { QueryClient, QueryClientProvider } from '@tanstack/react-query'
import { act, fireEvent, render, screen } from '@testing-library/react'
import * as React from 'react'
import { useStore as useTagStore } from '@/app/components/base/tag-management/store'
@ -141,9 +142,13 @@ vi.mock('@/app/components/base/tag-management/filter', () => ({
}))
// Mock config
vi.mock('@/config', () => ({
NEED_REFRESH_APP_LIST_KEY: 'needRefreshAppList',
}))
vi.mock('@/config', async (importOriginal) => {
const actual = await importOriginal<typeof import('@/config')>()
return {
...actual,
NEED_REFRESH_APP_LIST_KEY: 'needRefreshAppList',
}
})
// Mock pay hook
vi.mock('@/hooks/use-pay', () => ({
@ -234,6 +239,21 @@ beforeAll(() => {
} as unknown as typeof IntersectionObserver
})
const renderList = () => {
const queryClient = new QueryClient({
defaultOptions: {
queries: {
retry: false,
},
},
})
return render(
<QueryClientProvider client={queryClient}>
<List />
</QueryClientProvider>,
)
}
describe('List', () => {
beforeEach(() => {
vi.clearAllMocks()
@ -260,13 +280,13 @@ describe('List', () => {
describe('Rendering', () => {
it('should render without crashing', () => {
render(<List />)
renderList()
// Tab slider renders app type tabs
expect(screen.getByText('app.types.all')).toBeInTheDocument()
})
it('should render tab slider with all app types', () => {
render(<List />)
renderList()
expect(screen.getByText('app.types.all')).toBeInTheDocument()
expect(screen.getByText('app.types.workflow')).toBeInTheDocument()
@ -277,48 +297,48 @@ describe('List', () => {
})
it('should render search input', () => {
render(<List />)
renderList()
// Input component renders a searchbox
expect(screen.getByRole('textbox')).toBeInTheDocument()
})
it('should render tag filter', () => {
render(<List />)
renderList()
// Tag filter renders with placeholder text
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
})
it('should render created by me checkbox', () => {
render(<List />)
renderList()
expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
})
it('should render app cards when apps exist', () => {
render(<List />)
renderList()
expect(screen.getByTestId('app-card-app-1')).toBeInTheDocument()
expect(screen.getByTestId('app-card-app-2')).toBeInTheDocument()
})
it('should render new app card for editors', () => {
render(<List />)
renderList()
expect(screen.getByTestId('new-app-card')).toBeInTheDocument()
})
it('should render footer when branding is disabled', () => {
render(<List />)
renderList()
expect(screen.getByTestId('footer')).toBeInTheDocument()
})
it('should render drop DSL hint for editors', () => {
render(<List />)
renderList()
expect(screen.getByText('app.newApp.dropDSLToCreateApp')).toBeInTheDocument()
})
})
describe('Tab Navigation', () => {
it('should call setActiveTab when tab is clicked', () => {
render(<List />)
renderList()
fireEvent.click(screen.getByText('app.types.workflow'))
@ -326,7 +346,7 @@ describe('List', () => {
})
it('should call setActiveTab for all tab', () => {
render(<List />)
renderList()
fireEvent.click(screen.getByText('app.types.all'))
@ -336,12 +356,12 @@ describe('List', () => {
describe('Search Functionality', () => {
it('should render search input field', () => {
render(<List />)
renderList()
expect(screen.getByRole('textbox')).toBeInTheDocument()
})
it('should handle search input change', () => {
render(<List />)
renderList()
const input = screen.getByRole('textbox')
fireEvent.change(input, { target: { value: 'test search' } })
@ -350,7 +370,7 @@ describe('List', () => {
})
it('should handle search input interaction', () => {
render(<List />)
renderList()
const input = screen.getByRole('textbox')
expect(input).toBeInTheDocument()
@ -360,7 +380,7 @@ describe('List', () => {
// Set initial keywords to make clear button visible
mockQueryState.keywords = 'existing search'
render(<List />)
renderList()
// Find and click clear button (Input component uses .group class for clear icon container)
const clearButton = document.querySelector('.group')
@ -375,12 +395,12 @@ describe('List', () => {
describe('Tag Filter', () => {
it('should render tag filter component', () => {
render(<List />)
renderList()
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
})
it('should render tag filter with placeholder', () => {
render(<List />)
renderList()
// Tag filter is rendered
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
@ -389,12 +409,12 @@ describe('List', () => {
describe('Created By Me Filter', () => {
it('should render checkbox with correct label', () => {
render(<List />)
renderList()
expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
})
it('should handle checkbox change', () => {
render(<List />)
renderList()
// Checkbox component uses data-testid="checkbox-{id}"
// CheckboxWithLabel doesn't pass testId, so id is undefined
@ -409,7 +429,7 @@ describe('List', () => {
it('should not render new app card for non-editors', () => {
mockIsCurrentWorkspaceEditor.mockReturnValue(false)
render(<List />)
renderList()
expect(screen.queryByTestId('new-app-card')).not.toBeInTheDocument()
})
@ -417,7 +437,7 @@ describe('List', () => {
it('should not render drop DSL hint for non-editors', () => {
mockIsCurrentWorkspaceEditor.mockReturnValue(false)
render(<List />)
renderList()
expect(screen.queryByText(/drop dsl file to create app/i)).not.toBeInTheDocument()
})
@ -427,7 +447,7 @@ describe('List', () => {
it('should redirect dataset operators to datasets page', () => {
mockIsCurrentWorkspaceDatasetOperator.mockReturnValue(true)
render(<List />)
renderList()
expect(mockReplace).toHaveBeenCalledWith('/datasets')
})
@ -437,7 +457,7 @@ describe('List', () => {
it('should call refetch when refresh key is set in localStorage', () => {
localStorage.setItem('needRefreshAppList', '1')
render(<List />)
renderList()
expect(mockRefetch).toHaveBeenCalled()
expect(localStorage.getItem('needRefreshAppList')).toBeNull()
@ -446,22 +466,23 @@ describe('List', () => {
describe('Edge Cases', () => {
it('should handle multiple renders without issues', () => {
const { rerender } = render(<List />)
const { unmount } = renderList()
expect(screen.getByText('app.types.all')).toBeInTheDocument()
rerender(<List />)
unmount()
renderList()
expect(screen.getByText('app.types.all')).toBeInTheDocument()
})
it('should render app cards correctly', () => {
render(<List />)
renderList()
expect(screen.getByText('Test App 1')).toBeInTheDocument()
expect(screen.getByText('Test App 2')).toBeInTheDocument()
})
it('should render with all filter options visible', () => {
render(<List />)
renderList()
expect(screen.getByRole('textbox')).toBeInTheDocument()
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
@ -471,14 +492,14 @@ describe('List', () => {
describe('Dragging State', () => {
it('should show drop hint when DSL feature is enabled for editors', () => {
render(<List />)
renderList()
expect(screen.getByText('app.newApp.dropDSLToCreateApp')).toBeInTheDocument()
})
})
describe('App Type Tabs', () => {
it('should render all app type tabs', () => {
render(<List />)
renderList()
expect(screen.getByText('app.types.all')).toBeInTheDocument()
expect(screen.getByText('app.types.workflow')).toBeInTheDocument()
@ -489,7 +510,7 @@ describe('List', () => {
})
it('should call setActiveTab for each app type', () => {
render(<List />)
renderList()
const appTypeTexts = [
{ mode: AppModeEnum.WORKFLOW, text: 'app.types.workflow' },
@ -508,7 +529,7 @@ describe('List', () => {
describe('Search and Filter Integration', () => {
it('should display search input with correct attributes', () => {
render(<List />)
renderList()
const input = screen.getByRole('textbox')
expect(input).toBeInTheDocument()
@ -516,13 +537,13 @@ describe('List', () => {
})
it('should have tag filter component', () => {
render(<List />)
renderList()
expect(screen.getByText('common.tag.placeholder')).toBeInTheDocument()
})
it('should display created by me label', () => {
render(<List />)
renderList()
expect(screen.getByText('app.showMyCreatedAppsOnly')).toBeInTheDocument()
})
@ -530,14 +551,14 @@ describe('List', () => {
describe('App List Display', () => {
it('should display all app cards from data', () => {
render(<List />)
renderList()
expect(screen.getByTestId('app-card-app-1')).toBeInTheDocument()
expect(screen.getByTestId('app-card-app-2')).toBeInTheDocument()
})
it('should display app names correctly', () => {
render(<List />)
renderList()
expect(screen.getByText('Test App 1')).toBeInTheDocument()
expect(screen.getByText('Test App 2')).toBeInTheDocument()
@ -546,7 +567,7 @@ describe('List', () => {
describe('Footer Visibility', () => {
it('should render footer when branding is disabled', () => {
render(<List />)
renderList()
expect(screen.getByTestId('footer')).toBeInTheDocument()
})
@ -558,14 +579,14 @@ describe('List', () => {
describe('Additional Coverage', () => {
it('should render dragging state overlay when dragging', () => {
mockDragging = true
const { container } = render(<List />)
const { container } = renderList()
// Component should render successfully with dragging state
expect(container).toBeInTheDocument()
})
it('should handle app mode filter in query params', () => {
render(<List />)
renderList()
const workflowTab = screen.getByText('app.types.workflow')
fireEvent.click(workflowTab)
@ -574,7 +595,7 @@ describe('List', () => {
})
it('should render new app card for editors', () => {
render(<List />)
renderList()
expect(screen.getByTestId('new-app-card')).toBeInTheDocument()
})
@ -582,7 +603,7 @@ describe('List', () => {
describe('DSL File Drop', () => {
it('should handle DSL file drop and show modal', () => {
render(<List />)
renderList()
// Simulate DSL file drop via the callback
const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
@ -596,7 +617,7 @@ describe('List', () => {
})
it('should close DSL modal when onClose is called', () => {
render(<List />)
renderList()
// Open modal via DSL file drop
const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
@ -614,7 +635,7 @@ describe('List', () => {
})
it('should close DSL modal and refetch when onSuccess is called', () => {
render(<List />)
renderList()
// Open modal via DSL file drop
const mockFile = new File(['test content'], 'test.yml', { type: 'application/yaml' })
@ -637,7 +658,7 @@ describe('List', () => {
describe('Tag Filter Change', () => {
it('should handle tag filter value change', () => {
vi.useFakeTimers()
render(<List />)
renderList()
// TagFilter component is rendered
expect(screen.getByTestId('tag-filter')).toBeInTheDocument()
@ -661,7 +682,7 @@ describe('List', () => {
it('should handle empty tag filter selection', () => {
vi.useFakeTimers()
render(<List />)
renderList()
// Trigger tag filter change with empty array
act(() => {
@ -683,7 +704,7 @@ describe('List', () => {
describe('Infinite Scroll', () => {
it('should call fetchNextPage when intersection observer triggers', () => {
mockServiceState.hasNextPage = true
render(<List />)
renderList()
// Simulate intersection
if (intersectionCallback) {
@ -700,7 +721,7 @@ describe('List', () => {
it('should not call fetchNextPage when not intersecting', () => {
mockServiceState.hasNextPage = true
render(<List />)
renderList()
// Simulate non-intersection
if (intersectionCallback) {
@ -718,7 +739,7 @@ describe('List', () => {
it('should not call fetchNextPage when loading', () => {
mockServiceState.hasNextPage = true
mockServiceState.isLoading = true
render(<List />)
renderList()
if (intersectionCallback) {
act(() => {
@ -736,7 +757,7 @@ describe('List', () => {
describe('Error State', () => {
it('should handle error state in useEffect', () => {
mockServiceState.error = new Error('Test error')
const { container } = render(<List />)
const { container } = renderList()
// Component should still render
expect(container).toBeInTheDocument()

View File

@ -9,13 +9,14 @@ import {
RiMessage3Line,
RiRobot3Line,
} from '@remixicon/react'
import { useQuery } from '@tanstack/react-query'
import { useDebounceFn } from 'ahooks'
import dynamic from 'next/dynamic'
import {
useRouter,
} from 'next/navigation'
import { parseAsString, useQueryState } from 'nuqs'
import { useCallback, useEffect, useRef, useState } from 'react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Input from '@/app/components/base/input'
import TabSliderNew from '@/app/components/base/tab-slider-new'
@ -26,6 +27,7 @@ import { NEED_REFRESH_APP_LIST_KEY } from '@/config'
import { useAppContext } from '@/context/app-context'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { CheckModal } from '@/hooks/use-pay'
import { fetchWorkflowOnlineUsers } from '@/service/apps'
import { useInfiniteAppList } from '@/service/use-apps'
import { AppModeEnum } from '@/types/app'
import { cn } from '@/utils/classnames'
@ -116,6 +118,37 @@ const List: FC<Props> = ({
refetch,
} = useInfiniteAppList(appListQueryParams, { enabled: !isCurrentWorkspaceDatasetOperator })
const apps = useMemo(() => data?.pages?.flatMap(page => page.data) ?? [], [data])
const workflowIds = useMemo(() => {
const ids = new Set<string>()
apps.forEach((appItem) => {
const workflowId = appItem.id
if (!workflowId)
return
if (appItem.mode === 'workflow' || appItem.mode === 'advanced-chat')
ids.add(workflowId)
})
return Array.from(ids)
}, [apps])
const { data: onlineUsersByWorkflow = {}, refetch: refreshOnlineUsers } = useQuery({
queryKey: ['apps', 'workflow-online-users', workflowIds],
queryFn: () => fetchWorkflowOnlineUsers({ workflowIds }),
enabled: workflowIds.length > 0,
})
useEffect(() => {
const timer = window.setInterval(() => {
refetch()
if (workflowIds.length)
refreshOnlineUsers()
}, 10000)
return () => window.clearInterval(timer)
}, [workflowIds, refetch, refreshOnlineUsers])
useEffect(() => {
if (controlRefreshList > 0) {
refetch()
@ -254,7 +287,7 @@ const List: FC<Props> = ({
if (hasAnyApp) {
return pages.flatMap(({ data: apps }) => apps).map(app => (
<AppCard key={app.id} app={app} onRefresh={refetch} />
<AppCard key={app.id} app={app} onRefresh={refetch} onlineUsers={onlineUsersByWorkflow?.[app.id] ?? []} />
))
}

View File

@ -35,12 +35,14 @@ describe('Avatar', () => {
it.each([
{ size: undefined, expected: '30px', label: 'default (30px)' },
{ size: 50, expected: '50px', label: 'custom (50px)' },
])('should apply $label size to img element', ({ size, expected }) => {
])('should apply $label size to avatar container', ({ size, expected }) => {
const props = { name: 'Test', avatar: 'https://example.com/avatar.jpg', size }
render(<Avatar {...props} />)
expect(screen.getByRole('img')).toHaveStyle({
const img = screen.getByRole('img')
const wrapper = img.parentElement as HTMLElement
expect(wrapper).toHaveStyle({
width: expected,
height: expected,
fontSize: expected,
@ -60,7 +62,7 @@ describe('Avatar', () => {
})
describe('className prop', () => {
it('should merge className with default avatar classes on img', () => {
it('should merge className with default avatar classes on container', () => {
const props = {
name: 'Test',
avatar: 'https://example.com/avatar.jpg',
@ -70,8 +72,9 @@ describe('Avatar', () => {
render(<Avatar {...props} />)
const img = screen.getByRole('img')
expect(img).toHaveClass('custom-class')
expect(img).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600')
const wrapper = img.parentElement as HTMLElement
expect(wrapper).toHaveClass('custom-class')
expect(wrapper).toHaveClass('shrink-0', 'flex', 'items-center', 'rounded-full', 'bg-primary-600')
})
it('should merge className with default avatar classes on fallback div', () => {
@ -277,10 +280,11 @@ describe('Avatar', () => {
render(<Avatar {...props} />)
const img = screen.getByRole('img')
const wrapper = img.parentElement as HTMLElement
expect(img).toHaveAttribute('alt', 'Test User')
expect(img).toHaveAttribute('src', 'https://example.com/avatar.jpg')
expect(img).toHaveStyle({ width: '64px', height: '64px' })
expect(img).toHaveClass('custom-avatar')
expect(wrapper).toHaveStyle({ width: '64px', height: '64px' })
expect(wrapper).toHaveClass('custom-avatar')
// Trigger load to verify onError callback
fireEvent.load(img)

View File

@ -9,6 +9,7 @@ export type AvatarProps = {
className?: string
textClassName?: string
onError?: (x: boolean) => void
backgroundColor?: string
}
const Avatar = ({
name,
@ -17,9 +18,18 @@ const Avatar = ({
className,
textClassName,
onError,
backgroundColor,
}: AvatarProps) => {
const avatarClassName = 'shrink-0 flex items-center rounded-full bg-primary-600'
const style = { width: `${size}px`, height: `${size}px`, fontSize: `${size}px`, lineHeight: `${size}px` }
const avatarClassName = backgroundColor
? 'shrink-0 flex items-center rounded-full'
: 'shrink-0 flex items-center rounded-full bg-primary-600'
const style = {
width: `${size}px`,
height: `${size}px`,
fontSize: `${size}px`,
lineHeight: `${size}px`,
...(backgroundColor && !avatar ? { backgroundColor } : {}),
}
const [imgError, setImgError] = useState(false)
const handleError = () => {
@ -35,14 +45,18 @@ const Avatar = ({
if (avatar && !imgError) {
return (
<img
<span
className={cn(avatarClassName, className)}
style={style}
alt={name}
src={avatar}
onError={handleError}
onLoad={() => onError?.(false)}
/>
>
<img
className="h-full w-full rounded-full object-cover"
alt={name}
src={avatar}
onError={handleError}
onLoad={() => onError?.(false)}
/>
</span>
)
}
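
A hedged usage sketch of the reworked component: the wrapping <span> now carries the size styles, and backgroundColor only takes effect for the initials fallback (it is ignored whenever an image URL is given). The color helper below is illustrative, not the getUserColor utility used elsewhere in this PR.

import Avatar from '@/app/components/base/avatar'

const palette = ['#F97316', '#10B981', '#6366F1', '#EC4899']
// Deterministic per-user color; a stand-in for the real user-color helper.
const colorFor = (userId: string) =>
  palette[Array.from(userId).reduce((sum, ch) => sum + ch.charCodeAt(0), 0) % palette.length]

type CollaboratorBadgeProps = {
  user: { id: string; name: string; avatar_url?: string | null }
}

export const CollaboratorBadge = ({ user }: CollaboratorBadgeProps) => (
  <Avatar
    name={user.name}
    avatar={user.avatar_url || null}
    size={24}
    backgroundColor={colorFor(user.id)}
  />
)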

View File

@ -19,7 +19,7 @@ const ContentDialog = ({
<Transition
show={show}
as="div"
className="absolute left-0 top-0 z-30 box-border h-full w-full p-2"
className="absolute left-0 top-0 z-[70] box-border h-full w-full p-2"
>
<TransitionChild>
<div

View File

@ -0,0 +1,4 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M0 4C0 1.79086 1.79086 0 4 0H12C14.2091 0 16 1.79086 16 4V12C16 14.2091 14.2091 16 12 16H4C1.79086 16 0 14.2091 0 12V4Z" fill="white" fill-opacity="0.12"/>
<path d="M3.42756 8.7358V7.62784H10.8764C11.2003 7.62784 11.4957 7.5483 11.7628 7.3892C12.0298 7.23011 12.2415 7.01705 12.3977 6.75C12.5568 6.48295 12.6364 6.1875 12.6364 5.86364C12.6364 5.53977 12.5568 5.24574 12.3977 4.98153C12.2386 4.71449 12.0256 4.50142 11.7585 4.34233C11.4943 4.18324 11.2003 4.10369 10.8764 4.10369H10.3991V3H10.8764C11.4048 3 11.8849 3.12926 12.3168 3.38778C12.7486 3.64631 13.0938 3.99148 13.3523 4.4233C13.6108 4.85511 13.7401 5.33523 13.7401 5.86364C13.7401 6.25852 13.6648 6.62926 13.5142 6.97585C13.3665 7.32244 13.1619 7.62784 12.9006 7.89205C12.6392 8.15625 12.3352 8.36364 11.9886 8.5142C11.642 8.66193 11.2713 8.7358 10.8764 8.7358H3.42756ZM6.16761 12.0554L2.29403 8.18182L6.16761 4.30824L6.9304 5.07102L3.81534 8.18182L6.9304 11.2926L6.16761 12.0554Z" fill="white"/>
</svg>

View File

@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="14" height="12" viewBox="0 0 14 12" fill="none">
<path d="M12.3334 4C12.3334 2.52725 11.1395 1.33333 9.66671 1.33333H4.33337C2.86062 1.33333 1.66671 2.52724 1.66671 4V10.6667H9.66671C11.1395 10.6667 12.3334 9.47274 12.3334 8V4ZM7.66671 6.66667V8H4.33337V6.66667H7.66671ZM9.66671 4V5.33333H4.33337V4H9.66671ZM13.6667 8C13.6667 10.2091 11.8758 12 9.66671 12H0.333374V4C0.333374 1.79086 2.12424 0 4.33337 0H9.66671C11.8758 0 13.6667 1.79086 13.6667 4V8Z" fill="currentColor"/>
</svg>

View File

@ -0,0 +1,36 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"width": "16",
"height": "16",
"viewBox": "0 0 16 16",
"fill": "none",
"xmlns": "http://www.w3.org/2000/svg"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M0 4C0 1.79086 1.79086 0 4 0H12C14.2091 0 16 1.79086 16 4V12C16 14.2091 14.2091 16 12 16H4C1.79086 16 0 14.2091 0 12V4Z",
"fill": "white",
"fill-opacity": "0.12"
},
"children": []
},
{
"type": "element",
"name": "path",
"attributes": {
"d": "M3.42756 8.7358V7.62784H10.8764C11.2003 7.62784 11.4957 7.5483 11.7628 7.3892C12.0298 7.23011 12.2415 7.01705 12.3977 6.75C12.5568 6.48295 12.6364 6.1875 12.6364 5.86364C12.6364 5.53977 12.5568 5.24574 12.3977 4.98153C12.2386 4.71449 12.0256 4.50142 11.7585 4.34233C11.4943 4.18324 11.2003 4.10369 10.8764 4.10369H10.3991V3H10.8764C11.4048 3 11.8849 3.12926 12.3168 3.38778C12.7486 3.64631 13.0938 3.99148 13.3523 4.4233C13.6108 4.85511 13.7401 5.33523 13.7401 5.86364C13.7401 6.25852 13.6648 6.62926 13.5142 6.97585C13.3665 7.32244 13.1619 7.62784 12.9006 7.89205C12.6392 8.15625 12.3352 8.36364 11.9886 8.5142C11.642 8.66193 11.2713 8.7358 10.8764 8.7358H3.42756ZM6.16761 12.0554L2.29403 8.18182L6.16761 4.30824L6.9304 5.07102L3.81534 8.18182L6.9304 11.2926L6.16761 12.0554Z",
"fill": "white"
},
"children": []
}
]
},
"name": "EnterKey"
}

View File

@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './EnterKey.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'EnterKey'
export default Icon

View File

@ -1,6 +1,7 @@
export { default as D } from './D'
export { default as DiagonalDividingLine } from './DiagonalDividingLine'
export { default as Dify } from './Dify'
export { default as EnterKey } from './EnterKey'
export { default as Gdpr } from './Gdpr'
export { default as Github } from './Github'
export { default as Highlight } from './Highlight'

View File

@ -0,0 +1,26 @@
{
"icon": {
"type": "element",
"isRootNode": true,
"name": "svg",
"attributes": {
"xmlns": "http://www.w3.org/2000/svg",
"width": "14",
"height": "12",
"viewBox": "0 0 14 12",
"fill": "none"
},
"children": [
{
"type": "element",
"name": "path",
"attributes": {
"d": "M12.3334 4C12.3334 2.52725 11.1395 1.33333 9.66671 1.33333H4.33337C2.86062 1.33333 1.66671 2.52724 1.66671 4V10.6667H9.66671C11.1395 10.6667 12.3334 9.47274 12.3334 8V4ZM7.66671 6.66667V8H4.33337V6.66667H7.66671ZM9.66671 4V5.33333H4.33337V4H9.66671ZM13.6667 8C13.6667 10.2091 11.8758 12 9.66671 12H0.333374V4C0.333374 1.79086 2.12424 0 4.33337 0H9.66671C11.8758 0 13.6667 1.79086 13.6667 4V8Z",
"fill": "currentColor"
},
"children": []
}
]
},
"name": "Comment"
}

View File

@ -0,0 +1,20 @@
// GENERATED BY script
// DO NOT EDIT IT MANUALLY
import type { IconData } from '@/app/components/base/icons/IconBase'
import * as React from 'react'
import IconBase from '@/app/components/base/icons/IconBase'
import data from './Comment.json'
const Icon = (
{
ref,
...props
}: React.SVGProps<SVGSVGElement> & {
ref?: React.RefObject<React.RefObject<HTMLOrSVGElement>>
},
) => <IconBase {...props} ref={ref} data={data as IconData} />
Icon.displayName = 'Comment'
export default Icon

View File

@ -1,3 +1,4 @@
export { default as Comment } from './Comment'
export { default as DefaultToolIcon } from './DefaultToolIcon'
export { default as Icon3Dots } from './Icon3Dots'
export { default as Message3Fill } from './Message3Fill'

View File

@ -17,6 +17,7 @@ import type {
} from './types'
import { CodeNode } from '@lexical/code'
import { LexicalComposer } from '@lexical/react/LexicalComposer'
import { useLexicalComposerContext } from '@lexical/react/LexicalComposerContext'
import { ContentEditable } from '@lexical/react/LexicalContentEditable'
import { LexicalErrorBoundary } from '@lexical/react/LexicalErrorBoundary'
import { HistoryPlugin } from '@lexical/react/LexicalHistoryPlugin'
@ -81,6 +82,29 @@ import {
} from './plugins/workflow-variable-block'
import { textToEditorState } from './utils'
const ValueSyncPlugin: FC<{ value?: string }> = ({ value }) => {
const [editor] = useLexicalComposerContext()
useEffect(() => {
if (value === undefined)
return
const incomingValue = value ?? ''
const shouldUpdate = editor.getEditorState().read(() => {
const currentText = $getRoot().getChildren().map(node => node.getTextContent()).join('\n')
return currentText !== incomingValue
})
if (!shouldUpdate)
return
const editorState = editor.parseEditorState(textToEditorState(incomingValue))
editor.setEditorState(editorState)
}, [editor, value])
return null
}
export type PromptEditorProps = {
instanceId?: string
compact?: boolean
@ -294,6 +318,7 @@ const PromptEditor: FC<PromptEditorProps> = ({
<VariableValueBlock />
)
}
<ValueSyncPlugin value={value} />
<OnChangePlugin onChange={handleEditorChange} />
<OnBlurBlock onBlur={onBlur} onFocus={onFocus} />
<UpdateBlock instanceId={instanceId} />

View File

@ -0,0 +1,79 @@
import type { FC } from 'react'
import { memo } from 'react'
import Avatar from '@/app/components/base/avatar'
import { getUserColor } from '@/app/components/workflow/collaboration/utils/user-color'
import { useAppContext } from '@/context/app-context'
type User = {
id: string
name: string
avatar_url?: string | null
}
type UserAvatarListProps = {
users: User[]
maxVisible?: number
size?: number
className?: string
showCount?: boolean
}
export const UserAvatarList: FC<UserAvatarListProps> = memo(({
users,
maxVisible = 3,
size = 24,
className = '',
showCount = true,
}) => {
const { userProfile } = useAppContext()
if (!users.length)
return null
const shouldShowCount = showCount && users.length > maxVisible
const actualMaxVisible = shouldShowCount ? Math.max(1, maxVisible - 1) : maxVisible
const visibleUsers = users.slice(0, actualMaxVisible)
const remainingCount = users.length - actualMaxVisible
const currentUserId = userProfile?.id
return (
<div className={`flex items-center -space-x-1 ${className}`}>
{visibleUsers.map((user, index) => {
const isCurrentUser = user.id === currentUserId
const userColor = isCurrentUser ? undefined : getUserColor(user.id)
return (
<div
key={`${user.id}-${index}`}
className="relative"
style={{ zIndex: visibleUsers.length - index }}
>
<Avatar
name={user.name}
avatar={user.avatar_url || null}
size={size}
className="ring-2 ring-components-panel-bg"
backgroundColor={userColor}
/>
</div>
)
},
)}
{shouldShowCount && remainingCount > 0 && (
<div
className="flex items-center justify-center rounded-full bg-gray-500 text-[10px] leading-none text-white ring-2 ring-components-panel-bg"
style={{
zIndex: 0,
width: size,
height: size,
}}
>
+
{remainingCount}
</div>
)}
</div>
)
})
UserAvatarList.displayName = 'UserAvatarList'
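
A short usage sketch, assuming the component is exported from this file's path: with four users and the default maxVisible of 3, two avatars render plus a "+2" badge, because showCount reserves one slot for the overflow counter.

import { UserAvatarList } from '@/app/components/workflow/collaboration/components/user-avatar-list'

const onlineUsers = [
  { id: 'u1', name: 'Ada', avatar_url: null },
  { id: 'u2', name: 'Grace', avatar_url: 'https://example.com/grace.png' },
  { id: 'u3', name: 'Linus', avatar_url: null },
  { id: 'u4', name: 'Edsger', avatar_url: null },
]

export const OnlineUsersStack = () => (
  <UserAvatarList users={onlineUsers} maxVisible={3} size={24} showCount />
)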

View File

@ -144,7 +144,7 @@ const SystemModel: FC<SystemModelSelectorProps> = ({
{t('modelProvider.systemModelSettings', { ns: 'common' })}
</Button>
</PortalToFollowElemTrigger>
<PortalToFollowElemContent className="z-[60]">
<PortalToFollowElemContent className="z-[75]">
<div className="w-[360px] rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg pt-4 shadow-xl">
<div className="px-6 py-1">
<div className="flex h-8 items-center text-[13px] font-medium text-text-primary">

View File

@ -76,7 +76,7 @@ const ActionList = ({
className='w-full'
onClick={() => setShowSettingAuth(true)}
disabled={!isCurrentWorkspaceManager}
>{t('workflow.nodes.tool.authorize')}</Button>
>{t('nodes.tool.authorize', { ns: 'workflow' })}</Button>
)} */}
</div>
{/* <div className='flex flex-col gap-2'>

View File

@ -23,7 +23,7 @@ export const useAvailableNodesMetaData = () => {
},
knowledgeBaseDefault,
dataSourceEmptyDefault,
], [])
] as AvailableNodesMetaData['nodes'], [])
const helpLinkUri = useMemo(() => docLink(
'/use-dify/knowledge/knowledge-pipeline/knowledge-pipeline-orchestration',
@ -47,7 +47,7 @@ export const useAvailableNodesMetaData = () => {
title,
},
}
}), [mergedNodesMetaData, t])
}) as AvailableNodesMetaData['nodes'], [mergedNodesMetaData, t])
const availableNodesMetaDataMap = useMemo(() => availableNodesMetaData.reduce((acc, node) => {
acc![node.metaData.type] = node

View File

@ -3,8 +3,14 @@ import { useWorkflowStore } from '@/app/components/workflow/store'
export const useGetRunAndTraceUrl = () => {
const workflowStore = useWorkflowStore()
const getWorkflowRunAndTraceUrl = useCallback((runId: string) => {
const getWorkflowRunAndTraceUrl = useCallback((runId?: string) => {
const { pipelineId } = workflowStore.getState()
if (!pipelineId || !runId) {
return {
runUrl: '',
traceUrl: '',
}
}
return {
runUrl: `/rag/pipelines/${pipelineId}/workflow-runs/${runId}`,

View File

@ -10,6 +10,7 @@ import Divider from '@/app/components/base/divider'
import Modal from '@/app/components/base/modal'
import Textarea from '@/app/components/base/textarea'
import MCPServerParamItem from '@/app/components/tools/mcp/mcp-server-param-item'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import {
useCreateMCPServer,
useInvalidateMCPServerDetail,
@ -59,6 +60,22 @@ const MCPServerModal = ({
return res
}
const emitMcpServerUpdate = (action: 'created' | 'updated') => {
const socket = webSocketClient.getSocket(appID)
if (!socket)
return
const timestamp = Date.now()
socket.emit('collaboration_event', {
type: 'mcp_server_update',
data: {
action,
timestamp,
},
timestamp,
})
}
const submit = async () => {
if (!data) {
const payload: any = {
@ -71,6 +88,7 @@ const MCPServerModal = ({
await createMCPServer(payload)
invalidateMCPServerDetail(appID)
emitMcpServerUpdate('created')
onHide()
}
else {
@ -83,6 +101,7 @@ const MCPServerModal = ({
payload.description = description
await updateMCPServer(payload)
invalidateMCPServerDetail(appID)
emitMcpServerUpdate('updated')
onHide()
}
}
@ -92,6 +111,7 @@ const MCPServerModal = ({
isShow={show}
onClose={onHide}
className={cn('relative !max-w-[520px] !p-0')}
highPriority
>
<div className="absolute right-5 top-5 z-10 cursor-pointer p-1.5" onClick={onHide}>
<RiCloseLine className="h-5 w-5 text-text-tertiary" />

View File

@ -1,6 +1,8 @@
'use client'
import type { CollaborationUpdate } from '@/app/components/workflow/collaboration/types/collaboration'
import type { InputVar } from '@/app/components/workflow/types'
import type { AppDetailResponse } from '@/models/app'
import type { AppSSO } from '@/types/app'
import type { AppSSO, ModelConfig, UserInputFormItem } from '@/types/app'
import { RiEditLine, RiLoopLeftLine } from '@remixicon/react'
import * as React from 'react'
import { useEffect, useMemo, useState } from 'react'
@ -16,6 +18,8 @@ import Switch from '@/app/components/base/switch'
import Tooltip from '@/app/components/base/tooltip'
import Indicator from '@/app/components/header/indicator'
import MCPServerModal from '@/app/components/tools/mcp/mcp-server-modal'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'
import { BlockEnum } from '@/app/components/workflow/types'
import { useAppContext } from '@/context/app-context'
import { useDocLink } from '@/context/i18n'
@ -36,6 +40,16 @@ export type IAppCardProps = {
triggerModeMessage?: React.ReactNode // display-only message explaining the trigger restriction
}
type BasicAppConfig = Partial<ModelConfig> & {
updated_at?: number
}
type McpServerParam = {
label: string
variable: string
type: string
}
function MCPServiceCard({
appInfo,
triggerModeDisabled = false,
@ -54,16 +68,16 @@ function MCPServiceCard({
const isAdvancedApp = appInfo?.mode === AppModeEnum.ADVANCED_CHAT || appInfo?.mode === AppModeEnum.WORKFLOW
const isBasicApp = !isAdvancedApp
const { data: currentWorkflow } = useAppWorkflow(isAdvancedApp ? appId : '')
const [basicAppConfig, setBasicAppConfig] = useState<any>({})
const basicAppInputForm = useMemo(() => {
if (!isBasicApp || !basicAppConfig?.user_input_form)
const [basicAppConfig, setBasicAppConfig] = useState<BasicAppConfig>({})
const basicAppInputForm = useMemo<McpServerParam[]>(() => {
if (!isBasicApp || !basicAppConfig.user_input_form)
return []
return basicAppConfig.user_input_form.map((item: any) => {
const type = Object.keys(item)[0]
return {
...item[type],
type: type || 'text-input',
}
return basicAppConfig.user_input_form.map((item: UserInputFormItem) => {
if ('text-input' in item)
return { label: item['text-input'].label, variable: item['text-input'].variable, type: 'text-input' }
if ('select' in item)
return { label: item.select.label, variable: item.select.variable, type: 'select' }
return { label: item.paragraph.label, variable: item.paragraph.variable, type: 'paragraph' }
})
}, [basicAppConfig.user_input_form, isBasicApp])
useEffect(() => {
@ -90,12 +104,22 @@ function MCPServiceCard({
const [activated, setActivated] = useState(serverActivated)
const latestParams = useMemo(() => {
const latestParams = useMemo<McpServerParam[]>(() => {
if (isAdvancedApp) {
if (!currentWorkflow?.graph)
return []
const startNode = currentWorkflow?.graph.nodes.find(node => node.data.type === BlockEnum.Start) as any
return startNode?.data.variables as any[] || []
const startNode = currentWorkflow?.graph.nodes.find(node => node.data.type === BlockEnum.Start)
const variables = (startNode?.data as { variables?: InputVar[] } | undefined)?.variables || []
return variables.map((variable) => {
const label = typeof variable.label === 'string'
? variable.label
: (variable.label.variable || variable.label.nodeName)
return {
label,
variable: variable.variable,
type: variable.type,
}
})
}
return basicAppInputForm
}, [currentWorkflow, basicAppInputForm, isAdvancedApp])
@ -103,6 +127,19 @@ function MCPServiceCard({
const onGenCode = async () => {
await refreshMCPServerCode(detail?.id || '')
invalidateMCPServerDetail(appId)
// Emit collaboration event to notify other clients of MCP server changes
const socket = webSocketClient.getSocket(appId)
if (socket) {
socket.emit('collaboration_event', {
type: 'mcp_server_update',
data: {
action: 'codeRegenerated',
timestamp: Date.now(),
},
timestamp: Date.now(),
})
}
}
const onChangeStatus = async (state: boolean) => {
@ -132,6 +169,20 @@ function MCPServiceCard({
})
invalidateMCPServerDetail(appId)
}
// Emit collaboration event to notify other clients of MCP server status change
const socket = webSocketClient.getSocket(appId)
if (socket) {
socket.emit('collaboration_event', {
type: 'mcp_server_update',
data: {
action: 'statusChanged',
status: state ? 'active' : 'inactive',
timestamp: Date.now(),
},
timestamp: Date.now(),
})
}
}
const handleServerModalHide = () => {
@ -144,6 +195,23 @@ function MCPServiceCard({
setActivated(serverActivated)
}, [serverActivated])
// Listen for collaborative MCP server updates from other clients
useEffect(() => {
if (!appId)
return
const unsubscribe = collaborationManager.onMcpServerUpdate(async (_update: CollaborationUpdate) => {
try {
invalidateMCPServerDetail(appId)
}
catch (error) {
console.error('MCP server update failed:', error)
}
})
return unsubscribe
}, [appId, invalidateMCPServerDetail])
if (!currentWorkflow && isAdvancedApp)
return null
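
The modal and this card emit the same collaboration_event payload inline in several places; a small helper like the sketch below could centralize that. webSocketClient.getSocket is the accessor used in this PR; the McpServerAction union is an assumption added here for illustration.

import { webSocketClient } from '@/app/components/workflow/collaboration/core/websocket-manager'

type McpServerAction = 'created' | 'updated' | 'codeRegenerated' | 'statusChanged'

export const emitMcpServerUpdate = (appId: string, action: McpServerAction, extra: Record<string, unknown> = {}) => {
  const socket = webSocketClient.getSocket(appId)
  if (!socket)
    return
  const timestamp = Date.now()
  socket.emit('collaboration_event', {
    type: 'mcp_server_update',
    data: { action, timestamp, ...extra },
    timestamp,
  })
}

// e.g. emitMcpServerUpdate(appId, 'statusChanged', { status: 'active' })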

View File

@ -108,7 +108,7 @@ vi.mock('@/app/components/app/app-publisher', () => ({
<button type="button" onClick={() => { Promise.resolve(props.onPublish?.()).catch(() => undefined) }}>
publisher-publish
</button>
<button type="button" onClick={() => { Promise.resolve(props.onPublish?.({ title: 'Test title', releaseNotes: 'Test notes' })).catch(() => undefined) }}>
<button type="button" onClick={() => { Promise.resolve(props.onPublish?.({ url: '/apps/app-id/workflows/publish', title: 'Test title', releaseNotes: 'Test notes' })).catch(() => undefined) }}>
publisher-publish-with-params
</button>
</div>

View File

@ -1,3 +1,4 @@
import type { ModelAndParameter } from '@/app/components/app/configuration/debug/types'
import type { EndNodeType } from '@/app/components/workflow/nodes/end/types'
import type { StartNodeType } from '@/app/components/workflow/nodes/start/types'
import type {
@ -140,24 +141,38 @@ const FeaturesTrigger = () => {
const needWarningNodes = useChecklist(nodes, edges)
const updatePublishedWorkflow = useInvalidateAppWorkflow()
const onPublish = useCallback(async (params?: PublishWorkflowParams) => {
const onPublish = useCallback(async (params?: ModelAndParameter | PublishWorkflowParams) => {
const publishParams = params && 'title' in params ? params : undefined
// First check if there are any items in the checklist
// if (!validateBeforeRun())
// throw new Error('Checklist has unresolved items')
if (needWarningNodes.length > 0) {
console.warn('[workflow-header] publish blocked by checklist', {
appId: appID,
warningCount: needWarningNodes.length,
})
notify({ type: 'error', message: t('panel.checklistTip', { ns: 'workflow' }) })
throw new Error('Checklist has unresolved items')
}
// Then perform the detailed validation
if (await handleCheckBeforePublish()) {
console.warn('[workflow-header] publish start', {
appId: appID,
title: publishParams?.title ?? '',
})
const res = await publishWorkflow({
url: `/apps/${appID}/workflows/publish`,
title: params?.title || '',
releaseNotes: params?.releaseNotes || '',
title: publishParams?.title || '',
releaseNotes: publishParams?.releaseNotes || '',
})
console.warn('[workflow-header] publish response', {
appId: appID,
hasResponse: Boolean(res),
createdAt: res?.created_at,
})
if (res) {
notify({ type: 'success', message: t('api.actionSuccess', { ns: 'common' }) })
updatePublishedWorkflow(appID!)
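
For clarity, the 'title' in params check above is a structural type guard distinguishing publish metadata from the model/parameter pair the debug publisher may pass; a hedged standalone version is sketched below, with PublishWorkflowParams declared locally as an assumption.

import type { ModelAndParameter } from '@/app/components/app/configuration/debug/types'

// Assumed local shape; the real type comes from the workflow service layer.
type PublishWorkflowParams = { title: string; releaseNotes?: string }

const isPublishParams = (
  params?: ModelAndParameter | PublishWorkflowParams,
): params is PublishWorkflowParams => !!params && 'title' in params

// Usage mirroring the callback above:
// const publishParams = isPublishParams(params) ? params : undefined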

View File

@ -1,11 +1,24 @@
import type { Features as FeaturesData } from '@/app/components/base/features/types'
import type { WorkflowProps } from '@/app/components/workflow'
import type { CollaborationUpdate } from '@/app/components/workflow/collaboration/types/collaboration'
import type { Shape as HooksStoreShape } from '@/app/components/workflow/hooks-store/store'
import type { Edge, Node } from '@/app/components/workflow/types'
import type { FetchWorkflowDraftResponse } from '@/types/workflow'
import {
useCallback,
useEffect,
useMemo,
useRef,
} from 'react'
import { useReactFlow } from 'reactflow'
import { useFeaturesStore } from '@/app/components/base/features/hooks'
import { FILE_EXTS } from '@/app/components/base/prompt-editor/constants'
import { WorkflowWithInnerContext } from '@/app/components/workflow'
import { useWorkflowStore } from '@/app/components/workflow/store'
import { collaborationManager, useCollaboration } from '@/app/components/workflow/collaboration'
import { useWorkflowUpdate } from '@/app/components/workflow/hooks/use-workflow-interactions'
import { useStore, useWorkflowStore } from '@/app/components/workflow/store'
import { SupportUploadFileTypes } from '@/app/components/workflow/types'
import { fetchWorkflowDraft } from '@/service/workflow'
import {
useAvailableNodesMetaData,
useConfigsMap,
@ -21,6 +34,7 @@ import {
import WorkflowChildren from './workflow-children'
type WorkflowMainProps = Pick<WorkflowProps, 'nodes' | 'edges' | 'viewport'>
type WorkflowDataUpdatePayload = Pick<FetchWorkflowDraftResponse, 'features' | 'conversation_variables' | 'environment_variables'>
const WorkflowMain = ({
nodes,
edges,
@ -28,8 +42,48 @@ const WorkflowMain = ({
}: WorkflowMainProps) => {
const featuresStore = useFeaturesStore()
const workflowStore = useWorkflowStore()
const appId = useStore(s => s.appId)
const containerRef = useRef<HTMLDivElement>(null)
const reactFlow = useReactFlow()
const handleWorkflowDataUpdate = useCallback((payload: any) => {
const reactFlowStore = useMemo(() => ({
getState: () => ({
getNodes: () => reactFlow.getNodes(),
setNodes: (nodesToSet: Node[]) => reactFlow.setNodes(nodesToSet),
getEdges: () => reactFlow.getEdges(),
setEdges: (edgesToSet: Edge[]) => reactFlow.setEdges(edgesToSet),
}),
}), [reactFlow])
const {
startCursorTracking,
stopCursorTracking,
onlineUsers,
cursors,
isConnected,
isEnabled: isCollaborationEnabled,
} = useCollaboration(appId || '', reactFlowStore)
const myUserId = useMemo(
() => (isCollaborationEnabled && isConnected ? 'current-user' : null),
[isCollaborationEnabled, isConnected],
)
const filteredCursors = Object.fromEntries(
Object.entries(cursors).filter(([userId]) => userId !== myUserId),
)
useEffect(() => {
if (!isCollaborationEnabled)
return
if (containerRef.current)
startCursorTracking(containerRef as React.RefObject<HTMLElement>, reactFlow)
return () => {
stopCursorTracking()
}
}, [startCursorTracking, stopCursorTracking, reactFlow, isCollaborationEnabled])
const handleWorkflowDataUpdate = useCallback((payload: WorkflowDataUpdatePayload) => {
const {
features,
conversation_variables,
@ -38,7 +92,33 @@ const WorkflowMain = ({
if (features && featuresStore) {
const { setFeatures } = featuresStore.getState()
setFeatures(features)
const transformedFeatures: FeaturesData = {
file: {
image: {
enabled: !!features.file_upload?.image?.enabled,
number_limits: features.file_upload?.image?.number_limits || 3,
transfer_methods: features.file_upload?.image?.transfer_methods || ['local_file', 'remote_url'],
},
enabled: !!(features.file_upload?.enabled || features.file_upload?.image?.enabled),
allowed_file_types: features.file_upload?.allowed_file_types || [SupportUploadFileTypes.image],
allowed_file_extensions: features.file_upload?.allowed_file_extensions || FILE_EXTS[SupportUploadFileTypes.image].map(ext => `.${ext}`),
allowed_file_upload_methods: features.file_upload?.allowed_file_upload_methods || features.file_upload?.image?.transfer_methods || ['local_file', 'remote_url'],
number_limits: features.file_upload?.number_limits || features.file_upload?.image?.number_limits || 3,
},
opening: {
enabled: !!features.opening_statement,
opening_statement: features.opening_statement,
suggested_questions: features.suggested_questions,
},
suggested: features.suggested_questions_after_answer || { enabled: false },
speech2text: features.speech_to_text || { enabled: false },
text2speech: features.text_to_speech || { enabled: false },
citation: features.retriever_resource || { enabled: false },
moderation: features.sensitive_word_avoidance || { enabled: false },
annotationReply: features.annotation_reply || { enabled: false },
}
setFeatures(transformedFeatures)
}
if (conversation_variables) {
const { setConversationVariables } = workflowStore.getState()
@ -55,6 +135,7 @@ const WorkflowMain = ({
syncWorkflowDraftWhenPageClose,
} = useNodesSyncDraft()
const { handleRefreshWorkflowDraft } = useWorkflowRefreshDraft()
const { handleUpdateWorkflowCanvas } = useWorkflowUpdate()
const {
handleBackupDraft,
handleLoadBackupDraft,
@ -62,6 +143,64 @@ const WorkflowMain = ({
handleRun,
handleStopRun,
} = useWorkflowRun()
useEffect(() => {
if (!appId || !isCollaborationEnabled)
return
const unsubscribe = collaborationManager.onVarsAndFeaturesUpdate(async (_update: CollaborationUpdate) => {
try {
const response = await fetchWorkflowDraft(`/apps/${appId}/workflows/draft`)
handleWorkflowDataUpdate(response)
}
catch (error) {
console.error('workflow vars and features update failed:', error)
}
})
return unsubscribe
}, [appId, handleWorkflowDataUpdate, isCollaborationEnabled])
// Listen for workflow updates from other users
useEffect(() => {
if (!appId || !isCollaborationEnabled)
return
const unsubscribe = collaborationManager.onWorkflowUpdate(async () => {
try {
const response = await fetchWorkflowDraft(`/apps/${appId}/workflows/draft`)
// Handle features, variables etc.
handleWorkflowDataUpdate(response)
// Update workflow canvas (nodes, edges, viewport)
if (response.graph) {
handleUpdateWorkflowCanvas({
nodes: response.graph.nodes || [],
edges: response.graph.edges || [],
viewport: response.graph.viewport || { x: 0, y: 0, zoom: 1 },
})
}
}
catch (error) {
console.error('Failed to fetch updated workflow:', error)
}
})
return unsubscribe
}, [appId, handleWorkflowDataUpdate, handleUpdateWorkflowCanvas, isCollaborationEnabled])
// Listen for sync requests from other users (only processed by leader)
useEffect(() => {
if (!appId || !isCollaborationEnabled)
return
const unsubscribe = collaborationManager.onSyncRequest(() => {
doSyncWorkflowDraft()
})
return unsubscribe
}, [appId, doSyncWorkflowDraft, isCollaborationEnabled])
const {
handleStartWorkflowRun,
handleWorkflowStartRunInChatflow,
@ -79,6 +218,7 @@ const WorkflowMain = ({
} = useDSL()
const configsMap = useConfigsMap()
const { fetchInspectVars } = useSetWorkflowVarsWithValue({
...configsMap,
})
@ -99,7 +239,7 @@ const WorkflowMain = ({
invalidateConversationVarValues,
} = useInspectVarsCrud()
const hooksStore = useMemo(() => {
const hooksStore = useMemo<Partial<HooksStoreShape>>(() => {
return {
syncWorkflowDraftWhenPageClose,
doSyncWorkflowDraft,
@ -176,15 +316,23 @@ const WorkflowMain = ({
])
return (
<WorkflowWithInnerContext
nodes={nodes}
edges={edges}
viewport={viewport}
onWorkflowDataUpdate={handleWorkflowDataUpdate}
hooksStore={hooksStore as any}
<div
ref={containerRef}
className="relative h-full w-full"
>
<WorkflowChildren />
</WorkflowWithInnerContext>
<WorkflowWithInnerContext
nodes={nodes}
edges={edges}
viewport={viewport}
onWorkflowDataUpdate={handleWorkflowDataUpdate}
hooksStore={hooksStore}
cursors={filteredCursors}
myUserId={myUserId}
onlineUsers={onlineUsers}
>
<WorkflowChildren />
</WorkflowWithInnerContext>
</div>
)
}
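
A hedged sketch of how the collaboration layer might consume the minimal store adapter built above from useReactFlow; applyRemoteNodes and its membership-only diff are illustrative, while the real node diffing lives inside the collaboration manager.

import type { Edge, Node } from '@/app/components/workflow/types'

type GraphStore = {
  getState: () => {
    getNodes: () => Node[]
    setNodes: (nodes: Node[]) => void
    getEdges: () => Edge[]
    setEdges: (edges: Edge[]) => void
  }
}

export const applyRemoteNodes = (store: GraphStore, remoteNodes: Node[]) => {
  const { getNodes, setNodes } = store.getState()
  const localIds = new Set(getNodes().map(node => node.id))
  // Replace only when membership actually changed; a real implementation
  // would also diff positions and per-node data field by field.
  const changed = remoteNodes.length !== localIds.size
    || remoteNodes.some(node => !localIds.has(node.id))
  if (changed)
    setNodes(remoteNodes)
}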

View File

@ -7,6 +7,7 @@ import {
import { useShallow } from 'zustand/react/shallow'
import { useStore as useAppStore } from '@/app/components/app/store'
import Panel from '@/app/components/workflow/panel'
import CommentsPanel from '@/app/components/workflow/panel/comments-panel'
import { useStore } from '@/app/components/workflow/store'
import {
useIsChatMode,
@ -67,6 +68,7 @@ const WorkflowPanelOnRight = () => {
const showDebugAndPreviewPanel = useStore(s => s.showDebugAndPreviewPanel)
const showChatVariablePanel = useStore(s => s.showChatVariablePanel)
const showGlobalVariablePanel = useStore(s => s.showGlobalVariablePanel)
const controlMode = useStore(s => s.controlMode)
return (
<>
@ -100,6 +102,7 @@ const WorkflowPanelOnRight = () => {
<GlobalVariablePanel />
)
}
{controlMode === 'comment' && <CommentsPanel />}
</>
)
}

View File

@ -39,7 +39,7 @@ export const useAvailableNodesMetaData = () => {
TriggerPluginDefault,
]
),
], [isChatMode, startNodeMetaData])
] as AvailableNodesMetaData['nodes'], [isChatMode, startNodeMetaData])
const availableNodesMetaData = useMemo(() => mergedNodesMetaData.map((node) => {
const { metaData } = node
@ -60,7 +60,7 @@ export const useAvailableNodesMetaData = () => {
title,
},
}
}), [mergedNodesMetaData, t, docLink])
}) as AvailableNodesMetaData['nodes'], [mergedNodesMetaData, t, docLink])
const availableNodesMetaDataMap = useMemo(() => availableNodesMetaData.reduce((acc, node) => {
acc![node.metaData.type] = node

View File

@ -3,8 +3,14 @@ import { useWorkflowStore } from '@/app/components/workflow/store'
export const useGetRunAndTraceUrl = () => {
const workflowStore = useWorkflowStore()
const getWorkflowRunAndTraceUrl = useCallback((runId: string) => {
const getWorkflowRunAndTraceUrl = useCallback((runId?: string) => {
const { appId } = workflowStore.getState()
if (!appId || !runId) {
return {
runUrl: '',
traceUrl: '',
}
}
return {
runUrl: `/apps/${appId}/workflow-runs/${runId}`,
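
With runId now optional, callers can branch on the empty-string result instead of guarding for undefined themselves; a hedged usage sketch follows (the import path is assumed from this file's location).

import { useGetRunAndTraceUrl } from '@/app/components/workflow/hooks/use-get-run-and-trace-url'

const OpenRunLink = ({ runId }: { runId?: string }) => {
  const { getWorkflowRunAndTraceUrl } = useGetRunAndTraceUrl()
  const { runUrl } = getWorkflowRunAndTraceUrl(runId)
  // An empty runUrl means no app is selected or the run has not started yet.
  if (!runUrl)
    return null
  return <a href={runUrl} target="_blank" rel="noreferrer">View run</a>
}

export default OpenRunLink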

View File

@ -1,705 +0,0 @@
/**
* Test Suite for useNodesSyncDraft Hook
*
* PURPOSE:
* This hook handles syncing workflow draft to the server. The key fix being tested
* is the error handling behavior when `draft_workflow_not_sync` error occurs.
*
* MULTI-TAB PROBLEM SCENARIO:
* 1. User opens the same workflow in Tab A and Tab B (both have hash: v1)
* 2. Tab A saves successfully, server returns new hash: v2
* 3. Tab B tries to save with old hash: v1, server returns 400 error with code
* 'draft_workflow_not_sync'
* 4. BEFORE FIX: handleRefreshWorkflowDraft() was called without args, which fetched
* draft AND overwrote canvas - user lost unsaved changes in Tab B
* 5. AFTER FIX: handleRefreshWorkflowDraft(true) is called, which fetches draft but
* only updates hash (notUpdateCanvas=true), preserving user's canvas changes
*
* TESTING STRATEGY:
* We don't simulate actual tab switching UI behavior. Instead, we mock the API to
* return `draft_workflow_not_sync` error and verify:
* - The hook calls handleRefreshWorkflowDraft(true) - not handleRefreshWorkflowDraft()
* - This ensures canvas data is preserved while hash is updated for retry
*
* This is behavior-driven testing - we verify "what the code does when receiving
* specific API errors" rather than simulating complete user interaction flows.
* True multi-tab integration testing would require E2E frameworks like Playwright.
*/
import { act, renderHook, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { useNodesSyncDraft } from './use-nodes-sync-draft'
// Mock reactflow store
const mockGetNodes = vi.fn()
type MockEdge = {
id: string
source: string
target: string
data: Record<string, unknown>
}
const mockStoreState: {
getNodes: ReturnType<typeof vi.fn>
edges: MockEdge[]
transform: number[]
} = {
getNodes: mockGetNodes,
edges: [],
transform: [0, 0, 1],
}
vi.mock('reactflow', () => ({
useStoreApi: () => ({
getState: () => mockStoreState,
}),
}))
// Mock features store
const mockFeaturesState = {
features: {
opening: { enabled: false, opening_statement: '', suggested_questions: [] },
suggested: {},
text2speech: {},
speech2text: {},
citation: {},
moderation: {},
file: {},
},
}
vi.mock('@/app/components/base/features/hooks', () => ({
useFeaturesStore: () => ({
getState: () => mockFeaturesState,
}),
}))
// Mock workflow service
const mockSyncWorkflowDraft = vi.fn()
vi.mock('@/service/workflow', () => ({
syncWorkflowDraft: (...args: unknown[]) => mockSyncWorkflowDraft(...args),
}))
// Mock useNodesReadOnly
const mockGetNodesReadOnly = vi.fn()
vi.mock('@/app/components/workflow/hooks/use-workflow', () => ({
useNodesReadOnly: () => ({
getNodesReadOnly: mockGetNodesReadOnly,
}),
}))
// Mock useSerialAsyncCallback - pass through the callback
vi.mock('@/app/components/workflow/hooks/use-serial-async-callback', () => ({
useSerialAsyncCallback: (callback: (...args: unknown[]) => unknown) => callback,
}))
// Mock workflow store
const mockSetSyncWorkflowDraftHash = vi.fn()
const mockSetDraftUpdatedAt = vi.fn()
const createMockWorkflowStoreState = (overrides = {}) => ({
appId: 'test-app-id',
conversationVariables: [],
environmentVariables: [],
syncWorkflowDraftHash: 'current-hash-123',
isWorkflowDataLoaded: true,
setSyncWorkflowDraftHash: mockSetSyncWorkflowDraftHash,
setDraftUpdatedAt: mockSetDraftUpdatedAt,
...overrides,
})
const mockWorkflowStoreGetState = vi.fn()
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: mockWorkflowStoreGetState,
}),
}))
// Mock useWorkflowRefreshDraft (THE KEY DEPENDENCY FOR THIS TEST)
const mockHandleRefreshWorkflowDraft = vi.fn()
vi.mock('.', () => ({
useWorkflowRefreshDraft: () => ({
handleRefreshWorkflowDraft: mockHandleRefreshWorkflowDraft,
}),
}))
// Mock API_PREFIX
vi.mock('@/config', () => ({
API_PREFIX: '/api',
}))
// Create a mock error response that mimics the actual API error
const createMockErrorResponse = (code: string) => {
const errorBody = { code, message: 'Draft not in sync' }
let bodyUsed = false
return {
json: vi.fn().mockImplementation(() => {
bodyUsed = true
return Promise.resolve(errorBody)
}),
get bodyUsed() {
return bodyUsed
},
}
}
describe('useNodesSyncDraft', () => {
beforeEach(() => {
vi.clearAllMocks()
mockGetNodesReadOnly.mockReturnValue(false)
mockGetNodes.mockReturnValue([
{ id: 'node-1', type: 'start', data: { type: 'start' } },
{ id: 'node-2', type: 'llm', data: { type: 'llm' } },
])
mockStoreState.edges = [
{ id: 'edge-1', source: 'node-1', target: 'node-2', data: {} },
]
mockWorkflowStoreGetState.mockReturnValue(createMockWorkflowStoreState())
mockSyncWorkflowDraft.mockResolvedValue({
hash: 'new-hash-456',
updated_at: Date.now(),
})
})
afterEach(() => {
vi.resetAllMocks()
})
describe('doSyncWorkflowDraft function', () => {
it('should return doSyncWorkflowDraft function', () => {
const { result } = renderHook(() => useNodesSyncDraft())
expect(result.current.doSyncWorkflowDraft).toBeDefined()
expect(typeof result.current.doSyncWorkflowDraft).toBe('function')
})
it('should return syncWorkflowDraftWhenPageClose function', () => {
const { result } = renderHook(() => useNodesSyncDraft())
expect(result.current.syncWorkflowDraftWhenPageClose).toBeDefined()
expect(typeof result.current.syncWorkflowDraftWhenPageClose).toBe('function')
})
})
describe('successful sync', () => {
it('should call syncWorkflowDraft service on successful sync', async () => {
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith({
url: '/apps/test-app-id/workflows/draft',
params: expect.objectContaining({
hash: 'current-hash-123',
graph: expect.objectContaining({
nodes: expect.any(Array),
edges: expect.any(Array),
viewport: expect.any(Object),
}),
}),
})
})
it('should update syncWorkflowDraftHash on success', async () => {
mockSyncWorkflowDraft.mockResolvedValue({
hash: 'new-hash-789',
updated_at: 1234567890,
})
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-789')
})
it('should update draftUpdatedAt on success', async () => {
const updatedAt = 1234567890
mockSyncWorkflowDraft.mockResolvedValue({
hash: 'new-hash',
updated_at: updatedAt,
})
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSetDraftUpdatedAt).toHaveBeenCalledWith(updatedAt)
})
it('should call onSuccess callback on success', async () => {
const onSuccess = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSuccess })
})
expect(onSuccess).toHaveBeenCalled()
})
it('should call onSettled callback after success', async () => {
const onSettled = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSettled })
})
expect(onSettled).toHaveBeenCalled()
})
})
describe('sync error handling - draft_workflow_not_sync (THE KEY FIX)', () => {
/**
* This is THE KEY TEST for the bug fix.
*
* SCENARIO: Multi-tab editing
* 1. User opens workflow in Tab A and Tab B
* 2. Tab A saves draft successfully, gets new hash
* 3. Tab B tries to save with old hash
* 4. Server returns 400 with code 'draft_workflow_not_sync'
*
* BEFORE FIX:
* - handleRefreshWorkflowDraft() was called without arguments
* - This would fetch draft AND overwrite the canvas
* - User loses their unsaved changes in Tab B
*
* AFTER FIX:
* - handleRefreshWorkflowDraft(true) is called
* - This fetches draft but DOES NOT overwrite canvas
* - Only hash is updated for the next sync attempt
* - User's unsaved changes are preserved
*/
it('should call handleRefreshWorkflowDraft with notUpdateCanvas=true when draft_workflow_not_sync error occurs', async () => {
const mockError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// THE KEY ASSERTION: handleRefreshWorkflowDraft must be called with true
await waitFor(() => {
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledWith(true)
})
})
it('should NOT call handleRefreshWorkflowDraft when notRefreshWhenSyncError is true', async () => {
const mockError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
// First parameter is notRefreshWhenSyncError
await result.current.doSyncWorkflowDraft(true)
})
// Wait a bit for async operations
await new Promise(resolve => setTimeout(resolve, 100))
expect(mockHandleRefreshWorkflowDraft).not.toHaveBeenCalled()
})
it('should call onError callback when draft_workflow_not_sync error occurs', async () => {
const mockError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const onError = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onError })
})
expect(onError).toHaveBeenCalled()
})
it('should call onSettled callback after error', async () => {
const mockError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const onSettled = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSettled })
})
expect(onSettled).toHaveBeenCalled()
})
})
describe('other error handling', () => {
it('should NOT call handleRefreshWorkflowDraft for non-draft_workflow_not_sync errors', async () => {
const mockError = createMockErrorResponse('some_other_error')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Wait a bit for async operations
await new Promise(resolve => setTimeout(resolve, 100))
expect(mockHandleRefreshWorkflowDraft).not.toHaveBeenCalled()
})
it('should handle error without json method', async () => {
const mockError = new Error('Network error')
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
const onError = vi.fn()
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onError })
})
expect(onError).toHaveBeenCalled()
expect(mockHandleRefreshWorkflowDraft).not.toHaveBeenCalled()
})
it('should handle error with bodyUsed already true', async () => {
const mockError = {
json: vi.fn(),
bodyUsed: true,
}
mockSyncWorkflowDraft.mockRejectedValue(mockError)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Should not call json() when bodyUsed is true
expect(mockError.json).not.toHaveBeenCalled()
expect(mockHandleRefreshWorkflowDraft).not.toHaveBeenCalled()
})
})
describe('read-only mode', () => {
it('should not sync when nodes are read-only', async () => {
mockGetNodesReadOnly.mockReturnValue(true)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).not.toHaveBeenCalled()
})
it('should not sync on page close when nodes are read-only', () => {
mockGetNodesReadOnly.mockReturnValue(true)
// Mock sendBeacon
const mockSendBeacon = vi.fn()
Object.defineProperty(navigator, 'sendBeacon', {
value: mockSendBeacon,
writable: true,
})
const { result } = renderHook(() => useNodesSyncDraft())
act(() => {
result.current.syncWorkflowDraftWhenPageClose()
})
expect(mockSendBeacon).not.toHaveBeenCalled()
})
})
describe('workflow data not loaded', () => {
it('should not sync when workflow data is not loaded', async () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockWorkflowStoreState({ isWorkflowDataLoaded: false }),
)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).not.toHaveBeenCalled()
})
})
describe('no appId', () => {
it('should not sync when appId is not set', async () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockWorkflowStoreState({ appId: null }),
)
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).not.toHaveBeenCalled()
})
})
describe('node filtering', () => {
it('should filter out temp nodes', async () => {
mockGetNodes.mockReturnValue([
{ id: 'node-1', type: 'start', data: { type: 'start' } },
{ id: 'node-temp', type: 'custom', data: { type: 'custom', _isTempNode: true } },
{ id: 'node-2', type: 'llm', data: { type: 'llm' } },
])
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith(
expect.objectContaining({
params: expect.objectContaining({
graph: expect.objectContaining({
nodes: expect.not.arrayContaining([
expect.objectContaining({ id: 'node-temp' }),
]),
}),
}),
}),
)
})
it('should remove internal underscore properties from nodes', async () => {
mockGetNodes.mockReturnValue([
{
id: 'node-1',
type: 'start',
data: {
type: 'start',
_internalProp: 'should be removed',
_anotherInternal: true,
publicProp: 'should remain',
},
},
])
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
const callArgs = mockSyncWorkflowDraft.mock.calls[0][0]
const sentNode = callArgs.params.graph.nodes[0]
expect(sentNode.data).not.toHaveProperty('_internalProp')
expect(sentNode.data).not.toHaveProperty('_anotherInternal')
expect(sentNode.data).toHaveProperty('publicProp', 'should remain')
})
})
describe('edge filtering', () => {
it('should filter out temp edges', async () => {
mockStoreState.edges = [
{ id: 'edge-1', source: 'node-1', target: 'node-2', data: {} },
{ id: 'edge-temp', source: 'node-1', target: 'node-3', data: { _isTemp: true } },
]
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
const callArgs = mockSyncWorkflowDraft.mock.calls[0][0]
const sentEdges = callArgs.params.graph.edges
expect(sentEdges).toHaveLength(1)
expect(sentEdges[0].id).toBe('edge-1')
})
it('should remove internal underscore properties from edges', async () => {
mockStoreState.edges = [
{
id: 'edge-1',
source: 'node-1',
target: 'node-2',
data: {
_internalEdgeProp: 'should be removed',
publicEdgeProp: 'should remain',
},
},
]
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
const callArgs = mockSyncWorkflowDraft.mock.calls[0][0]
const sentEdge = callArgs.params.graph.edges[0]
expect(sentEdge.data).not.toHaveProperty('_internalEdgeProp')
expect(sentEdge.data).toHaveProperty('publicEdgeProp', 'should remain')
})
})
describe('viewport handling', () => {
it('should send current viewport from transform', async () => {
mockStoreState.transform = [100, 200, 1.5]
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith(
expect.objectContaining({
params: expect.objectContaining({
graph: expect.objectContaining({
viewport: { x: 100, y: 200, zoom: 1.5 },
}),
}),
}),
)
})
})
describe('multi-tab concurrent editing scenario (END-TO-END TEST)', () => {
/**
* Simulates the complete multi-tab scenario to verify the fix works correctly.
*
* Scenario:
* 1. Tab A and Tab B both have the workflow open with hash 'hash-v1'
* 2. Tab A saves successfully, server returns 'hash-v2'
* 3. Tab B tries to save with 'hash-v1', gets 'draft_workflow_not_sync' error
* 4. Tab B should only update hash to 'hash-v2', not overwrite canvas
* 5. Tab B can now retry save with correct hash
*/
it('should preserve canvas data during hash conflict resolution', async () => {
// Initial state: both tabs have hash-v1
mockWorkflowStoreGetState.mockReturnValue(
createMockWorkflowStoreState({ syncWorkflowDraftHash: 'hash-v1' }),
)
// Tab B tries to save with old hash, server returns error
const syncError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(syncError)
const { result } = renderHook(() => useNodesSyncDraft())
// Tab B attempts to sync
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Verify the sync was attempted with old hash
expect(mockSyncWorkflowDraft).toHaveBeenCalledWith(
expect.objectContaining({
params: expect.objectContaining({
hash: 'hash-v1',
}),
}),
)
// Verify handleRefreshWorkflowDraft was called with true (not overwrite canvas)
await waitFor(() => {
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledWith(true)
})
// The key assertion: only one argument (true) was passed
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledTimes(1)
expect(mockHandleRefreshWorkflowDraft.mock.calls[0]).toEqual([true])
})
it('should handle multiple consecutive sync failures gracefully', async () => {
// Create fresh error for each call to avoid bodyUsed issue
mockSyncWorkflowDraft
.mockRejectedValueOnce(createMockErrorResponse('draft_workflow_not_sync'))
.mockRejectedValueOnce(createMockErrorResponse('draft_workflow_not_sync'))
const { result } = renderHook(() => useNodesSyncDraft())
// First sync attempt
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Wait for first refresh call
await waitFor(() => {
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledTimes(1)
})
// Second sync attempt
await act(async () => {
await result.current.doSyncWorkflowDraft()
})
// Both should call handleRefreshWorkflowDraft with true
await waitFor(() => {
expect(mockHandleRefreshWorkflowDraft).toHaveBeenCalledTimes(2)
})
mockHandleRefreshWorkflowDraft.mock.calls.forEach((call) => {
expect(call).toEqual([true])
})
})
})
describe('callbacks behavior', () => {
it('should not call onSuccess when sync fails', async () => {
const syncError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(syncError)
const onSuccess = vi.fn()
const onError = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSuccess, onError })
})
expect(onSuccess).not.toHaveBeenCalled()
expect(onError).toHaveBeenCalled()
})
it('should always call onSettled regardless of success or failure', async () => {
const onSettled = vi.fn()
const { result } = renderHook(() => useNodesSyncDraft())
// Test success case
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSettled })
})
expect(onSettled).toHaveBeenCalledTimes(1)
// Reset
onSettled.mockClear()
// Test failure case
const syncError = createMockErrorResponse('draft_workflow_not_sync')
mockSyncWorkflowDraft.mockRejectedValue(syncError)
await act(async () => {
await result.current.doSyncWorkflowDraft(false, { onSettled })
})
expect(onSettled).toHaveBeenCalledTimes(1)
})
})
})

View File

@ -1,11 +1,15 @@
import type { WorkflowDraftFeaturesPayload } from '@/service/workflow'
import { produce } from 'immer'
import { useParams } from 'next/navigation'
import { useCallback } from 'react'
import { useStoreApi } from 'reactflow'
import { useFeaturesStore } from '@/app/components/base/features/hooks'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { useSerialAsyncCallback } from '@/app/components/workflow/hooks/use-serial-async-callback'
import { useNodesReadOnly } from '@/app/components/workflow/hooks/use-workflow'
import { useWorkflowStore } from '@/app/components/workflow/store'
import { API_PREFIX } from '@/config'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { syncWorkflowDraft } from '@/service/workflow'
import { useWorkflowRefreshDraft } from '.'
@ -15,6 +19,8 @@ export const useNodesSyncDraft = () => {
const featuresStore = useFeaturesStore()
const { getNodesReadOnly } = useNodesReadOnly()
const { handleRefreshWorkflowDraft } = useWorkflowRefreshDraft()
const params = useParams()
const isCollaborationEnabled = useGlobalPublicStore(s => s.systemFeatures.enable_collaboration_mode)
const getPostParams = useCallback(() => {
const {
@ -52,7 +58,16 @@ export const useNodesSyncDraft = () => {
})
})
})
const viewport = { x, y, zoom }
const featuresPayload: WorkflowDraftFeaturesPayload = {
opening_statement: features.opening?.enabled ? (features.opening?.opening_statement || '') : '',
suggested_questions: features.opening?.enabled ? (features.opening?.suggested_questions || []) : [],
suggested_questions_after_answer: features.suggested,
text_to_speech: features.text2speech,
speech_to_text: features.speech2text,
retriever_resource: features.citation,
sensitive_word_avoidance: features.moderation,
file_upload: features.file,
}
return {
url: `/apps/${appId}/workflows/draft`,
@ -60,33 +75,41 @@ export const useNodesSyncDraft = () => {
graph: {
nodes: producedNodes,
edges: producedEdges,
viewport,
},
features: {
opening_statement: features.opening?.enabled ? (features.opening?.opening_statement || '') : '',
suggested_questions: features.opening?.enabled ? (features.opening?.suggested_questions || []) : [],
suggested_questions_after_answer: features.suggested,
text_to_speech: features.text2speech,
speech_to_text: features.speech2text,
retriever_resource: features.citation,
sensitive_word_avoidance: features.moderation,
file_upload: features.file,
viewport: {
x,
y,
zoom,
},
},
features: featuresPayload,
environment_variables: environmentVariables,
conversation_variables: conversationVariables,
hash: syncWorkflowDraftHash,
_is_collaborative: isCollaborationEnabled,
},
}
}, [store, featuresStore, workflowStore])
}, [store, featuresStore, workflowStore, isCollaborationEnabled])
const syncWorkflowDraftWhenPageClose = useCallback(() => {
if (getNodesReadOnly())
return
// Check leader status at sync time
const currentIsLeader = isCollaborationEnabled ? collaborationManager.getIsLeader() : true
// Only allow leader to sync data
if (isCollaborationEnabled && !currentIsLeader)
return
const postParams = getPostParams()
if (postParams)
navigator.sendBeacon(`${API_PREFIX}${postParams.url}`, JSON.stringify(postParams.params))
}, [getPostParams, getNodesReadOnly])
if (postParams) {
navigator.sendBeacon(
`${API_PREFIX}/apps/${params.appId}/workflows/draft`,
JSON.stringify(postParams.params),
)
}
}, [getPostParams, params.appId, getNodesReadOnly, isCollaborationEnabled])
const performSync = useCallback(async (
notRefreshWhenSyncError?: boolean,
@ -98,6 +121,17 @@ export const useNodesSyncDraft = () => {
) => {
if (getNodesReadOnly())
return
// Check leader status at sync time
const currentIsLeader = isCollaborationEnabled ? collaborationManager.getIsLeader() : true
// If not leader, request the leader to sync
if (isCollaborationEnabled && !currentIsLeader) {
if (isCollaborationEnabled)
collaborationManager.emitSyncRequest()
callback?.onSettled?.()
return
}
const postParams = getPostParams()
if (postParams) {
@ -105,8 +139,12 @@ export const useNodesSyncDraft = () => {
setSyncWorkflowDraftHash,
setDraftUpdatedAt,
} = workflowStore.getState()
try {
const res = await syncWorkflowDraft(postParams)
const res = await syncWorkflowDraft({
url: postParams.url,
params: postParams.params,
})
setSyncWorkflowDraftHash(res.hash)
setDraftUpdatedAt(res.updated_at)
callback?.onSuccess?.()
@ -115,7 +153,7 @@ export const useNodesSyncDraft = () => {
if (error && error.json && !error.bodyUsed) {
error.json().then((err: any) => {
if (err.code === 'draft_workflow_not_sync' && !notRefreshWhenSyncError)
handleRefreshWorkflowDraft(true)
handleRefreshWorkflowDraft()
})
}
callback?.onError?.()
@ -124,7 +162,7 @@ export const useNodesSyncDraft = () => {
callback?.onSettled?.()
}
}
}, [workflowStore, getPostParams, getNodesReadOnly, handleRefreshWorkflowDraft])
}, [workflowStore, getPostParams, getNodesReadOnly, handleRefreshWorkflowDraft, isCollaborationEnabled])
const doSyncWorkflowDraft = useSerialAsyncCallback(performSync, getNodesReadOnly)
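
A condensed, hedged sketch of the leader gate added above: followers never persist the draft themselves but ask the current leader to do it. getIsLeader and emitSyncRequest are the collaborationManager methods used in this file; saveDraft stands in for the actual syncWorkflowDraft call.

import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'

export const syncDraftAsLeaderOrDelegate = async (
  saveDraft: () => Promise<void>,
  collaborationEnabled: boolean,
) => {
  const isLeader = collaborationEnabled ? collaborationManager.getIsLeader() : true
  if (collaborationEnabled && !isLeader) {
    // Another client holds the leader role; broadcast a sync request so it
    // persists the shared state on everyone's behalf.
    collaborationManager.emitSyncRequest()
    return
  }
  await saveDraft()
}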

View File

@ -1,556 +0,0 @@
/**
* Test Suite for useWorkflowRefreshDraft Hook
*
* PURPOSE:
* This hook is responsible for refreshing workflow draft data from the server.
* The key fix being tested is the `notUpdateCanvas` parameter behavior.
*
* MULTI-TAB PROBLEM SCENARIO:
* 1. User opens the same workflow in Tab A and Tab B (both have hash: v1)
* 2. Tab A saves successfully, server returns new hash: v2
* 3. Tab B tries to save with old hash: v1, server returns 400 error (draft_workflow_not_sync)
* 4. BEFORE FIX: handleRefreshWorkflowDraft() was called without args, which fetched
* draft AND overwrote canvas - user lost unsaved changes in Tab B
* 5. AFTER FIX: handleRefreshWorkflowDraft(true) is called, which fetches draft but
* only updates hash, preserving user's canvas changes
*
* TESTING STRATEGY:
* We don't simulate actual tab switching UI behavior. Instead, we test the hook's
* response to specific inputs:
* - When notUpdateCanvas=true: should NOT call handleUpdateWorkflowCanvas
* - When notUpdateCanvas=false/undefined: should call handleUpdateWorkflowCanvas
*
* This is behavior-driven testing - we verify "what the code does when given specific
* inputs" rather than simulating complete user interaction flows.
*/
import { act, renderHook, waitFor } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { useWorkflowRefreshDraft } from './use-workflow-refresh-draft'
// Mock the workflow service
const mockFetchWorkflowDraft = vi.fn()
vi.mock('@/service/workflow', () => ({
fetchWorkflowDraft: (...args: unknown[]) => mockFetchWorkflowDraft(...args),
}))
// Mock the workflow update hook
const mockHandleUpdateWorkflowCanvas = vi.fn()
vi.mock('@/app/components/workflow/hooks', () => ({
useWorkflowUpdate: () => ({
handleUpdateWorkflowCanvas: mockHandleUpdateWorkflowCanvas,
}),
}))
// Mock store state
const mockSetSyncWorkflowDraftHash = vi.fn()
const mockSetIsSyncingWorkflowDraft = vi.fn()
const mockSetEnvironmentVariables = vi.fn()
const mockSetEnvSecrets = vi.fn()
const mockSetConversationVariables = vi.fn()
const mockSetIsWorkflowDataLoaded = vi.fn()
const mockCancelDebouncedSync = vi.fn()
const createMockStoreState = (overrides = {}) => ({
appId: 'test-app-id',
setSyncWorkflowDraftHash: mockSetSyncWorkflowDraftHash,
setIsSyncingWorkflowDraft: mockSetIsSyncingWorkflowDraft,
setEnvironmentVariables: mockSetEnvironmentVariables,
setEnvSecrets: mockSetEnvSecrets,
setConversationVariables: mockSetConversationVariables,
setIsWorkflowDataLoaded: mockSetIsWorkflowDataLoaded,
isWorkflowDataLoaded: true,
debouncedSyncWorkflowDraft: {
cancel: mockCancelDebouncedSync,
},
...overrides,
})
const mockWorkflowStoreGetState = vi.fn()
vi.mock('@/app/components/workflow/store', () => ({
useWorkflowStore: () => ({
getState: mockWorkflowStoreGetState,
}),
}))
// Default mock response from fetchWorkflowDraft
const createMockDraftResponse = (overrides = {}) => ({
hash: 'new-hash-12345',
graph: {
nodes: [{ id: 'node-1', type: 'start', data: {} }],
edges: [{ id: 'edge-1', source: 'node-1', target: 'node-2' }],
viewport: { x: 100, y: 200, zoom: 1.5 },
},
environment_variables: [
{ id: 'env-1', name: 'API_KEY', value: 'secret-key', value_type: 'secret' },
{ id: 'env-2', name: 'BASE_URL', value: 'https://api.example.com', value_type: 'string' },
],
conversation_variables: [
{ id: 'conv-1', name: 'user_input', value: 'test' },
],
...overrides,
})
describe('useWorkflowRefreshDraft', () => {
beforeEach(() => {
vi.clearAllMocks()
mockWorkflowStoreGetState.mockReturnValue(createMockStoreState())
mockFetchWorkflowDraft.mockResolvedValue(createMockDraftResponse())
})
afterEach(() => {
vi.resetAllMocks()
})
describe('handleRefreshWorkflowDraft function', () => {
it('should return handleRefreshWorkflowDraft function', () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
expect(result.current.handleRefreshWorkflowDraft).toBeDefined()
expect(typeof result.current.handleRefreshWorkflowDraft).toBe('function')
})
})
describe('notUpdateCanvas parameter behavior (THE KEY FIX)', () => {
it('should NOT call handleUpdateWorkflowCanvas when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalledWith('/apps/test-app-id/workflows/draft')
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
// THE KEY ASSERTION: Canvas should NOT be updated when notUpdateCanvas is true
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
it('should call handleUpdateWorkflowCanvas when notUpdateCanvas is false', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(false)
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalledWith('/apps/test-app-id/workflows/draft')
})
await waitFor(() => {
// Canvas SHOULD be updated when notUpdateCanvas is false
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [{ id: 'node-1', type: 'start', data: {} }],
edges: [{ id: 'edge-1', source: 'node-1', target: 'node-2' }],
viewport: { x: 100, y: 200, zoom: 1.5 },
})
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
})
it('should call handleUpdateWorkflowCanvas when notUpdateCanvas is undefined (default)', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalled()
})
await waitFor(() => {
// Canvas SHOULD be updated when notUpdateCanvas is undefined
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalled()
})
})
it('should still update hash even when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
// Verify canvas was NOT updated
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
it('should still update environment variables when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvironmentVariables).toHaveBeenCalledWith([
{ id: 'env-1', name: 'API_KEY', value: '[__HIDDEN__]', value_type: 'secret' },
{ id: 'env-2', name: 'BASE_URL', value: 'https://api.example.com', value_type: 'string' },
])
})
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
it('should still update env secrets when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvSecrets).toHaveBeenCalledWith({
'env-1': 'secret-key',
})
})
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
it('should still update conversation variables when notUpdateCanvas is true', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetConversationVariables).toHaveBeenCalledWith([
{ id: 'conv-1', name: 'user_input', value: 'test' },
])
})
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
})
})
describe('syncing state management', () => {
it('should set isSyncingWorkflowDraft to true before fetch', () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
expect(mockSetIsSyncingWorkflowDraft).toHaveBeenCalledWith(true)
})
it('should set isSyncingWorkflowDraft to false after fetch completes', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockSetIsSyncingWorkflowDraft).toHaveBeenCalledWith(false)
})
})
it('should set isSyncingWorkflowDraft to false even when fetch fails', async () => {
mockFetchWorkflowDraft.mockRejectedValue(new Error('Network error'))
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockSetIsSyncingWorkflowDraft).toHaveBeenCalledWith(false)
})
})
})
describe('isWorkflowDataLoaded flag management', () => {
it('should set isWorkflowDataLoaded to false before fetch when it was true', () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockStoreState({ isWorkflowDataLoaded: true }),
)
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
expect(mockSetIsWorkflowDataLoaded).toHaveBeenCalledWith(false)
})
it('should set isWorkflowDataLoaded to true after fetch succeeds', async () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
expect(mockSetIsWorkflowDataLoaded).toHaveBeenCalledWith(true)
})
})
it('should restore isWorkflowDataLoaded when fetch fails and it was previously loaded', async () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockStoreState({ isWorkflowDataLoaded: true }),
)
mockFetchWorkflowDraft.mockRejectedValue(new Error('Network error'))
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
await waitFor(() => {
// Should restore to true because wasLoaded was true
expect(mockSetIsWorkflowDataLoaded).toHaveBeenLastCalledWith(true)
})
})
})
describe('debounced sync cancellation', () => {
it('should cancel debounced sync before fetching draft', () => {
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft()
})
expect(mockCancelDebouncedSync).toHaveBeenCalled()
})
it('should handle case when debouncedSyncWorkflowDraft has no cancel method', () => {
mockWorkflowStoreGetState.mockReturnValue(
createMockStoreState({ debouncedSyncWorkflowDraft: {} }),
)
const { result } = renderHook(() => useWorkflowRefreshDraft())
// Should not throw
expect(() => {
act(() => {
result.current.handleRefreshWorkflowDraft()
})
}).not.toThrow()
})
})
describe('edge cases', () => {
it('should handle empty graph in response', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-empty',
graph: null,
environment_variables: [],
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(false)
})
await waitFor(() => {
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [],
edges: [],
viewport: { x: 0, y: 0, zoom: 1 },
})
})
})
it('should handle missing viewport in response', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-no-viewport',
graph: {
nodes: [{ id: 'node-1' }],
edges: [],
viewport: null,
},
environment_variables: [],
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(false)
})
await waitFor(() => {
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalledWith({
nodes: [{ id: 'node-1' }],
edges: [],
viewport: { x: 0, y: 0, zoom: 1 },
})
})
})
it('should handle missing environment_variables in response', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-no-env',
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
environment_variables: undefined,
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvironmentVariables).toHaveBeenCalledWith([])
expect(mockSetEnvSecrets).toHaveBeenCalledWith({})
})
})
it('should handle missing conversation_variables in response', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-no-conv',
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
environment_variables: [],
conversation_variables: undefined,
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetConversationVariables).toHaveBeenCalledWith([])
})
})
it('should filter only secret type for envSecrets', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-mixed-env',
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
environment_variables: [
{ id: 'env-1', name: 'SECRET_KEY', value: 'secret-value', value_type: 'secret' },
{ id: 'env-2', name: 'PUBLIC_URL', value: 'https://example.com', value_type: 'string' },
{ id: 'env-3', name: 'ANOTHER_SECRET', value: 'another-secret', value_type: 'secret' },
],
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvSecrets).toHaveBeenCalledWith({
'env-1': 'secret-value',
'env-3': 'another-secret',
})
})
})
it('should hide secret values in environment variables', async () => {
mockFetchWorkflowDraft.mockResolvedValue({
hash: 'hash-secrets',
graph: { nodes: [], edges: [], viewport: { x: 0, y: 0, zoom: 1 } },
environment_variables: [
{ id: 'env-1', name: 'SECRET_KEY', value: 'super-secret', value_type: 'secret' },
{ id: 'env-2', name: 'PUBLIC_URL', value: 'https://example.com', value_type: 'string' },
],
conversation_variables: [],
})
const { result } = renderHook(() => useWorkflowRefreshDraft())
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockSetEnvironmentVariables).toHaveBeenCalledWith([
{ id: 'env-1', name: 'SECRET_KEY', value: '[__HIDDEN__]', value_type: 'secret' },
{ id: 'env-2', name: 'PUBLIC_URL', value: 'https://example.com', value_type: 'string' },
])
})
})
})
describe('multi-tab scenario simulation (THE BUG FIX VERIFICATION)', () => {
/**
* This test verifies the fix for the multi-tab scenario:
* 1. User opens workflow in Tab A and Tab B
* 2. Tab A saves draft successfully
* 3. Tab B tries to save but gets 'draft_workflow_not_sync' error (hash mismatch)
* 4. BEFORE FIX: Tab B would fetch the draft and overwrite the canvas with stale data
* 5. AFTER FIX: Tab B refreshes only the hash and variable state, preserving the user's canvas changes
*/
it('should only update hash when called with notUpdateCanvas=true (simulating sync error recovery)', async () => {
const mockResponse = createMockDraftResponse()
mockFetchWorkflowDraft.mockResolvedValue(mockResponse)
const { result } = renderHook(() => useWorkflowRefreshDraft())
// Simulate the sync error recovery scenario where notUpdateCanvas is true
act(() => {
result.current.handleRefreshWorkflowDraft(true)
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalled()
})
await waitFor(() => {
// Hash should be updated for next sync attempt
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
// Canvas should NOT be updated - user's changes are preserved
expect(mockHandleUpdateWorkflowCanvas).not.toHaveBeenCalled()
// Other states should still be updated
expect(mockSetEnvironmentVariables).toHaveBeenCalled()
expect(mockSetConversationVariables).toHaveBeenCalled()
})
it('should update canvas when called with notUpdateCanvas=false (normal refresh)', async () => {
const mockResponse = createMockDraftResponse()
mockFetchWorkflowDraft.mockResolvedValue(mockResponse)
const { result } = renderHook(() => useWorkflowRefreshDraft())
// Simulate normal refresh scenario
act(() => {
result.current.handleRefreshWorkflowDraft(false)
})
await waitFor(() => {
expect(mockFetchWorkflowDraft).toHaveBeenCalled()
})
await waitFor(() => {
expect(mockSetSyncWorkflowDraftHash).toHaveBeenCalledWith('new-hash-12345')
})
// Canvas SHOULD be updated in normal refresh
await waitFor(() => {
expect(mockHandleUpdateWorkflowCanvas).toHaveBeenCalled()
})
})
})
})


@@ -8,7 +8,7 @@ export const useWorkflowRefreshDraft = () => {
const workflowStore = useWorkflowStore()
const { handleUpdateWorkflowCanvas } = useWorkflowUpdate()
const handleRefreshWorkflowDraft = useCallback((notUpdateCanvas?: boolean) => {
const handleRefreshWorkflowDraft = useCallback(() => {
const {
appId,
setSyncWorkflowDraftHash,
@@ -31,14 +31,12 @@ export const useWorkflowRefreshDraft = () => {
fetchWorkflowDraft(`/apps/${appId}/workflows/draft`)
.then((response) => {
// Ensure we have a valid workflow structure with viewport
if (!notUpdateCanvas) {
const workflowData: WorkflowDataUpdater = {
nodes: response.graph?.nodes || [],
edges: response.graph?.edges || [],
viewport: response.graph?.viewport || { x: 0, y: 0, zoom: 1 },
}
handleUpdateWorkflowCanvas(workflowData)
const workflowData: WorkflowDataUpdater = {
nodes: response.graph?.nodes || [],
edges: response.graph?.edges || [],
viewport: response.graph?.viewport || { x: 0, y: 0, zoom: 1 },
}
handleUpdateWorkflowCanvas(workflowData)
setSyncWorkflowDraftHash(response.hash)
setEnvSecrets((response.environment_variables || []).filter(env => env.value_type === 'secret').reduce((acc, env) => {
acc[env.id] = env.value


@@ -12,6 +12,7 @@ import { FeaturesProvider } from '@/app/components/base/features'
import Loading from '@/app/components/base/loading'
import { FILE_EXTS } from '@/app/components/base/prompt-editor/constants'
import WorkflowWithDefaultContext from '@/app/components/workflow'
import { collaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import {
WorkflowContextProvider,
} from '@/app/components/workflow/context'
@@ -83,15 +84,20 @@ const WorkflowAppWithAdditionalContext = () => {
}, [workflowStore])
const nodesData = useMemo(() => {
if (data)
return initialNodes(data.graph.nodes, data.graph.edges)
if (data) {
const processedNodes = initialNodes(data.graph.nodes, data.graph.edges)
collaborationManager.setNodes([], processedNodes)
return processedNodes
}
return []
}, [data])
const edgesData = useMemo(() => {
if (data)
return initialEdges(data.graph.edges, data.graph.nodes)
const edgesData = useMemo(() => {
if (data) {
const processedEdges = initialEdges(data.graph.edges, data.graph.nodes)
collaborationManager.setEdges([], processedEdges)
return processedEdges
}
return []
}, [data])


@@ -35,7 +35,7 @@ const NodeSelectorWrapper = (props: NodeSelectorProps) => {
return true
})
}, [availableNodesMetaData?.nodes])
}, [availableNodesMetaData?.nodes]) as NodeSelectorProps['blocks']
return (
<NodeSelector


@@ -11,9 +11,9 @@ import {
} from 'react'
import {
useReactFlow,
useStoreApi,
useViewport,
} from 'reactflow'
import { useCollaborativeWorkflow } from '@/app/components/workflow/hooks/use-collaborative-workflow'
import { CUSTOM_NODE } from './constants'
import { useAutoGenerateWebhookUrl, useNodesInteractions, useNodesSyncDraft, useWorkflowHistory, WorkflowHistoryEvent } from './hooks'
import CustomNode from './nodes'
@@ -32,7 +32,6 @@ type Props = {
const CandidateNodeMain: FC<Props> = ({
candidateNode,
}) => {
const store = useStoreApi()
const reactflow = useReactFlow()
const workflowStore = useWorkflowStore()
const mousePosition = useStore(s => s.mousePosition)
@@ -41,15 +40,12 @@ const CandidateNodeMain: FC<Props> = ({
const { saveStateToHistory } = useWorkflowHistory()
const { handleSyncWorkflowDraft } = useNodesSyncDraft()
const autoGenerateWebhookUrl = useAutoGenerateWebhookUrl()
const collaborativeWorkflow = useCollaborativeWorkflow()
useEventListener('click', (e) => {
e.preventDefault()
const {
getNodes,
setNodes,
} = store.getState()
const { screenToFlowPosition } = reactflow
const nodes = getNodes()
const { nodes, setNodes } = collaborativeWorkflow.getState()
const { x, y } = screenToFlowPosition({ x: mousePosition.pageX, y: mousePosition.pageY })
const newNodes = produce(nodes, (draft) => {
draft.push({


@@ -0,0 +1,78 @@
import type { FC } from 'react'
import type { CursorPosition, OnlineUser } from '@/app/components/workflow/collaboration/types'
import { useViewport } from 'reactflow'
import { getUserColor } from '../utils/user-color'
type UserCursorsProps = {
cursors: Record<string, CursorPosition>
myUserId: string | null
onlineUsers: OnlineUser[]
}
const UserCursors: FC<UserCursorsProps> = ({
cursors,
myUserId,
onlineUsers,
}) => {
const viewport = useViewport()
const convertToScreenCoordinates = (cursor: CursorPosition) => {
// Convert world coordinates to screen coordinates using current viewport
const screenX = cursor.x * viewport.zoom + viewport.x
const screenY = cursor.y * viewport.zoom + viewport.y
return { x: screenX, y: screenY }
}
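// Worked example with illustrative numbers: for viewport { x: 100, y: 200, zoom: 1.5 },
// a cursor stored at world position (40, 20) renders at
// screen = (40 * 1.5 + 100, 20 * 1.5 + 200) = (160, 230).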
return (
<>
{Object.entries(cursors || {}).map(([userId, cursor]) => {
if (userId === myUserId)
return null
const userInfo = onlineUsers.find(user => user.user_id === userId)
const userName = userInfo?.username || `User ${userId.slice(-4)}`
const userColor = getUserColor(userId)
const screenPos = convertToScreenCoordinates(cursor)
return (
<div
key={userId}
className="pointer-events-none absolute z-[8] transition-all duration-150 ease-out"
style={{
left: screenPos.x,
top: screenPos.y,
}}
>
<svg
width="20"
height="20"
viewBox="0 0 20 20"
fill="none"
xmlns="http://www.w3.org/2000/svg"
className="drop-shadow-md"
>
<path
d="M5 3L5 15L8 11.5L11 16L13 15L10 10.5L14 10.5L5 3Z"
fill={userColor}
stroke="white"
strokeWidth="1.5"
strokeLinejoin="round"
/>
</svg>
<div
className="absolute left-4 top-4 max-w-[120px] overflow-hidden text-ellipsis whitespace-nowrap rounded px-1.5 py-0.5 text-[11px] font-medium text-white shadow-sm"
style={{
backgroundColor: userColor,
}}
>
{userName}
</div>
</div>
)
})}
</>
)
}
export default UserCursors


@@ -0,0 +1,331 @@
import type { LoroMap } from 'loro-crdt'
import type { Node } from '@/app/components/workflow/types'
import { LoroDoc } from 'loro-crdt'
import { BlockEnum } from '@/app/components/workflow/types'
import { CollaborationManager } from '../collaboration-manager'
const NODE_ID = 'node-1'
const LLM_NODE_ID = 'llm-node'
const PARAM_NODE_ID = 'parameter-node'
type WorkflowVariable = {
variable: string
label: string
type: string
required: boolean
default: string
max_length: number
placeholder: string
options: string[]
hint: string
}
type PromptTemplateItem = {
id: string
role: string
text: string
}
type ParameterItem = {
description: string
name: string
required: boolean
type: string
}
type StartNodeData = {
variables: WorkflowVariable[]
}
type LLMNodeData = {
model: {
mode: string
name: string
provider: string
completion_params: {
temperature: number
}
}
context: {
enabled: boolean
variable_selector: string[]
}
vision: {
enabled: boolean
}
prompt_template: PromptTemplateItem[]
}
type ParameterExtractorNodeData = {
model: {
mode: string
name: string
provider: string
completion_params: {
temperature: number
}
}
parameters: ParameterItem[]
query: unknown[]
reasoning_mode: string
vision: {
enabled: boolean
}
}
type CollaborationManagerInternals = {
doc: LoroDoc
nodesMap: LoroMap
edgesMap: LoroMap
syncNodes: (oldNodes: Node[], newNodes: Node[]) => void
}
const createNode = (variables: string[]): Node<StartNodeData> => ({
id: NODE_ID,
type: 'custom',
position: { x: 0, y: 0 },
data: {
type: BlockEnum.Start,
title: 'Start',
desc: '',
variables: variables.map(name => ({
variable: name,
label: name,
type: 'text-input',
required: true,
default: '',
max_length: 48,
placeholder: '',
options: [],
hint: '',
})),
},
})
const createLLMNode = (templates: PromptTemplateItem[]): Node<LLMNodeData> => ({
id: LLM_NODE_ID,
type: 'custom',
position: { x: 200, y: 200 },
data: {
type: BlockEnum.LLM,
title: 'LLM',
desc: '',
selected: false,
model: {
mode: 'chat',
name: 'gemini-2.5-pro',
provider: 'langgenius/gemini/google',
completion_params: {
temperature: 0.7,
},
},
context: {
enabled: false,
variable_selector: [],
},
vision: {
enabled: false,
},
prompt_template: templates,
},
})
const createParameterExtractorNode = (parameters: ParameterItem[]): Node<ParameterExtractorNodeData> => ({
id: PARAM_NODE_ID,
type: 'custom',
position: { x: 400, y: 120 },
data: {
type: BlockEnum.ParameterExtractor,
title: 'ParameterExtractor',
desc: '',
selected: true,
model: {
mode: 'chat',
name: '',
provider: '',
completion_params: {
temperature: 0.7,
},
},
query: [],
reasoning_mode: 'prompt',
parameters,
vision: {
enabled: false,
},
},
})
const getManagerInternals = (manager: CollaborationManager): CollaborationManagerInternals =>
manager as unknown as CollaborationManagerInternals
const getManager = (doc: LoroDoc) => {
const manager = new CollaborationManager()
const internals = getManagerInternals(manager)
internals.doc = doc
internals.nodesMap = doc.getMap('nodes')
internals.edgesMap = doc.getMap('edges')
return manager
}
const deepClone = <T>(value: T): T => JSON.parse(JSON.stringify(value))
const syncNodes = (manager: CollaborationManager, previous: Node[], next: Node[]) => {
const internals = getManagerInternals(manager)
internals.syncNodes(previous, next)
}
const exportNodes = (manager: CollaborationManager) => manager.getNodes()
describe('Loro merge behavior smoke test', () => {
it('inspects concurrent edits after merge', () => {
const docA = new LoroDoc()
const managerA = getManager(docA)
syncNodes(managerA, [], [createNode(['a'])])
const snapshot = docA.export({ mode: 'snapshot' })
const docB = LoroDoc.fromSnapshot(snapshot)
const managerB = getManager(docB)
syncNodes(managerA, [createNode(['a'])], [createNode(['a', 'b'])])
syncNodes(managerB, [createNode(['a'])], [createNode(['a', 'c'])])
const updateForA = docB.export({ mode: 'update', from: docA.version() })
docA.import(updateForA)
const updateForB = docA.export({ mode: 'update', from: docB.version() })
docB.import(updateForB)
const finalA = exportNodes(managerA)
const finalB = exportNodes(managerB)
expect(finalA.length).toBe(1)
expect(finalB.length).toBe(1)
})
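// The exchange above follows Loro's delta-sync pattern: each replica exports only the
// operations the peer has not seen (export({ mode: 'update', from: peer.version() }))
// and imports the peer's delta, after which both documents hold the same node set.
// The assertion is intentionally loose here; field-level merge results are checked below.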
it('merges prompt template insertions and edits across replicas', () => {
const baseTemplate = [
{
id: 'system-1',
role: 'system',
text: 'base instruction',
},
]
const docA = new LoroDoc()
const managerA = getManager(docA)
syncNodes(managerA, [], [createLLMNode(deepClone(baseTemplate))])
const snapshot = docA.export({ mode: 'snapshot' })
const docB = LoroDoc.fromSnapshot(snapshot)
const managerB = getManager(docB)
const additionTemplate = [
...baseTemplate,
{
id: 'user-1',
role: 'user',
text: 'hello from docA',
},
]
syncNodes(managerA, [createLLMNode(deepClone(baseTemplate))], [createLLMNode(deepClone(additionTemplate))])
const editedTemplate = [
{
id: 'system-1',
role: 'system',
text: 'updated by docB',
},
]
syncNodes(managerB, [createLLMNode(deepClone(baseTemplate))], [createLLMNode(deepClone(editedTemplate))])
const updateForA = docB.export({ mode: 'update', from: docA.version() })
docA.import(updateForA)
const updateForB = docA.export({ mode: 'update', from: docB.version() })
docB.import(updateForB)
const finalA = exportNodes(managerA).find(node => node.id === LLM_NODE_ID) as Node<LLMNodeData> | undefined
const finalB = exportNodes(managerB).find(node => node.id === LLM_NODE_ID) as Node<LLMNodeData> | undefined
expect(finalA).toBeDefined()
expect(finalB).toBeDefined()
const expectedTemplates = [
{
id: 'system-1',
role: 'system',
text: 'updated by docB',
},
{
id: 'user-1',
role: 'user',
text: 'hello from docA',
},
]
expect(finalA!.data.prompt_template).toEqual(expectedTemplates)
expect(finalB!.data.prompt_template).toEqual(expectedTemplates)
})
it('converges when parameter lists are edited concurrently', () => {
const baseParameters = [
{ description: 'bb', name: 'aa', required: false, type: 'string' },
{ description: 'dd', name: 'cc', required: false, type: 'string' },
]
const docA = new LoroDoc()
const managerA = getManager(docA)
syncNodes(managerA, [], [createParameterExtractorNode(deepClone(baseParameters))])
const snapshot = docA.export({ mode: 'snapshot' })
const docB = LoroDoc.fromSnapshot(snapshot)
const managerB = getManager(docB)
const docAUpdate = [
{ description: 'bb updated by A', name: 'aa', required: true, type: 'string' },
{ description: 'dd', name: 'cc', required: false, type: 'string' },
{ description: 'new from A', name: 'ee', required: false, type: 'number' },
]
syncNodes(
managerA,
[createParameterExtractorNode(deepClone(baseParameters))],
[createParameterExtractorNode(deepClone(docAUpdate))],
)
const docBUpdate = [
{ description: 'bb', name: 'aa', required: false, type: 'string' },
{ description: 'dd updated by B', name: 'cc', required: true, type: 'string' },
]
syncNodes(
managerB,
[createParameterExtractorNode(deepClone(baseParameters))],
[createParameterExtractorNode(deepClone(docBUpdate))],
)
const updateForA = docB.export({ mode: 'update', from: docA.version() })
docA.import(updateForA)
const updateForB = docA.export({ mode: 'update', from: docB.version() })
docB.import(updateForB)
const finalA = exportNodes(managerA).find(node => node.id === PARAM_NODE_ID) as
| Node<ParameterExtractorNodeData>
| undefined
const finalB = exportNodes(managerB).find(node => node.id === PARAM_NODE_ID) as
| Node<ParameterExtractorNodeData>
| undefined
expect(finalA).toBeDefined()
expect(finalB).toBeDefined()
const expectedParameters = [
{ description: 'bb updated by A', name: 'aa', required: true, type: 'string' },
{ description: 'dd updated by B', name: 'cc', required: true, type: 'string' },
{ description: 'new from A', name: 'ee', required: false, type: 'number' },
]
expect(finalA!.data.parameters).toEqual(expectedParameters)
expect(finalB!.data.parameters).toEqual(expectedParameters)
})
})


@@ -0,0 +1,763 @@
import type { LoroMap } from 'loro-crdt'
import type {
NodePanelPresenceMap,
NodePanelPresenceUser,
} from '@/app/components/workflow/collaboration/types/collaboration'
import type { CommonNodeType, Edge, Node } from '@/app/components/workflow/types'
import { LoroDoc } from 'loro-crdt'
import { Position } from 'reactflow'
import { CollaborationManager } from '@/app/components/workflow/collaboration/core/collaboration-manager'
import { BlockEnum } from '@/app/components/workflow/types'
const NODE_ID = '1760342909316'
type WorkflowVariable = {
default: string
hint: string
label: string
max_length: number
options: string[]
placeholder: string
required: boolean
type: string
variable: string
}
type PromptTemplateItem = {
id: string
role: string
text: string
}
type ParameterItem = {
description: string
name: string
required: boolean
type: string
}
type NodePanelPresenceEventData = {
nodeId: string
action: 'open' | 'close'
user: NodePanelPresenceUser
clientId: string
timestamp?: number
}
type StartNodeData = {
variables: WorkflowVariable[]
}
type LLMNodeData = {
context: {
enabled: boolean
variable_selector: string[]
}
model: {
mode: string
name: string
provider: string
completion_params: {
temperature: number
}
}
prompt_template: PromptTemplateItem[]
vision: {
enabled: boolean
}
}
type ParameterExtractorNodeData = {
model: {
mode: string
name: string
provider: string
completion_params: {
temperature: number
}
}
parameters: ParameterItem[]
query: unknown[]
reasoning_mode: string
vision: {
enabled: boolean
}
}
type LLMNodeDataWithUnknownTemplate = Omit<LLMNodeData, 'prompt_template'> & {
prompt_template: unknown
}
type ManagerDoc = LoroDoc | { commit: () => void }
type CollaborationManagerInternals = {
doc: ManagerDoc
nodesMap: LoroMap
edgesMap: LoroMap
syncNodes: (oldNodes: Node[], newNodes: Node[]) => void
syncEdges: (oldEdges: Edge[], newEdges: Edge[]) => void
applyNodePanelPresenceUpdate: (update: NodePanelPresenceEventData) => void
forceDisconnect: () => void
activeConnections: Set<string>
isUndoRedoInProgress: boolean
}
const createVariable = (name: string, overrides: Partial<WorkflowVariable> = {}): WorkflowVariable => ({
default: '',
hint: '',
label: name,
max_length: 48,
options: [],
placeholder: '',
required: true,
type: 'text-input',
variable: name,
...overrides,
})
const deepClone = <T>(value: T): T => JSON.parse(JSON.stringify(value))
const createNodeSnapshot = (variableNames: string[]): Node<StartNodeData> => ({
id: NODE_ID,
type: 'custom',
position: { x: 0, y: 24 },
positionAbsolute: { x: 0, y: 24 },
height: 88,
width: 242,
selected: true,
selectable: true,
draggable: true,
sourcePosition: Position.Right,
targetPosition: Position.Left,
data: {
selected: true,
title: '开始',
desc: '',
type: BlockEnum.Start,
variables: variableNames.map(name => createVariable(name)),
},
})
const LLM_NODE_ID = 'llm-node'
const PARAM_NODE_ID = 'param-extractor-node'
const createLLMNodeSnapshot = (promptTemplates: PromptTemplateItem[]): Node<LLMNodeData> => ({
id: LLM_NODE_ID,
type: 'custom',
position: { x: 200, y: 120 },
positionAbsolute: { x: 200, y: 120 },
height: 320,
width: 460,
selected: false,
selectable: true,
draggable: true,
sourcePosition: Position.Right,
targetPosition: Position.Left,
data: {
type: BlockEnum.LLM,
title: 'LLM',
desc: '',
selected: false,
context: {
enabled: false,
variable_selector: [],
},
model: {
mode: 'chat',
name: 'gemini-2.5-pro',
provider: 'langgenius/gemini/google',
completion_params: {
temperature: 0.7,
},
},
vision: {
enabled: false,
},
prompt_template: promptTemplates,
},
})
const createParameterExtractorNodeSnapshot = (parameters: ParameterItem[]): Node<ParameterExtractorNodeData> => ({
id: PARAM_NODE_ID,
type: 'custom',
position: { x: 420, y: 220 },
positionAbsolute: { x: 420, y: 220 },
height: 260,
width: 420,
selected: true,
selectable: true,
draggable: true,
sourcePosition: Position.Right,
targetPosition: Position.Left,
data: {
type: BlockEnum.ParameterExtractor,
title: '参数提取器',
desc: '',
selected: true,
model: {
mode: 'chat',
name: '',
provider: '',
completion_params: {
temperature: 0.7,
},
},
reasoning_mode: 'prompt',
parameters,
query: [],
vision: {
enabled: false,
},
},
})
const getVariables = (node: Node): string[] => {
const data = node.data as CommonNodeType<{ variables?: WorkflowVariable[] }>
const variables = data.variables ?? []
return variables.map(item => item.variable)
}
const getVariableObject = (node: Node, name: string): WorkflowVariable | undefined => {
const data = node.data as CommonNodeType<{ variables?: WorkflowVariable[] }>
const variables = data.variables ?? []
return variables.find(item => item.variable === name)
}
const getPromptTemplates = (node: Node): PromptTemplateItem[] => {
const data = node.data as CommonNodeType<{ prompt_template?: PromptTemplateItem[] }>
return data.prompt_template ?? []
}
const getParameters = (node: Node): ParameterItem[] => {
const data = node.data as CommonNodeType<{ parameters?: ParameterItem[] }>
return data.parameters ?? []
}
const getManagerInternals = (manager: CollaborationManager): CollaborationManagerInternals =>
manager as unknown as CollaborationManagerInternals
const setupManager = (): { manager: CollaborationManager, internals: CollaborationManagerInternals } => {
const manager = new CollaborationManager()
const doc = new LoroDoc()
const internals = getManagerInternals(manager)
internals.doc = doc
internals.nodesMap = doc.getMap('nodes')
internals.edgesMap = doc.getMap('edges')
return { manager, internals }
}
describe('CollaborationManager syncNodes', () => {
let manager: CollaborationManager
let internals: CollaborationManagerInternals
beforeEach(() => {
const setup = setupManager()
manager = setup.manager
internals = setup.internals
const initialNode = createNodeSnapshot(['a'])
internals.syncNodes([], [deepClone(initialNode)])
})
it('updates collaborators map when a single client adds a variable', () => {
const base = [createNodeSnapshot(['a'])]
const next = [createNodeSnapshot(['a', 'b'])]
internals.syncNodes(base, next)
const stored = (manager.getNodes() as Node[]).find(node => node.id === NODE_ID)
expect(stored).toBeDefined()
expect(getVariables(stored!)).toEqual(['a', 'b'])
})
it('applies the latest parallel additions derived from the same base snapshot', () => {
const base = [createNodeSnapshot(['a'])]
const userA = [createNodeSnapshot(['a', 'b'])]
const userB = [createNodeSnapshot(['a', 'c'])]
internals.syncNodes(base, userA)
const afterUserA = (manager.getNodes() as Node[]).find(node => node.id === NODE_ID)
expect(getVariables(afterUserA!)).toEqual(['a', 'b'])
internals.syncNodes(base, userB)
const finalNode = (manager.getNodes() as Node[]).find(node => node.id === NODE_ID)
const finalVariables = getVariables(finalNode!)
expect(finalVariables).toEqual(['a', 'c'])
})
it('prefers the incoming mutation when the same variable is edited concurrently', () => {
const base = [createNodeSnapshot(['a'])]
const userA = [
{
...createNodeSnapshot(['a']),
data: {
...createNodeSnapshot(['a']).data,
variables: [
createVariable('a', { label: 'A from userA', hint: 'hintA' }),
],
},
},
]
const userB = [
{
...createNodeSnapshot(['a']),
data: {
...createNodeSnapshot(['a']).data,
variables: [
createVariable('a', { label: 'A from userB', hint: 'hintB' }),
],
},
},
]
internals.syncNodes(base, userA)
internals.syncNodes(base, userB)
const finalNode = (manager.getNodes() as Node[]).find(node => node.id === NODE_ID)
const finalVariable = getVariableObject(finalNode!, 'a')
expect(finalVariable?.label).toBe('A from userB')
expect(finalVariable?.hint).toBe('hintB')
})
it('reflects the last writer when concurrent removal and edits happen', () => {
const base = [createNodeSnapshot(['a', 'b'])]
internals.syncNodes([], [deepClone(base[0])])
const userA = [
{
...createNodeSnapshot(['a']),
data: {
...createNodeSnapshot(['a']).data,
variables: [
createVariable('a', { label: 'A after deletion' }),
],
},
},
]
const userB = [
{
...createNodeSnapshot(['a', 'b']),
data: {
...createNodeSnapshot(['a']).data,
variables: [
createVariable('a'),
createVariable('b', { label: 'B edited but should vanish' }),
],
},
},
]
internals.syncNodes(base, userA)
internals.syncNodes(base, userB)
const finalNode = (manager.getNodes() as Node[]).find(node => node.id === NODE_ID)
const finalVariables = getVariables(finalNode!)
expect(finalVariables).toEqual(['a', 'b'])
expect(getVariableObject(finalNode!, 'b')).toBeDefined()
})
it('synchronizes prompt_template list updates across collaborators', () => {
const { manager: promptManager, internals: promptInternals } = setupManager()
const baseTemplate = [
{
id: 'abcfa5f9-3c44-4252-aeba-4b6eaf0acfc4',
role: 'system',
text: 'avc',
},
]
const baseNode = createLLMNodeSnapshot(baseTemplate)
promptInternals.syncNodes([], [deepClone(baseNode)])
const updatedTemplates = [
...baseTemplate,
{
id: 'user-1',
role: 'user',
text: 'hello world',
},
]
const updatedNode = createLLMNodeSnapshot(updatedTemplates)
promptInternals.syncNodes([deepClone(baseNode)], [deepClone(updatedNode)])
const stored = (promptManager.getNodes() as Node[]).find(node => node.id === LLM_NODE_ID)
expect(stored).toBeDefined()
const storedTemplates = getPromptTemplates(stored!)
expect(storedTemplates).toHaveLength(2)
expect(storedTemplates[0]).toEqual(baseTemplate[0])
expect(storedTemplates[1]).toEqual(updatedTemplates[1])
const editedTemplates = [
{
id: 'abcfa5f9-3c44-4252-aeba-4b6eaf0acfc4',
role: 'system',
text: 'updated system prompt',
},
]
const editedNode = createLLMNodeSnapshot(editedTemplates)
promptInternals.syncNodes([deepClone(updatedNode)], [deepClone(editedNode)])
const final = (promptManager.getNodes() as Node[]).find(node => node.id === LLM_NODE_ID)
const finalTemplates = getPromptTemplates(final!)
expect(finalTemplates).toHaveLength(1)
expect(finalTemplates[0].text).toBe('updated system prompt')
})
it('keeps parameter list in sync when nodes add, edit, or remove parameters', () => {
const { manager: parameterManager, internals: parameterInternals } = setupManager()
const baseParameters: ParameterItem[] = [
{ description: 'bb', name: 'aa', required: false, type: 'string' },
{ description: 'dd', name: 'cc', required: false, type: 'string' },
]
const baseNode = createParameterExtractorNodeSnapshot(baseParameters)
parameterInternals.syncNodes([], [deepClone(baseNode)])
const updatedParameters: ParameterItem[] = [
...baseParameters,
{ description: 'ff', name: 'ee', required: true, type: 'number' },
]
const updatedNode = createParameterExtractorNodeSnapshot(updatedParameters)
parameterInternals.syncNodes([deepClone(baseNode)], [deepClone(updatedNode)])
const stored = (parameterManager.getNodes() as Node[]).find(node => node.id === PARAM_NODE_ID)
expect(stored).toBeDefined()
expect(getParameters(stored!)).toEqual(updatedParameters)
const editedParameters: ParameterItem[] = [
{ description: 'bb edited', name: 'aa', required: true, type: 'string' },
]
const editedNode = createParameterExtractorNodeSnapshot(editedParameters)
parameterInternals.syncNodes([deepClone(updatedNode)], [deepClone(editedNode)])
const final = (parameterManager.getNodes() as Node[]).find(node => node.id === PARAM_NODE_ID)
expect(getParameters(final!)).toEqual(editedParameters)
})
it('handles nodes without data gracefully', () => {
const emptyNode: Node = {
id: 'empty-node',
type: 'custom',
position: { x: 0, y: 0 },
data: undefined as unknown as CommonNodeType<Record<string, never>>,
}
internals.syncNodes([], [deepClone(emptyNode)])
const stored = (manager.getNodes() as Node[]).find(node => node.id === 'empty-node')
expect(stored).toBeDefined()
expect(stored?.data).toEqual({})
})
it('preserves CRDT list instances when synchronizing parsed state back into the manager', () => {
const { manager: promptManager, internals: promptInternals } = setupManager()
const base = createLLMNodeSnapshot([
{ id: 'system', role: 'system', text: 'base' },
])
promptInternals.syncNodes([], [deepClone(base)])
const storedBefore = promptManager.getNodes().find(node => node.id === LLM_NODE_ID) as Node<LLMNodeData> | undefined
expect(storedBefore).toBeDefined()
const firstTemplate = storedBefore?.data.prompt_template?.[0]
expect(firstTemplate?.text).toBe('base')
// simulate consumer mutating the plain JSON array and syncing back
const baseNode = storedBefore!
const mutatedNode = deepClone(baseNode)
mutatedNode.data.prompt_template.push({
id: 'user',
role: 'user',
text: 'mutated',
})
promptInternals.syncNodes([baseNode], [mutatedNode])
const storedAfter = promptManager.getNodes().find(node => node.id === LLM_NODE_ID) as Node<LLMNodeData> | undefined
const templatesAfter = storedAfter?.data.prompt_template
expect(Array.isArray(templatesAfter)).toBe(true)
expect(templatesAfter).toHaveLength(2)
})
it('reuses CRDT list when syncing parameters repeatedly', () => {
const { manager: parameterManager, internals: parameterInternals } = setupManager()
const initialParameters: ParameterItem[] = [
{ description: 'desc', name: 'param', required: false, type: 'string' },
]
const node = createParameterExtractorNodeSnapshot(initialParameters)
parameterInternals.syncNodes([], [deepClone(node)])
const stored = parameterManager.getNodes().find(n => n.id === PARAM_NODE_ID) as Node<ParameterExtractorNodeData>
const mutatedNode = deepClone(stored)
mutatedNode.data.parameters[0].description = 'updated'
parameterInternals.syncNodes([stored], [mutatedNode])
const storedAfter = parameterManager.getNodes().find(n => n.id === PARAM_NODE_ID) as
| Node<ParameterExtractorNodeData>
| undefined
const params = storedAfter?.data.parameters ?? []
expect(params).toHaveLength(1)
expect(params[0].description).toBe('updated')
})
it('filters out transient/private data keys while keeping allowlisted ones', () => {
const nodeWithPrivate: Node<{ _foo: string, variables: WorkflowVariable[] }> = {
id: 'private-node',
type: 'custom',
position: { x: 0, y: 0 },
data: {
type: BlockEnum.Start,
title: 'private',
desc: '',
_foo: 'should disappear',
_children: [{ nodeId: 'child-a', nodeType: BlockEnum.Start }],
selected: true,
variables: [],
},
}
internals.syncNodes([], [deepClone(nodeWithPrivate)])
const stored = (manager.getNodes() as Node[]).find(node => node.id === 'private-node')!
const storedData = stored.data as CommonNodeType<{ _foo?: string }>
expect(storedData._foo).toBeUndefined()
expect(storedData._children).toEqual([{ nodeId: 'child-a', nodeType: BlockEnum.Start }])
expect(storedData.selected).toBeUndefined()
})
it('removes list fields when they are omitted in the update snapshot', () => {
const baseNode = createNodeSnapshot(['alpha'])
internals.syncNodes([], [deepClone(baseNode)])
const withoutVariables: Node<StartNodeData> = {
...deepClone(baseNode),
data: {
...deepClone(baseNode).data,
},
}
delete (withoutVariables.data as CommonNodeType<{ variables?: WorkflowVariable[] }>).variables
internals.syncNodes([deepClone(baseNode)], [withoutVariables])
const stored = (manager.getNodes() as Node[]).find(node => node.id === NODE_ID)!
const storedData = stored.data as CommonNodeType<{ variables?: WorkflowVariable[] }>
expect(storedData.variables).toBeUndefined()
})
it('treats non-array list inputs as empty lists during synchronization', () => {
const { manager: promptManager, internals: promptInternals } = setupManager()
const nodeWithInvalidTemplate = createLLMNodeSnapshot([])
promptInternals.syncNodes([], [deepClone(nodeWithInvalidTemplate)])
const mutated = deepClone(nodeWithInvalidTemplate) as Node<LLMNodeDataWithUnknownTemplate>
mutated.data.prompt_template = 'not-an-array'
promptInternals.syncNodes([deepClone(nodeWithInvalidTemplate)], [mutated])
const stored = promptManager.getNodes().find(node => node.id === LLM_NODE_ID) as Node<LLMNodeData>
expect(Array.isArray(stored.data.prompt_template)).toBe(true)
expect(stored.data.prompt_template).toHaveLength(0)
})
it('updates edges map when edges are added, modified, and removed', () => {
const { manager: edgeManager } = setupManager()
const edge: Edge = {
id: 'edge-1',
source: 'node-a',
target: 'node-b',
type: 'default',
data: {
sourceType: BlockEnum.Start,
targetType: BlockEnum.LLM,
_waitingRun: false,
},
}
edgeManager.setEdges([], [edge])
expect(edgeManager.getEdges()).toHaveLength(1)
const storedEdge = edgeManager.getEdges()[0]!
expect(storedEdge.data).toBeDefined()
expect(storedEdge.data!._waitingRun).toBe(false)
const updatedEdge: Edge = {
...edge,
data: {
sourceType: BlockEnum.Start,
targetType: BlockEnum.LLM,
_waitingRun: true,
},
}
edgeManager.setEdges([edge], [updatedEdge])
expect(edgeManager.getEdges()).toHaveLength(1)
const updatedStoredEdge = edgeManager.getEdges()[0]!
expect(updatedStoredEdge.data).toBeDefined()
expect(updatedStoredEdge.data!._waitingRun).toBe(true)
edgeManager.setEdges([updatedEdge], [])
expect(edgeManager.getEdges()).toHaveLength(0)
})
})
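// Note on the semantics exercised above: within a single document, successive
// syncNodes(base, next) calls apply the latest snapshot (last writer wins). True
// cross-replica merging, where two LoroDocs exchange updates, is covered by the
// separate Loro merge smoke tests.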
describe('CollaborationManager public API wrappers', () => {
let manager: CollaborationManager
let internals: CollaborationManagerInternals
const baseNodes: Node[] = []
const updatedNodes: Node[] = [
{
id: 'new-node',
type: 'custom',
position: { x: 0, y: 0 },
data: {
type: BlockEnum.Start,
title: 'New node',
desc: '',
},
},
]
const baseEdges: Edge[] = []
const updatedEdges: Edge[] = [
{
id: 'edge-1',
source: 'source',
target: 'target',
type: 'default',
data: {
sourceType: BlockEnum.Start,
targetType: BlockEnum.End,
},
},
]
beforeEach(() => {
manager = new CollaborationManager()
internals = getManagerInternals(manager)
})
it('setNodes delegates to syncNodes and commits the CRDT document', () => {
const commit = vi.fn()
internals.doc = { commit }
const syncSpy = vi.spyOn(internals, 'syncNodes').mockImplementation(() => undefined)
manager.setNodes(baseNodes, updatedNodes)
expect(syncSpy).toHaveBeenCalledWith(baseNodes, updatedNodes)
expect(commit).toHaveBeenCalled()
syncSpy.mockRestore()
})
it('setNodes skips syncing when undo/redo replay is running', () => {
const commit = vi.fn()
internals.doc = { commit }
internals.isUndoRedoInProgress = true
const syncSpy = vi.spyOn(internals, 'syncNodes').mockImplementation(() => undefined)
manager.setNodes(baseNodes, updatedNodes)
expect(syncSpy).not.toHaveBeenCalled()
expect(commit).not.toHaveBeenCalled()
syncSpy.mockRestore()
})
it('setEdges delegates to syncEdges and commits the CRDT document', () => {
const commit = vi.fn()
internals.doc = { commit }
const syncSpy = vi.spyOn(internals, 'syncEdges').mockImplementation(() => undefined)
manager.setEdges(baseEdges, updatedEdges)
expect(syncSpy).toHaveBeenCalledWith(baseEdges, updatedEdges)
expect(commit).toHaveBeenCalled()
syncSpy.mockRestore()
})
it('disconnect tears down the collaboration state only when last connection closes', () => {
const forceSpy = vi.spyOn(internals, 'forceDisconnect').mockImplementation(() => undefined)
internals.activeConnections.add('conn-a')
internals.activeConnections.add('conn-b')
manager.disconnect('conn-a')
expect(forceSpy).not.toHaveBeenCalled()
manager.disconnect('conn-b')
expect(forceSpy).toHaveBeenCalledTimes(1)
forceSpy.mockRestore()
})
it('applyNodePanelPresenceUpdate keeps a client visible on a single node at a time', () => {
const updates: NodePanelPresenceMap[] = []
manager.onNodePanelPresenceUpdate((presence) => {
updates.push(presence)
})
const user: NodePanelPresenceUser = { userId: 'user-1', username: 'Dana' }
internals.applyNodePanelPresenceUpdate({
nodeId: 'node-a',
action: 'open',
user,
clientId: 'client-1',
timestamp: 100,
})
internals.applyNodePanelPresenceUpdate({
nodeId: 'node-b',
action: 'open',
user,
clientId: 'client-1',
timestamp: 200,
})
const finalSnapshot = updates[updates.length - 1]!
expect(finalSnapshot).toEqual({
'node-b': {
'client-1': {
userId: 'user-1',
username: 'Dana',
clientId: 'client-1',
timestamp: 200,
},
},
})
})
it('applyNodePanelPresenceUpdate clears node entries when last viewer closes the panel', () => {
const updates: NodePanelPresenceMap[] = []
manager.onNodePanelPresenceUpdate((presence) => {
updates.push(presence)
})
const user: NodePanelPresenceUser = { userId: 'user-2', username: 'Kai' }
internals.applyNodePanelPresenceUpdate({
nodeId: 'node-a',
action: 'open',
user,
clientId: 'client-9',
timestamp: 300,
})
internals.applyNodePanelPresenceUpdate({
nodeId: 'node-a',
action: 'close',
user,
clientId: 'client-9',
timestamp: 301,
})
expect(updates[updates.length - 1]).toEqual({})
})
})
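// Shape of the presence snapshots asserted above (derived from the expectations, not a formal type):
//   { [nodeId]: { [clientId]: { userId, username, clientId, timestamp } } }
// Each clientId appears under at most one nodeId, and a node's entry is removed once its
// last viewer closes the panel.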


@@ -0,0 +1,138 @@
import type { LoroDoc } from 'loro-crdt'
import type { Socket } from 'socket.io-client'
import { CRDTProvider } from '../crdt-provider'
type FakeDocEvent = {
by: string
}
type FakeDoc = {
export: ReturnType<typeof vi.fn>
import: ReturnType<typeof vi.fn>
subscribe: ReturnType<typeof vi.fn>
trigger: (event: FakeDocEvent) => void
}
const createFakeDoc = (): FakeDoc => {
let handler: ((payload: FakeDocEvent) => void) | null = null
const exportFn = vi.fn(() => new Uint8Array([1, 2, 3]))
const importFn = vi.fn()
const subscribeFn = vi.fn((cb: (payload: FakeDocEvent) => void) => {
handler = cb
})
return {
export: exportFn,
import: importFn,
subscribe: subscribeFn,
trigger: (event: FakeDocEvent) => {
handler?.(event)
},
}
}
type MockSocket = {
trigger: (event: string, ...args: unknown[]) => void
emit: ReturnType<typeof vi.fn>
on: ReturnType<typeof vi.fn>
off: ReturnType<typeof vi.fn>
}
const createMockSocket = (): MockSocket => {
const handlers = new Map<string, (...args: unknown[]) => void>()
const socket: MockSocket = {
emit: vi.fn(),
on: vi.fn((event: string, handler: (...args: unknown[]) => void) => {
handlers.set(event, handler)
}),
off: vi.fn((event: string) => {
handlers.delete(event)
}),
trigger: (event: string, ...args: unknown[]) => {
const handler = handlers.get(event)
if (handler)
handler(...args)
},
}
return socket
}
describe('CRDTProvider', () => {
it('emits graph_event when local changes happen', () => {
const doc = createFakeDoc()
const socket = createMockSocket()
const provider = new CRDTProvider(socket as unknown as Socket, doc as unknown as LoroDoc)
expect(provider).toBeInstanceOf(CRDTProvider)
doc.trigger({ by: 'local' })
expect(socket.emit).toHaveBeenCalledWith(
'graph_event',
expect.any(Uint8Array),
expect.any(Function),
)
expect(doc.export).toHaveBeenCalledWith({ mode: 'update' })
})
it('ignores non-local events', () => {
const doc = createFakeDoc()
const socket = createMockSocket()
const provider = new CRDTProvider(socket as unknown as Socket, doc as unknown as LoroDoc)
doc.trigger({ by: 'remote' })
expect(socket.emit).not.toHaveBeenCalled()
provider.destroy()
})
it('imports remote updates on graph_update', () => {
const doc = createFakeDoc()
const socket = createMockSocket()
const provider = new CRDTProvider(socket as unknown as Socket, doc as unknown as LoroDoc)
const payload = new Uint8Array([9, 9, 9])
socket.trigger('graph_update', payload)
expect(doc.import).toHaveBeenCalledWith(expect.any(Uint8Array))
expect(Array.from(doc.import.mock.calls[0][0])).toEqual([9, 9, 9])
provider.destroy()
})
it('removes graph_update listener on destroy', () => {
const doc = createFakeDoc()
const socket = createMockSocket()
const provider = new CRDTProvider(socket as unknown as Socket, doc as unknown as LoroDoc)
provider.destroy()
expect(socket.off).toHaveBeenCalledWith('graph_update')
})
it('logs an error when graph_update import fails but continues operating', () => {
const doc = createFakeDoc()
const socket = createMockSocket()
doc.import.mockImplementation(() => {
throw new Error('boom')
})
const provider = new CRDTProvider(socket as unknown as Socket, doc as unknown as LoroDoc)
const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => undefined)
socket.trigger('graph_update', new Uint8Array([1]))
expect(errorSpy).toHaveBeenCalledWith('Error importing graph update:', expect.any(Error))
doc.import.mockReset()
socket.trigger('graph_update', new Uint8Array([2, 3]))
expect(doc.import).toHaveBeenCalled()
provider.destroy()
errorSpy.mockRestore()
})
})


@@ -0,0 +1,93 @@
import { EventEmitter } from '../event-emitter'
describe('EventEmitter', () => {
it('registers and invokes handlers via on/emit', () => {
const emitter = new EventEmitter()
const handler = vi.fn()
emitter.on('test', handler)
emitter.emit('test', { value: 42 })
expect(handler).toHaveBeenCalledWith({ value: 42 })
})
it('removes specific handler with off', () => {
const emitter = new EventEmitter()
const handlerA = vi.fn()
const handlerB = vi.fn()
emitter.on('test', handlerA)
emitter.on('test', handlerB)
emitter.off('test', handlerA)
emitter.emit('test', 'payload')
expect(handlerA).not.toHaveBeenCalled()
expect(handlerB).toHaveBeenCalledWith('payload')
})
it('clears all listeners when off is called without handler', () => {
const emitter = new EventEmitter()
const handlerA = vi.fn()
const handlerB = vi.fn()
emitter.on('trigger', handlerA)
emitter.on('trigger', handlerB)
emitter.off('trigger')
emitter.emit('trigger', 'payload')
expect(handlerA).not.toHaveBeenCalled()
expect(handlerB).not.toHaveBeenCalled()
expect(emitter.getListenerCount('trigger')).toBe(0)
})
it('removeAllListeners clears every registered event', () => {
const emitter = new EventEmitter()
emitter.on('one', vi.fn())
emitter.on('two', vi.fn())
emitter.removeAllListeners()
expect(emitter.getListenerCount('one')).toBe(0)
expect(emitter.getListenerCount('two')).toBe(0)
})
it('returns an unsubscribe function from on', () => {
const emitter = new EventEmitter()
const handler = vi.fn()
const unsubscribe = emitter.on('detach', handler)
unsubscribe()
emitter.emit('detach', 'value')
expect(handler).not.toHaveBeenCalled()
})
it('continues emitting when a handler throws', () => {
const emitter = new EventEmitter()
const errorHandler = vi
.spyOn(console, 'error')
.mockImplementation(() => undefined)
const failingHandler = vi.fn(() => {
throw new Error('boom')
})
const succeedingHandler = vi.fn()
emitter.on('safe', failingHandler)
emitter.on('safe', succeedingHandler)
emitter.emit('safe', 7)
expect(failingHandler).toHaveBeenCalledWith(7)
expect(succeedingHandler).toHaveBeenCalledWith(7)
expect(errorHandler).toHaveBeenCalledWith(
expect.stringContaining('Error in event handler for safe:'),
expect.any(Error),
)
errorHandler.mockRestore()
})
})


@@ -0,0 +1,161 @@
type MockSocket = {
trigger: (event: string, ...args: unknown[]) => void
emit: ReturnType<typeof vi.fn>
on: ReturnType<typeof vi.fn>
disconnect: ReturnType<typeof vi.fn>
connected: boolean
}
type IoOptions = {
auth?: unknown
path?: string
transports?: string[]
withCredentials?: boolean
}
const ioMock = vi.hoisted(() => vi.fn())
vi.mock('socket.io-client', () => ({
io: (...args: Parameters<typeof ioMock>) => ioMock(...args),
}))
const createMockSocket = (id: string): MockSocket => {
const handlers = new Map<string, (...args: unknown[]) => void>()
const socket: MockSocket & { id: string } = {
id,
connected: true,
emit: vi.fn(),
disconnect: vi.fn(() => {
socket.connected = false
}),
on: vi.fn((event: string, handler: (...args: unknown[]) => void) => {
handlers.set(event, handler)
}),
trigger: (event: string, ...args: unknown[]) => {
const handler = handlers.get(event)
if (handler)
handler(...args)
},
}
return socket
}
describe('WebSocketClient', () => {
beforeEach(() => {
vi.resetModules()
ioMock.mockReset()
})
it('connects with default url and registers base listeners', async () => {
const mockSocket = createMockSocket('socket-fallback')
ioMock.mockImplementation(() => mockSocket)
const { WebSocketClient } = await import('../websocket-manager')
const client = new WebSocketClient()
const socket = client.connect('app-1')
expect(ioMock).toHaveBeenCalledWith(
'ws://localhost:5001',
expect.objectContaining({
path: '/socket.io',
transports: ['websocket'],
withCredentials: true,
}),
)
expect(socket).toBe(mockSocket)
expect(mockSocket.on).toHaveBeenCalledWith('connect', expect.any(Function))
expect(mockSocket.on).toHaveBeenCalledWith('disconnect', expect.any(Function))
expect(mockSocket.on).toHaveBeenCalledWith('connect_error', expect.any(Function))
})
it('reuses existing connected socket and avoids duplicate connections', async () => {
const mockSocket = createMockSocket('socket-reuse')
ioMock.mockImplementation(() => mockSocket)
const { WebSocketClient } = await import('../websocket-manager')
const client = new WebSocketClient()
const first = client.connect('app-reuse')
const second = client.connect('app-reuse')
expect(ioMock).toHaveBeenCalledTimes(1)
expect(second).toBe(first)
})
it('emits user_connect on connect without auth payload', async () => {
const mockSocket = createMockSocket('socket-auth')
ioMock.mockImplementation((url: string, options: IoOptions) => {
expect(options.auth).toBeUndefined()
return mockSocket
})
const { WebSocketClient } = await import('../websocket-manager')
const client = new WebSocketClient()
client.connect('app-auth')
const connectHandler = mockSocket.on.mock.calls.find(call => call[0] === 'connect')?.[1] as () => void
expect(connectHandler).toBeDefined()
connectHandler()
expect(mockSocket.emit).toHaveBeenCalledWith(
'user_connect',
{ workflow_id: 'app-auth' },
expect.any(Function),
)
})
it('disconnects a specific app and clears internal maps', async () => {
const mockSocket = createMockSocket('socket-disconnect-one')
ioMock.mockImplementation(() => mockSocket)
const { WebSocketClient } = await import('../websocket-manager')
const client = new WebSocketClient()
client.connect('app-disconnect')
expect(client.isConnected('app-disconnect')).toBe(true)
client.disconnect('app-disconnect')
expect(mockSocket.disconnect).toHaveBeenCalled()
expect(client.getSocket('app-disconnect')).toBeNull()
expect(client.isConnected('app-disconnect')).toBe(false)
})
it('disconnects all apps when no id is provided', async () => {
const socketA = createMockSocket('socket-a')
const socketB = createMockSocket('socket-b')
ioMock.mockImplementationOnce(() => socketA).mockImplementationOnce(() => socketB)
const { WebSocketClient } = await import('../websocket-manager')
const client = new WebSocketClient()
client.connect('app-a')
client.connect('app-b')
client.disconnect()
expect(socketA.disconnect).toHaveBeenCalled()
expect(socketB.disconnect).toHaveBeenCalled()
expect(client.getConnectedApps()).toEqual([])
})
it('reports connected apps, sockets, and debug info correctly', async () => {
const socketA = createMockSocket('socket-debug-a')
const socketB = createMockSocket('socket-debug-b')
socketB.connected = false
ioMock.mockImplementationOnce(() => socketA).mockImplementationOnce(() => socketB)
const { WebSocketClient } = await import('../websocket-manager')
const client = new WebSocketClient()
client.connect('app-a')
client.connect('app-b')
expect(client.getConnectedApps()).toEqual(['app-a'])
const debugInfo = client.getDebugInfo()
expect(debugInfo).toMatchObject({
'app-a': { connected: true, socketId: 'socket-debug-a' },
'app-b': { connected: false, socketId: 'socket-debug-b' },
})
})
})

File diff suppressed because it is too large

View File

@ -0,0 +1,39 @@
import type { LoroDoc } from 'loro-crdt'
import type { Socket } from 'socket.io-client'
import { emitWithAuthGuard } from './websocket-manager'
export class CRDTProvider {
private doc: LoroDoc
private socket: Socket
private onUnauthorized?: () => void
constructor(socket: Socket, doc: LoroDoc, onUnauthorized?: () => void) {
this.socket = socket
this.doc = doc
this.onUnauthorized = onUnauthorized
this.setupEventListeners()
}
private setupEventListeners(): void {
this.doc.subscribe((event: { by?: string }) => {
if (event.by === 'local') {
const update = this.doc.export({ mode: 'update' })
emitWithAuthGuard(this.socket, 'graph_event', update, { onUnauthorized: this.onUnauthorized })
}
})
this.socket.on('graph_update', (updateData: Uint8Array) => {
try {
const data = new Uint8Array(updateData)
this.doc.import(data)
}
catch (error) {
console.error('Error importing graph update:', error)
}
})
}
destroy(): void {
this.socket.off('graph_update')
}
}
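
A minimal wiring sketch for this provider, assuming a LoroDoc from loro-crdt and the shared webSocketClient; the './crdt-provider' file name and the app id are illustrative, not taken from the diff. Local commits are exported as binary updates and forwarded through graph_event, while incoming graph_update payloads are imported into the same document.

import { LoroDoc } from 'loro-crdt'
import { CRDTProvider } from './crdt-provider' // hypothetical file name
import { webSocketClient } from './websocket-manager'

const doc = new LoroDoc()
const socket = webSocketClient.connect('app-123') // illustrative app id

// On unauthorized acks the caller can stop treating the session as writable.
const provider = new CRDTProvider(socket, doc, () => {
  console.warn('unauthorized: local edits are no longer accepted by the server')
})

// A committed local change triggers the doc subscription (event.by === 'local'),
// which exports an update and pushes it through emitWithAuthGuard.
doc.getMap('nodes').set('start-node', { title: 'Start' })
doc.commit()

// Tearing down removes the graph_update listener from the socket.
provider.destroy()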

View File

@ -0,0 +1,51 @@
export type EventHandler<T = unknown> = (data: T) => void
export class EventEmitter {
private events: Map<string, Set<EventHandler<unknown>>> = new Map()
on<T = unknown>(event: string, handler: EventHandler<T>): () => void {
if (!this.events.has(event))
this.events.set(event, new Set())
this.events.get(event)!.add(handler as EventHandler<unknown>)
return () => this.off(event, handler)
}
off<T = unknown>(event: string, handler?: EventHandler<T>): void {
if (!this.events.has(event))
return
const handlers = this.events.get(event)!
if (handler)
handlers.delete(handler as EventHandler<unknown>)
else
handlers.clear()
if (handlers.size === 0)
this.events.delete(event)
}
emit<T = unknown>(event: string, data: T): void {
if (!this.events.has(event))
return
const handlers = this.events.get(event)!
handlers.forEach((handler) => {
try {
handler(data)
}
catch (error) {
console.error(`Error in event handler for ${event}:`, error)
}
})
}
removeAllListeners(): void {
this.events.clear()
}
getListenerCount(event: string): number {
return this.events.get(event)?.size || 0
}
}
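
A short usage sketch of the emitter above; the import path and the 'online_users' event name are illustrative. Subscribing returns an unsubscribe function, and once the last handler for an event is removed the event key itself is dropped.

import { EventEmitter } from './event-emitter' // hypothetical file name

type UsersPayload = { users: string[] }

const emitter = new EventEmitter()

// on() returns an unsubscribe function, so callers do not have to keep the handler around.
const unsubscribe = emitter.on<UsersPayload>('online_users', ({ users }) => {
  console.log('online users:', users.join(', '))
})

emitter.emit<UsersPayload>('online_users', { users: ['alice', 'bob'] })

unsubscribe()
console.log(emitter.getListenerCount('online_users')) // 0: the event entry was deleted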

View File

@ -0,0 +1,157 @@
import type { Socket } from 'socket.io-client'
import type { DebugInfo, WebSocketConfig } from '../types/websocket'
import { io } from 'socket.io-client'
import { SOCKET_URL } from '@/config'
type AckArgs = unknown[]
const isUnauthorizedAck = (...ackArgs: AckArgs): boolean => {
const [first, second] = ackArgs
if (second === 401 || first === 401)
return true
if (first && typeof first === 'object' && 'msg' in first) {
const message = (first as { msg?: unknown }).msg
return message === 'unauthorized'
}
return false
}
export type EmitAckOptions = {
onAck?: (...ackArgs: AckArgs) => void
onUnauthorized?: (...ackArgs: AckArgs) => void
}
export const emitWithAuthGuard = (
socket: Socket | null | undefined,
event: string,
payload: unknown,
options?: EmitAckOptions,
): void => {
if (!socket)
return
socket.emit(
event,
payload,
(...ackArgs: AckArgs) => {
options?.onAck?.(...ackArgs)
if (isUnauthorizedAck(...ackArgs))
options?.onUnauthorized?.(...ackArgs)
},
)
}
export class WebSocketClient {
private connections: Map<string, Socket> = new Map()
private connecting: Set<string> = new Set()
private readonly url: string
private readonly transports: WebSocketConfig['transports']
private readonly withCredentials?: boolean
constructor(config: WebSocketConfig = {}) {
this.url = SOCKET_URL
this.transports = config.transports || ['websocket']
this.withCredentials = config.withCredentials !== false
}
connect(appId: string): Socket {
const existingSocket = this.connections.get(appId)
if (existingSocket?.connected)
return existingSocket
if (this.connecting.has(appId)) {
const pendingSocket = this.connections.get(appId)
if (pendingSocket)
return pendingSocket
}
if (existingSocket && !existingSocket.connected) {
existingSocket.disconnect()
this.connections.delete(appId)
}
this.connecting.add(appId)
const socketOptions: {
path: string
transports: WebSocketConfig['transports']
withCredentials?: boolean
} = {
path: '/socket.io',
transports: this.transports,
withCredentials: this.withCredentials,
}
const socket = io(this.url, socketOptions)
this.connections.set(appId, socket)
this.setupBaseEventListeners(socket, appId)
return socket
}
disconnect(appId?: string): void {
if (appId) {
const socket = this.connections.get(appId)
if (socket) {
socket.disconnect()
this.connections.delete(appId)
this.connecting.delete(appId)
}
}
else {
this.connections.forEach(socket => socket.disconnect())
this.connections.clear()
this.connecting.clear()
}
}
getSocket(appId: string): Socket | null {
return this.connections.get(appId) || null
}
isConnected(appId: string): boolean {
return this.connections.get(appId)?.connected || false
}
getConnectedApps(): string[] {
const connectedApps: string[] = []
this.connections.forEach((socket, appId) => {
if (socket.connected)
connectedApps.push(appId)
})
return connectedApps
}
getDebugInfo(): DebugInfo {
const info: DebugInfo = {}
this.connections.forEach((socket, appId) => {
info[appId] = {
connected: socket.connected,
connecting: this.connecting.has(appId),
socketId: socket.id,
}
})
return info
}
private setupBaseEventListeners(socket: Socket, appId: string): void {
socket.on('connect', () => {
this.connecting.delete(appId)
emitWithAuthGuard(socket, 'user_connect', { workflow_id: appId })
})
socket.on('disconnect', () => {
this.connecting.delete(appId)
})
socket.on('connect_error', () => {
this.connecting.delete(appId)
})
}
}
export const webSocketClient = new WebSocketClient()
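
A hedged usage sketch of emitWithAuthGuard and the shared client; the 'cursor_move' payload and the app id are illustrative. The ack callback is what isUnauthorizedAck inspects: a 401 in either ack argument, or an object carrying msg: 'unauthorized', routes into onUnauthorized.

import { emitWithAuthGuard, webSocketClient } from './websocket-manager'

const socket = webSocketClient.connect('app-123') // illustrative app id

emitWithAuthGuard(socket, 'cursor_move', { x: 10, y: 20 }, {
  onAck: (...ackArgs) => console.log('server ack:', ackArgs),
  // Fired when the ack carries a 401 code or a { msg: 'unauthorized' } payload.
  onUnauthorized: () => console.warn('no edit permission, switching to read-only'),
})

// Passing no app id tears down every tracked connection.
webSocketClient.disconnect()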

View File

@ -0,0 +1,144 @@
import type { ReactFlowInstance } from 'reactflow'
import type {
CollaborationState,
CursorPosition,
NodePanelPresenceMap,
OnlineUser,
} from '../types/collaboration'
import { useEffect, useRef, useState } from 'react'
import Toast from '@/app/components/base/toast'
import { useGlobalPublicStore } from '@/context/global-public-context'
import { collaborationManager } from '../core/collaboration-manager'
import { CursorService } from '../services/cursor-service'
type CollaborationViewState = {
isConnected: boolean
onlineUsers: OnlineUser[]
cursors: Record<string, CursorPosition>
nodePanelPresence: NodePanelPresenceMap
isLeader: boolean
}
type ReactFlowStore = NonNullable<Parameters<typeof collaborationManager.connect>[1]>
const initialState: CollaborationViewState = {
isConnected: false,
onlineUsers: [],
cursors: {},
nodePanelPresence: {},
isLeader: false,
}
export function useCollaboration(appId: string, reactFlowStore?: ReactFlowStore) {
const [state, setState] = useState<CollaborationViewState>(initialState)
const cursorServiceRef = useRef<CursorService | null>(null)
const isCollaborationEnabled = useGlobalPublicStore(s => s.systemFeatures.enable_collaboration_mode)
useEffect(() => {
if (!appId || !isCollaborationEnabled) {
Promise.resolve().then(() => {
setState(initialState)
})
return
}
let connectionId: string | null = null
let isUnmounted = false
if (!cursorServiceRef.current)
cursorServiceRef.current = new CursorService()
const initCollaboration = async () => {
try {
const id = await collaborationManager.connect(appId, reactFlowStore)
if (isUnmounted) {
collaborationManager.disconnect(id)
return
}
connectionId = id
setState(prev => ({ ...prev, isConnected: collaborationManager.isConnected() }))
}
catch (error) {
console.error('Failed to initialize collaboration:', error)
}
}
initCollaboration()
const unsubscribeStateChange = collaborationManager.onStateChange((newState: Partial<CollaborationState>) => {
if (newState.isConnected === undefined)
return
setState(prev => ({ ...prev, isConnected: newState.isConnected ?? prev.isConnected }))
})
const unsubscribeCursors = collaborationManager.onCursorUpdate((cursors: Record<string, CursorPosition>) => {
setState(prev => ({ ...prev, cursors }))
})
const unsubscribeUsers = collaborationManager.onOnlineUsersUpdate((users: OnlineUser[]) => {
setState(prev => ({ ...prev, onlineUsers: users }))
})
const unsubscribeNodePanelPresence = collaborationManager.onNodePanelPresenceUpdate((presence: NodePanelPresenceMap) => {
setState(prev => ({ ...prev, nodePanelPresence: presence }))
})
const unsubscribeLeaderChange = collaborationManager.onLeaderChange((isLeader: boolean) => {
setState(prev => ({ ...prev, isLeader }))
})
return () => {
isUnmounted = true
unsubscribeStateChange()
unsubscribeCursors()
unsubscribeUsers()
unsubscribeNodePanelPresence()
unsubscribeLeaderChange()
cursorServiceRef.current?.stopTracking()
if (connectionId)
collaborationManager.disconnect(connectionId)
}
}, [appId, reactFlowStore, isCollaborationEnabled])
const prevIsConnected = useRef(false)
useEffect(() => {
if (prevIsConnected.current && !state.isConnected) {
Toast.notify({
type: 'error',
message: 'Network connection lost. Please check your network.',
})
}
prevIsConnected.current = state.isConnected || false
}, [state.isConnected])
const startCursorTracking = (containerRef: React.RefObject<HTMLElement>, reactFlowInstance?: ReactFlowInstance) => {
if (!isCollaborationEnabled || !cursorServiceRef.current)
return
if (cursorServiceRef.current) {
cursorServiceRef.current.startTracking(containerRef, (position) => {
collaborationManager.emitCursorMove(position)
}, reactFlowInstance)
}
}
const stopCursorTracking = () => {
cursorServiceRef.current?.stopTracking()
}
const result = {
isConnected: state.isConnected || false,
onlineUsers: state.onlineUsers || [],
cursors: state.cursors || {},
nodePanelPresence: state.nodePanelPresence || {},
isLeader: state.isLeader || false,
leaderId: collaborationManager.getLeaderId(),
isEnabled: isCollaborationEnabled,
startCursorTracking,
stopCursorTracking,
}
return result
}
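
A hypothetical consumer of the hook above, sketched to show how the returned state and the cursor-tracking callbacks fit together; the component name and markup are not taken from the diff. Tracking is started against the canvas container, and positions are forwarded through collaborationManager.emitCursorMove.

import { useRef } from 'react'
import { useReactFlow } from 'reactflow'
import { useCollaboration } from './hooks/use-collaboration'

const CollaborationOverlay = ({ appId }: { appId: string }) => {
  const containerRef = useRef<HTMLDivElement>(null)
  const reactFlowInstance = useReactFlow()
  const {
    isConnected,
    onlineUsers,
    cursors,
    startCursorTracking,
    stopCursorTracking,
  } = useCollaboration(appId)

  // Start broadcasting the local cursor when the pointer enters the canvas area.
  const handleMouseEnter = () => startCursorTracking(containerRef, reactFlowInstance)

  return (
    <div ref={containerRef} onMouseEnter={handleMouseEnter} onMouseLeave={stopCursorTracking}>
      <span>{isConnected ? `${onlineUsers.length} online` : 'offline'}</span>
      {Object.entries(cursors).map(([userId, cursor]) => (
        <span key={userId} style={{ position: 'absolute', left: cursor.x, top: cursor.y }} />
      ))}
    </div>
  )
}

export default CollaborationOverlay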

View File

@ -0,0 +1,5 @@
export { collaborationManager } from './core/collaboration-manager'
export { webSocketClient } from './core/websocket-manager'
export { useCollaboration } from './hooks/use-collaboration'
export { CursorService } from './services/cursor-service'
export * from './types'

View File

@ -0,0 +1,90 @@
import type { RefObject } from 'react'
import type { ReactFlowInstance } from 'reactflow'
import type { CursorPosition } from '../types/collaboration'
const CURSOR_MIN_MOVE_DISTANCE = 10
const CURSOR_THROTTLE_MS = 300
export class CursorService {
private containerRef: RefObject<HTMLElement> | null = null
private reactFlowInstance: ReactFlowInstance | null = null
private isTracking = false
private onCursorUpdate: ((cursors: Record<string, CursorPosition>) => void) | null = null
private onEmitPosition: ((position: CursorPosition) => void) | null = null
private lastEmitTime = 0
private lastPosition: { x: number, y: number } | null = null
startTracking(
containerRef: RefObject<HTMLElement>,
onEmitPosition: (position: CursorPosition) => void,
reactFlowInstance?: ReactFlowInstance,
): void {
if (this.isTracking)
this.stopTracking()
this.containerRef = containerRef
this.onEmitPosition = onEmitPosition
this.reactFlowInstance = reactFlowInstance || null
this.isTracking = true
if (containerRef.current)
containerRef.current.addEventListener('mousemove', this.handleMouseMove)
}
stopTracking(): void {
if (this.containerRef?.current)
this.containerRef.current.removeEventListener('mousemove', this.handleMouseMove)
this.containerRef = null
this.reactFlowInstance = null
this.onEmitPosition = null
this.isTracking = false
this.lastPosition = null
}
setCursorUpdateHandler(handler: (cursors: Record<string, CursorPosition>) => void): void {
this.onCursorUpdate = handler
}
updateCursors(cursors: Record<string, CursorPosition>): void {
if (this.onCursorUpdate)
this.onCursorUpdate(cursors)
}
private handleMouseMove = (event: MouseEvent): void => {
if (!this.containerRef?.current || !this.onEmitPosition)
return
const rect = this.containerRef.current.getBoundingClientRect()
let x = event.clientX - rect.left
let y = event.clientY - rect.top
// Transform coordinates to ReactFlow world coordinates if ReactFlow instance is available
if (this.reactFlowInstance) {
const viewport = this.reactFlowInstance.getViewport()
// Convert screen coordinates to world coordinates
// World coordinates = (screen coordinates - viewport translation) / zoom
x = (x - viewport.x) / viewport.zoom
y = (y - viewport.y) / viewport.zoom
}
// No boundary check here: once the viewport is panned, world coordinates can legitimately be negative, so every move is a candidate for emission
const now = Date.now()
const timeThrottled = now - this.lastEmitTime > CURSOR_THROTTLE_MS
const minDistance = CURSOR_MIN_MOVE_DISTANCE / (this.reactFlowInstance?.getZoom() || 1)
const distanceThrottled = !this.lastPosition
|| (Math.abs(x - this.lastPosition.x) > minDistance)
|| (Math.abs(y - this.lastPosition.y) > minDistance)
if (timeThrottled && distanceThrottled) {
this.lastPosition = { x, y }
this.lastEmitTime = now
this.onEmitPosition({
x,
y,
userId: '',
timestamp: now,
})
}
}
}
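
Worked numbers for the conversion and throttling above (values are illustrative): with the viewport panned to (100, 50) at zoom 2, a mouse event at screen offset (400, 300) lands at world (150, 125), and the 10px movement threshold shrinks to 5 world units, so a given physical mouse distance is treated the same at any zoom.

// Illustrative viewport and screen offsets, mirroring handleMouseMove above.
const viewport = { x: 100, y: 50, zoom: 2 }
const screen = { x: 400, y: 300 }

const worldX = (screen.x - viewport.x) / viewport.zoom // (400 - 100) / 2 = 150
const worldY = (screen.y - viewport.y) / viewport.zoom // (300 - 50) / 2 = 125

// CURSOR_MIN_MOVE_DISTANCE is divided by zoom, so 10 screen pixels become 5 world units here.
const minDistance = 10 / viewport.zoom

console.log({ worldX, worldY, minDistance }) // { worldX: 150, worldY: 125, minDistance: 5 }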

View File

@ -0,0 +1,103 @@
import type { Viewport } from 'reactflow'
import type { ConversationVariable, Edge, EnvironmentVariable, Node } from '../../types'
import type { Features } from '@/app/components/base/features/types'
export type OnlineUser = {
user_id: string
username: string
avatar: string
sid: string
}
export type WorkflowOnlineUsers = {
workflow_id: string
users: OnlineUser[]
}
export type OnlineUserListResponse = {
data: WorkflowOnlineUsers[]
}
export type CursorPosition = {
x: number
y: number
userId: string
timestamp: number
}
export type NodePanelPresenceUser = {
userId: string
username: string
avatar?: string | null
}
export type NodePanelPresenceInfo = NodePanelPresenceUser & {
clientId: string
timestamp: number
}
export type NodePanelPresenceMap = Record<string, Record<string, NodePanelPresenceInfo>>
export type CollaborationState = {
appId: string
isConnected: boolean
onlineUsers: OnlineUser[]
cursors: Record<string, CursorPosition>
nodePanelPresence: NodePanelPresenceMap
}
export type GraphSyncData = {
nodes: Node[]
edges: Edge[]
}
export type CollaborationEventType
= | 'mouse_move'
| 'vars_and_features_update'
| 'sync_request'
| 'app_state_update'
| 'app_meta_update'
| 'mcp_server_update'
| 'workflow_update'
| 'comments_update'
| 'node_panel_presence'
| 'app_publish_update'
| 'graph_resync_request'
| 'workflow_restore_request'
| 'workflow_restore_intent'
| 'workflow_restore_complete'
export type CollaborationUpdate = {
type: CollaborationEventType
userId: string
data: Record<string, unknown>
timestamp: number
}
export type RestoreRequestData = {
versionId: string
versionName?: string
initiatorUserId: string
initiatorName: string
graphData: {
nodes: Node[]
edges: Edge[]
viewport?: Viewport
}
features?: Features
environmentVariables?: EnvironmentVariable[]
conversationVariables?: ConversationVariable[]
}
export type RestoreIntentData = {
versionId: string
versionName?: string
initiatorUserId: string
initiatorName: string
}
export type RestoreCompleteData = {
versionId: string
success: boolean
error?: string
}

View File

@ -0,0 +1,34 @@
export type CollaborationEvent<TData = unknown> = {
type: string
data: TData
timestamp: number
}
export type GraphUpdateEvent = {
type: 'graph_update'
} & CollaborationEvent<Uint8Array>
export type CursorMoveEvent = {
type: 'cursor_move'
} & CollaborationEvent<{
x: number
y: number
userId: string
}>
export type UserConnectEvent = {
type: 'user_connect'
} & CollaborationEvent<{
workflow_id: string
}>
export type OnlineUsersEvent = {
type: 'online_users'
} & CollaborationEvent<{
users: Array<{
user_id: string
username: string
avatar: string
sid: string
}>
}>

View File

@ -0,0 +1,3 @@
export * from './collaboration'
export * from './events'
export * from './websocket'

View File

@ -0,0 +1,15 @@
export type WebSocketConfig = {
token?: string
transports?: string[]
withCredentials?: boolean
}
export type ConnectionInfo = {
connected: boolean
connecting: boolean
socketId?: string
}
export type DebugInfo = {
[appId: string]: ConnectionInfo
}

View File

@ -0,0 +1,12 @@
/**
* Generate a consistent color for a user based on their ID
* Used for cursor colors and avatar backgrounds
*/
export const getUserColor = (id: string): string => {
const colors = ['#155AEF', '#0BA5EC', '#444CE7', '#7839EE', '#4CA30D', '#0E9384', '#DD2590', '#FF4405', '#D92D20', '#F79009', '#828DAD']
const hash = id.split('').reduce((a, b) => {
a = ((a << 5) - a) + b.charCodeAt(0)
return a & a
}, 0)
return colors[Math.abs(hash) % colors.length]
}
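
Because the hash is derived only from the id string, the mapping is stable across sessions and clients; the ids below are illustrative and the import path is assumed.

import { getUserColor } from './utils/color' // hypothetical path

const alice = getUserColor('user-1')
const aliceAgain = getUserColor('user-1')

console.log(alice === aliceAgain) // true: the same id always hashes to the same swatch
console.log(getUserColor('user-2')) // a different id may still collide, since the palette has only 11 colors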

View File

@ -0,0 +1,34 @@
import { useEventListener } from 'ahooks'
import { useWorkflowComment } from './hooks/use-workflow-comment'
import { useWorkflowStore } from './store'
const CommentManager = () => {
const workflowStore = useWorkflowStore()
const { handleCreateComment, handleCommentCancel } = useWorkflowComment()
useEventListener('click', (e) => {
const { controlMode, mousePosition, pendingComment } = workflowStore.getState()
if (controlMode === 'comment') {
const target = e.target as HTMLElement
const isInDropdown = target.closest('[data-mention-dropdown]')
const isInCommentInput = target.closest('[data-comment-input]')
const isOnCanvasPane = target.closest('.react-flow__pane')
// Only handle clicks on the React Flow canvas pane (the background),
// not clicks inside the comment input or its mention dropdown
if (!isInDropdown && !isInCommentInput && isOnCanvasPane) {
e.preventDefault()
e.stopPropagation()
if (pendingComment)
handleCommentCancel()
else
handleCreateComment(mousePosition)
}
}
})
return null
}
export default CommentManager

View File

@ -0,0 +1,148 @@
import type { WorkflowCommentList } from '@/service/workflow-comment'
import { fireEvent, render, screen } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { CommentIcon } from './comment-icon'
type Position = { x: number, y: number }
let mockUserId = 'user-1'
const mockFlowToScreenPosition = vi.fn((position: Position) => position)
const mockScreenToFlowPosition = vi.fn((position: Position) => position)
vi.mock('reactflow', () => ({
useReactFlow: () => ({
flowToScreenPosition: mockFlowToScreenPosition,
screenToFlowPosition: mockScreenToFlowPosition,
}),
useViewport: () => ({
x: 0,
y: 0,
zoom: 1,
}),
}))
vi.mock('@/context/app-context', () => ({
useAppContext: () => ({
userProfile: {
id: mockUserId,
name: 'User',
avatar_url: 'avatar',
},
}),
}))
vi.mock('@/app/components/base/user-avatar-list', () => ({
UserAvatarList: ({ users }: { users: Array<{ id: string }> }) => (
<div data-testid="avatar-list">{users.map(user => user.id).join(',')}</div>
),
}))
vi.mock('./comment-preview', () => ({
default: ({ onClick }: { onClick?: () => void }) => (
<button type="button" data-testid="comment-preview" onClick={onClick}>
Preview
</button>
),
}))
const createComment = (overrides: Partial<WorkflowCommentList> = {}): WorkflowCommentList => ({
id: 'comment-1',
position_x: 0,
position_y: 0,
content: 'Hello',
created_by: 'user-1',
created_by_account: {
id: 'user-1',
name: 'Alice',
email: 'alice@example.com',
},
created_at: 1,
updated_at: 2,
resolved: false,
mention_count: 0,
reply_count: 0,
participants: [],
...overrides,
})
describe('CommentIcon', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUserId = 'user-1'
})
it('toggles preview on hover when inactive', () => {
const comment = createComment()
const { container } = render(
<CommentIcon comment={comment} onClick={vi.fn()} isActive={false} />,
)
const marker = container.querySelector('[data-role="comment-marker"]') as HTMLElement
const hoverTarget = marker.firstElementChild as HTMLElement
fireEvent.mouseEnter(hoverTarget)
expect(screen.getByTestId('comment-preview')).toBeInTheDocument()
fireEvent.mouseLeave(hoverTarget)
expect(screen.queryByTestId('comment-preview')).not.toBeInTheDocument()
})
it('calls onPositionUpdate after dragging by author', () => {
const comment = createComment({ position_x: 0, position_y: 0 })
const onClick = vi.fn()
const onPositionUpdate = vi.fn()
const { container } = render(
<CommentIcon
comment={comment}
onClick={onClick}
onPositionUpdate={onPositionUpdate}
/>,
)
const marker = container.querySelector('[data-role="comment-marker"]') as HTMLElement
fireEvent.pointerDown(marker, {
pointerId: 1,
button: 0,
clientX: 100,
clientY: 100,
})
fireEvent.pointerMove(marker, {
pointerId: 1,
clientX: 110,
clientY: 110,
})
fireEvent.pointerUp(marker, {
pointerId: 1,
clientX: 110,
clientY: 110,
})
expect(mockScreenToFlowPosition).toHaveBeenCalledWith({ x: 10, y: 10 })
expect(onPositionUpdate).toHaveBeenCalledWith({ x: 10, y: 10 })
expect(onClick).not.toHaveBeenCalled()
})
it('calls onClick for non-author clicks', () => {
mockUserId = 'user-2'
const comment = createComment()
const onClick = vi.fn()
const { container } = render(
<CommentIcon comment={comment} onClick={onClick} isActive={false} />,
)
const marker = container.querySelector('[data-role="comment-marker"]') as HTMLElement
fireEvent.pointerDown(marker, {
pointerId: 1,
button: 0,
clientX: 50,
clientY: 60,
})
fireEvent.pointerUp(marker, {
pointerId: 1,
clientX: 50,
clientY: 60,
})
expect(onClick).toHaveBeenCalledTimes(1)
})
})

Some files were not shown because too many files have changed in this diff