Compare commits

..

248 Commits

Author SHA1 Message Date
5874b920b2 fix: code owners 2025-11-28 14:36:30 +08:00
c51ab6ec37 fix: the consistency of the go-to-anything interaction (#28857) 2025-11-28 14:29:15 +08:00
1fc2255219 test: add comprehensive unit tests for EndUserService (#28840) 2025-11-28 14:22:19 +08:00
037389137d feat: complete test script of indexing runner (#28828)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-28 14:18:59 +08:00
8cd3e84c06 chore: bump dify plugin version in docker.middleware (#28847) 2025-11-28 13:55:13 +08:00
b3c6ac1430 chore: assign code owners to frontend and backend modules in CODEOWNERS (#28713) 2025-11-28 12:42:58 +08:00
68bb97919a feat: add comprehensive unit tests for MessageService (#28837)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-28 12:36:15 +08:00
f268d7c7be feat: complete test script of website crawl (#28826) 2025-11-28 12:34:27 +08:00
d695a79ba1 test: add comprehensive unit tests for DocumentIndexingTaskProxy (#28830)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-28 12:30:54 +08:00
cd5a745bd2 feat: complete test script of notion provider (#28833) 2025-11-28 12:30:45 +08:00
51e5f422c4 test: add comprehensive unit tests for VectorService and Vector classes (#28834)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-28 12:30:02 +08:00
ec3b2b40c2 test: add comprehensive unit tests for FeedbackService (#28771)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-28 11:33:56 +08:00
67ae3e9253 docker: use COPY --chown in api Dockerfile to avoid adding layers by explicit chown calls (#28756) 2025-11-28 11:33:06 +08:00
d38e3b7792 test: add unit tests for document service status management (#28804)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-28 11:25:36 +08:00
43d27edef2 feat: complete test script of embedding service (#28817)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-28 11:24:30 +08:00
94b87eac72 feat: add comprehensive unit tests for provider models (#28702)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-28 11:24:20 +08:00
yyh fd31af6012 fix(ci): use dynamic branch name for i18n workflow to prevent race condition (#28823)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-28 11:23:28 +08:00
yyh 228deccec2 chore: update packageManager version in package.json to pnpm@10.24.0 (#28820) 2025-11-28 11:23:20 +08:00
639f1d31f7 feat: complete test script of text splitter (#28813)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-28 11:22:52 +08:00
ec786fe236 test: add unit tests for document service validation and configuration (#28810)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-28 11:21:45 +08:00
fe3a6ef049 feat: complete test script of reranker (#28806)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-28 11:21:35 +08:00
8b761319f6 Refactor workflow nodes to use generic node_data (#28782) 2025-11-27 20:46:56 +08:00
002d8769b0 chore: translate i18n files and update type definitions (#28784)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-27 20:28:17 +08:00
5aba111297 Feat zen mode (#28794) 2025-11-27 20:10:50 +08:00
dc9b3a7e03 refactor: rename VariableAssignerNodeData to VariableAggregatorNodeData (#28780) 2025-11-27 17:45:48 +08:00
5f2e0d6347 pref: reduce next step components reRender (#28783) 2025-11-27 17:12:00 +08:00
1f72571c06 edit analyze-component (#28781)
Co-authored-by: CodingOnStar <hanxujiang@dify.ai>
Co-authored-by: 姜涵煦 <hanxujiang@jianghanxudeMacBook-Pro.local>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 16:54:44 +08:00
820925a866 feat(workflow): workflow as tool output schema (#26241)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Novice <novice12185727@gmail.com>
2025-11-27 16:50:48 +08:00
299bd351fd perf: reduce reRender in candidate node (#28776) 2025-11-27 15:57:36 +08:00
13bf6547ee Refactor: centralize node data hydration (#27771) 2025-11-27 15:41:56 +08:00
1b733abe82 feat: creates logs immediately when workflows start (not at completion) (#28701) 2025-11-27 15:22:33 +08:00
5782e26ab2 test: add unit tests for dataset service update/delete operations (#28757)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 15:01:43 +08:00
38d329e75a test: add unit tests for dataset permission service (#28760)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 15:00:55 +08:00
58f448a926 chore: remove outdated model config doc (#28765) 2025-11-27 14:40:06 +08:00
7a7fea40d9 feat: complete test script of dataset retrieval (#28762)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 14:39:33 +08:00
0309545ff1 Feat/test script of workflow service (#28726)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-27 11:23:55 +08:00
6deabfdad3 Use naive_utc_now in graph engine tests (#28735)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 11:23:20 +08:00
f9b4c31344 fix: MCP tool time configuration not work (#28740) 2025-11-27 11:22:49 +08:00
8d8800e632 upgrade docker compose milvus version to 2.6.0 to fix installation error (#26618)
Co-authored-by: crazywoola <427733928@qq.com>
2025-11-27 11:01:14 +08:00
4ca4493084 Add comprehensive unit tests for MetadataService (dataset metadata CRUD operations and filtering) (#28748)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 11:00:10 +08:00
7efa0df1fd Add comprehensive API/controller tests for dataset endpoints (list, create, update, delete, documents, segments, hit testing, external datasets) (#28750)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 10:59:17 +08:00
b786e101e5 fix: querying and setting the system default model (#28743) 2025-11-27 11:58:35 +09:00
09a8046b10 fix: querying webhook trigger issue (#28753) 2025-11-27 10:56:21 +08:00
2f6b3f1c5f hotfix: fix _extract_filename for rfc 5987 (#26230)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-11-27 10:54:00 +08:00
2551f6f279 feat: add APP_DEFAULT_ACTIVE_REQUESTS as the default value for APP_AC… (#26930) 2025-11-27 10:51:48 +08:00
01afa56166 chore: enhance the test script of current billing service (#28747)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 10:37:24 +08:00
5815950092 add unit tests for iteration node (#28719)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 10:36:47 +08:00
766e16b26f add unit tests for code node (#28717) 2025-11-27 10:36:37 +08:00
0fdb4e7c12 chore: enhance the test script of conversation service (#28739)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 09:57:52 +08:00
64babb35e2 feat: Add comprehensive unit tests for DatasetCollectionBindingService (dataset collection binding methods) (#28724)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-27 09:55:42 +08:00
38522e5dfa fix: use default_factory for callable defaults in ORM dataclasses (#28730) 2025-11-27 09:39:49 +09:00
4ccc150fd1 test: add comprehensive unit tests for ExternalDatasetService (external knowledge API integration) (#28716)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-26 23:33:46 +08:00
a4c57017d5 add: badges (#28722) 2025-11-26 23:30:41 +08:00
b2a7cec644 add unit tests for template transform node (#28595)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-26 22:50:20 +08:00
ddc5cbe865 feat: complete test script of dataset service (#28710)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-26 22:48:08 +08:00
1e23957657 fix(ops): add streaming metrics and LLM span for agent-chat traces (#28320)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-26 22:45:20 +08:00
2731b04ff9 Pydantic models (#28697)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-26 22:44:14 +08:00
e8ca80a61a add unit tests for list operator node (#28597)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-26 22:43:30 +08:00
e76129b5a4 test: add comprehensive unit tests for HitTestingService Fix: #28667 (#28668)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-26 22:42:58 +08:00
6635ea62c2 fix: change existing node to a webhook node raise 404 (#28686) 2025-11-26 22:41:52 +08:00
6b8c649876 fix: prevent auto-scrolling from stopping in chat (#28690)
Signed-off-by: Yuichiro Utsumi <utsumi.yuichiro@fujitsu.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-26 22:39:29 +08:00
af587f3869 chore: update packageManager version to pnpm@10.23.0 (#28708) 2025-11-26 22:37:05 +08:00
1c1f124891 Enhanced GraphEngine Pause Handling (#28196)
This commit: 

1. Convert `pause_reason` to `pause_reasons` in `GraphExecution` and relevant classes. Change the field from a scalar value to a list that can contain multiple `PauseReason` objects, ensuring all pause events are properly captured.
2. Introduce a new `WorkflowPauseReason` model to record reasons associated with a specific `WorkflowPause`.

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-26 19:59:34 +08:00
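A minimal sketch of the `pause_reasons` change described above, assuming plain dataclasses; only the `GraphExecution`, `PauseReason`, and `WorkflowPauseReason` names come from the commit text, and the field layout is illustrative rather than the actual Dify models:

```python
from dataclasses import dataclass, field


@dataclass
class PauseReason:
    """One reason a graph execution paused (illustrative fields)."""
    node_id: str
    message: str


@dataclass
class GraphExecution:
    workflow_run_id: str
    # Previously a single scalar pause_reason; now a list so that every
    # pause event raised during one execution is captured.
    pause_reasons: list[PauseReason] = field(default_factory=list)


@dataclass
class WorkflowPauseReason:
    """Records one reason associated with a specific WorkflowPause."""
    workflow_pause_id: str
    reason: PauseReason
```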
b353a126d8 chore: bump version to 1.10.1 (#28696) 2025-11-26 18:32:10 +08:00
ef0e1031b0 pref: reduce the times of useNodes reRender (#28682)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-26 16:52:47 +08:00
d7010f582f Fix 500 error in knowledge base, select weightedScore and click retrieve. (#28586)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-26 16:44:00 +08:00
d696b9f35e Use pnpm dev in dev/start-web (#28684) 2025-11-26 16:24:01 +08:00
665d49d375 Fixes session scope bug in FileService.delete_file (#27911)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
2025-11-26 16:21:33 +08:00
26a1c84881 chore: upgrade system libraries and Python dependencies (#28624)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Xiyuan Chen <52963600+GareArc@users.noreply.github.com>
2025-11-26 15:25:28 +08:00
dbecba710b frontend auto testing rules (#28679)
Co-authored-by: CodingOnStar <hanxujiang@dify.ai>
Co-authored-by: 姜涵煦 <hanxujiang@jianghanxudeMacBook-Pro.local>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-26 15:18:07 +08:00
591414307a fix: fixed workflow as tool files field return empty problem (#27925)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
2025-11-26 14:00:36 +08:00
1241cab113 chore: enhance the hint when the user triggers an invalid webhook request (#28671) 2025-11-26 14:00:16 +08:00
490b7ac43c fix: fix feedback like or dislike not display in logs (#28652) 2025-11-26 13:59:47 +08:00
0f521b26ae Feat/add test script for tool models (#28653) 2025-11-26 09:43:39 +08:00
e4ec4e1470 test: add comprehensive unit tests for SegmentService - Fix: #28656 (#28568)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-26 09:43:00 +08:00
203c2f0456 feat(web): update marketplace description & icon (#28662) 2025-11-26 09:42:09 +08:00
b502d30e77 fix(web): resolve readme-panel display and styling issues (#28658) 2025-11-26 02:21:50 +09:00
a486c47b1e fix: ensure advanced-chat workflows stop correctly (#27803)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-25 20:09:03 +08:00
f76a3f545c Feat/add weaviate tokenization configurable (#28159)
Co-authored-by: lijiezhao <lijiezhao@perfect99.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-25 20:07:45 +08:00
b5650b579d fix [Chore/Refactor] Generate complete API documentation using Flask-RESTX #24421 (#28649)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-25 20:04:27 +08:00
83702762c8 use no-root user in docker image by default (#26419) 2025-11-25 19:59:45 +08:00
abc13ef762 Feat/web workflow improvements (#27981)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: johnny0120 <johnny0120@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Wood <tuiskuwood@outlook.com>
2025-11-25 19:54:40 +08:00
ce00388278 fix(TriggerProviderIdentity): avoid nullable tags (#28646) 2025-11-25 19:37:06 +08:00
4a76318877 fix: draft run any nodes raise 500 (#28636) 2025-11-25 18:09:02 +08:00
yyh e073e755f9 Fix start tab marketplace trigger search and plugin list scroll (#28645) 2025-11-25 18:08:46 +08:00
57b405c4c2 fix(style): update ExternalDataToolModal to support dark mode using semantic tokens (#28630) 2025-11-25 15:58:43 +08:00
2181ffdc89 fix: chatflow log details always navigate to page first (#28626) 2025-11-25 15:54:15 +08:00
yyh 82dac2eba0 chore: add missing translations (#28631) 2025-11-25 14:52:17 +08:00
yyh 58be008676 chore: refactor i18n scripts and remove extra keys (#28618) 2025-11-25 13:23:19 +08:00
Jax eed38c8b2a Fix(workflow): Prevent token overcount caused by loop/iteration (#28406) 2025-11-25 09:56:59 +08:00
6bd114285c fix: i18n: exexutions translation (#28610)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-11-25 09:39:17 +08:00
25698ccd54 Feat/test workflow models (#28604) 2025-11-25 09:38:27 +08:00
bb3aa0178d fix: update plugin verification logic to use unique identifier instea… (#28608) 2025-11-25 00:40:25 +08:00
751ce4ec41 more typed orm (#28577)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-24 21:01:46 +08:00
da98a38b14 fix: i18n: standardize trigger events terminology in billing translations (#28543)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-24 21:01:32 +08:00
yyh 034e3e85e9 Fix Node.js SDK routes and multipart handling (#28573) 2025-11-24 21:00:40 +08:00
aab95d0626 fix: Failed to load API definition (#28509)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Asuka Minato <i@asukaminato.eu.org>
2025-11-24 21:44:09 +09:00
15ea27868e pref: workflow (#28591) 2025-11-24 17:02:18 +08:00
bcbd3de336 fix: i18n: stop running translation (#28571)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-11-24 12:45:06 +08:00
ice a0daab2711 feat(seo): add meaningful <h1> headings across all public pages (#28569)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-24 12:42:04 +08:00
e1d11681c0 fix: plugin auto update display issues (#28564) 2025-11-24 11:08:40 +08:00
8a995d0c21 chore: not using db.session.get (#28555)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-24 11:06:06 +08:00
6241b87f90 more typed orm (#28519)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-24 10:50:20 +08:00
2c9e435558 feat: complete app modesls test script (#28549)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-24 10:50:09 +08:00
b12057b7e5 fix: add COMPOSE_PROFILES param to middleware.env.example file (#28541)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-24 10:49:33 +08:00
yyh 2445d04d19 chore: fix de-DE translations (#28552) 2025-11-24 10:11:19 +08:00
a58986eb06 chore(deps): bump clickhouse-connect from 0.7.19 to 0.10.0 in /api (#28559)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-24 10:11:00 +08:00
a39b151d5f feat: add comprehensive unit tests for dataset service retrieval/list… (#28539)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-24 10:08:43 +08:00
3841e8578f fix: use default values for optional workflow input variables (#28546) (#28527)
Co-authored-by: Claude <noreply@anthropic.com>
2025-11-24 10:08:26 +08:00
e0824c2d93 api -> console_ns (#28246)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-24 10:04:11 +08:00
c75a4e6309 chore: translate i18n files and update type definitions (#28528)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: yyh <92089059+lyzno1@users.noreply.github.com>
2025-11-23 15:47:57 +08:00
1dfde240cb chore: translate i18n files and update type definitions (#28518)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-22 13:54:08 +08:00
c6e6f3b7cb feat: MLflow tracing (#26093)
Signed-off-by: B-Step62 <yuki.watanabe@databricks.com>
Co-authored-by: Asuka Minato <i@asukaminato.eu.org>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-22 13:53:58 +08:00
ea320ce055 feat: add comprehensive unit tests for dataset service creation methods (#28522)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-22 13:38:35 +08:00
6d3ed468d8 feat: add a stop run button to the published app UI (#27509)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-21 22:26:30 +08:00
a6c6bcf95c more typed orm (#28507) 2025-11-21 21:45:51 +08:00
63b8bbbab3 feat: complete test script for dataset models (#28512)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-21 21:37:25 +08:00
33ff01d04c Support node reference multiple structured_output variables in single-step run (#26661)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-21 17:54:57 +08:00
ae126fd56f Fix/24655 (#26527)
Co-authored-by: charles liu <dearcharles.liu@gmail.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-21 17:49:12 +08:00
9fed2dc065 fix: Code editor throws dozen of errors (#28500) 2025-11-21 16:48:35 +08:00
2e0964e0b0 fix(api): SegmentType.is_valid() raises AssertionError for SegmentType.GROUP (#28249)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-21 16:43:09 +09:00
237bb4595b more typed orm (#28494)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-21 16:42:27 +09:00
4486b54680 Clean up legacy conditions data in if-else nodes to prevent misjudgments (#28148)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-21 14:26:57 +08:00
1a2f8dfcb4 use deco (#28153)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-21 14:25:53 +08:00
3c30d0f41b more typed orm (#28331)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-21 14:23:32 +09:00
5f61ca5e6f feat: Implement partial update for document metadata, allowing merging of new values with existing ones. (#28390)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-21 12:58:20 +08:00
06466cb73a fix: fix numeric type conversion issue in if-else condition comparison (#28155)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-21 12:58:08 +08:00
c5b6219006 Feat/add test script for account models (#28479) 2025-11-21 12:54:50 +08:00
znn ae5b5a6aa9 disable sticky scroll (#28248) 2025-11-21 11:24:26 +08:00
a4c4d18f42 fix(api): add session_id validation for webapp JWT authentication (#28297)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-21 11:23:52 +08:00
3cf19dc07f add two test examples (#28236)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-21 10:36:41 +08:00
73c58e4cbb chore: translate i18n files and update type definitions (#28478)
Co-authored-by: asukaminato0721 <30024051+asukaminato0721@users.noreply.github.com>
2025-11-21 10:35:04 +08:00
c2043d0f6d fix: allow API to access conversations created before upgrade to 1.10.0 (#28462)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-21 10:34:55 +08:00
cad2991946 feat: support redis 7.0 shared pub and sub (#28333) 2025-11-21 10:33:52 +08:00
e260815c5e fix: adjust overflow styles in EditMetadataBatchModal for better layout (#28445) 2025-11-21 10:30:52 +08:00
b4e7239ac7 fix: correct trigger events limit modal wording (#28463) 2025-11-21 03:23:08 +09:00
4b6f4081d6 fix: treat -1 as unlimited for API rate limit and trigger events (#28460) 2025-11-21 03:22:00 +09:00
d1c9183d3b fix: correct monitor and fix trigger billing rate limit (#28465) 2025-11-20 20:37:10 +08:00
2f9705eb6f refactor: remove TimeSliceLayer before the release of HITL (#28441)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-20 18:20:20 +08:00
0e3fab1f9f fix: add missing particle in Japanese trigger events translation (#28452) 2025-11-20 16:59:30 +08:00
2431ddfde6 Feat integrate partner stack (#28353)
Co-authored-by: Joel <iamjoel007@gmail.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 15:58:05 +08:00
1e4e963d8c chore: update celery command for debugging trigger (#28443) 2025-11-20 15:43:22 +08:00
522508df28 fix: add app_id to Redis cache keys for trigger nodes to ensure uniqueness (#28243)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 13:34:05 +08:00
859f73c19d fix: add .ts and .mjs to EditorConfig indent rules (#28397)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 13:27:00 +08:00
82c11e36ea fix: remove deprecated UnsafeUnwrappedHeaders usage (#28219)
Co-authored-by: Claude <noreply@anthropic.com>
2025-11-20 13:20:41 +08:00
a6cd2ad880 fix(web): remove StatusPanel's internal useStore to fix context issues (#28348) 2025-11-20 12:50:46 +08:00
b2a604b801 Add Comprehensive Unit Tests for Console Auth Controllers (#28349)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 12:50:16 +08:00
7c060fc35c fix: lazy init audioplayer to fix no tts message also switch audio source bug (#28433) 2025-11-20 12:48:11 +08:00
48e39b60a8 fix: update table alias in document service display status test asser… (#28436) 2025-11-20 12:47:45 +08:00
f038aa4746 fix: resolve CSRF token cookie name mismatch in browser (#28228) (#28378)
Co-authored-by: Claude <noreply@anthropic.com>
2025-11-20 11:40:35 +08:00
4833d39ab3 fix(workflow): validate node compatibility when importing dsl between chatflows and workflows (#28012) 2025-11-20 11:40:24 +08:00
fa910be0f6 Fix duration displayed for workflow steps on Weave dashboard (#28289) 2025-11-20 11:37:01 +08:00
bc274e7300 refactor(web): remove redundant dataset card-item components and related code (#28199) 2025-11-20 11:36:41 +08:00
7b1fc4d2e6 fix: add make test for short cut backend unittest (#28380)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
2025-11-20 11:33:42 +08:00
204d5f1bb9 chore: translate i18n files and update type definitions (#28429)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:32:55 +08:00
8fc1c7d994 chore: remove redundant reimports (#28415)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Asuka Minato <i@asukaminato.eu.org>
2025-11-20 11:28:29 +08:00
879869d3e3 fix(web): fix checkbox unselectable bug & optimize document-list/app-annotation styles (#28244) 2025-11-20 11:28:20 +08:00
1d2cdf3489 feat: add display status filtering to document list and API (#28342)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:27:44 +08:00
a5d0e68675 feat(workflow): optimize workflow canvas pan and scroll behavior (#28250)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:27:30 +08:00
605e543372 chore: translate i18n files and update type definitions (#28425)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:26:49 +08:00
c432f601ab fix: change TenantApi endpoint from GET to POST (#27858)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:22:37 +08:00
e8d03a422d fix: improve email code sign-in experience (#28307) 2025-11-20 11:19:15 +08:00
6be013e072 feat: implement RFC-compliant OAuth discovery with dynamic scope selection for MCP providers (#28294)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-20 11:18:16 +08:00
znn 014cbaf387 make expand/collapse in question classifier node (#26772)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
2025-11-20 11:17:34 +08:00
1be38183e5 fix(frontend): add missing vertical type to divider in provider config modal (#28387)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-20 11:17:04 +08:00
ice 8bab42e224 style(web): fix vertical alignment of search button on apps page (#28398) 2025-11-20 11:14:09 +08:00
99e9fc751b refactor: refactor python sdk (#28118)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-20 11:10:53 +08:00
a1b735a4c0 feat: trigger billing (#28335)
Signed-off-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-20 10:15:23 +08:00
c0b7ffd5d0 feat:mysql adaptation for metadb (#28188) 2025-11-20 09:44:39 +08:00
012877d8d4 fix: address user input preparation in workflow app generator (#28410)
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
2025-11-20 02:09:40 +08:00
41bb6f3109 Revert "add vdb-test workflow run filter" (#28382)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-19 20:02:24 +08:00
88c9b18cb6 fix(docker): start-up TiFlash (#28376) 2025-11-19 13:59:56 +08:00
6efdc94661 refactor: consume events after pause/abort and improve API clarity (#28328)
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
2025-11-18 19:04:11 +08:00
68526c09fc chore: translate i18n files and update type definitions (#28284)
Co-authored-by: zhsama <33454514+zhsama@users.noreply.github.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
2025-11-18 18:52:36 +08:00
a78bc507c0 fix: dataset metadata counts when documents are deleted (#28305)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-11-18 17:36:07 +08:00
e83c7438cb doc: add doc for env config when site and backend are in different domains (#28318)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-18 17:29:54 +08:00
82068a6918 add vdb-test workflow run filter (#28336) 2025-11-18 17:22:15 +08:00
108bcbeb7c add cnt script and one more example (#28272)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-18 16:44:14 +09:00
c4b02be6d3 fix: published webhook can't receive inputs (#28205) 2025-11-18 11:14:26 +08:00
30eebf804f chore: remove unused style.module.css from app-icon component (#28302) 2025-11-18 10:36:39 +08:00
ad7fdd18d0 fix: update currentTriggerPlugin check in BasePanel component (#28287) 2025-11-17 17:19:35 +08:00
5d2fbf5215 Perf/mutual node UI (#28282) 2025-11-17 16:23:04 +08:00
4a89403566 fix: click log panel of log page cause whole page crash (#28218) 2025-11-14 16:38:43 +09:00
e0c05b2123 add icon for forum (#28164) 2025-11-14 16:38:19 +09:00
85b99580ea fix: card view render (#28189) 2025-11-14 14:16:11 +08:00
15fbedfcad feat: add icon gallery stories (#28214)
Signed-off-by: lyzno1 <yuanyouhuilyz@gmail.com>
2025-11-14 13:34:23 +08:00
1e6d0de48b fix: knowledge pipeline can not published (#28203) 2025-11-14 09:47:37 +08:00
cad751c00c Upgrade weave version to fix weave configuration failure (#28197) 2025-11-14 09:47:21 +08:00
a47276ac24 chore: bump to 1.10.0 (#28186)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-13 22:36:04 +08:00
20403c69b2 refactor(web): remove redundant add-tool-modal components and related code (#27996) 2025-11-13 20:21:04 +08:00
ffc04f2a9b fix: StreamableHTTPTransport got invalid json exception when receive a ping event from mcp server #28111 (#28116) 2025-11-13 20:19:48 +08:00
d1580791e4 TypedBase + TypedDict (#28137)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-13 20:18:51 +08:00
c74eb4fcf3 minor fix(rag): return early when pushing empty tasks to avoid Redis DataError (#28027)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-11-13 20:18:11 +08:00
a798534337 fix(web): fix unit promotion in formatNumberAbbreviated (#27918)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-11-13 20:17:26 +08:00
470883858e fix: adjust padding in AgentNode and NodeComponent for consistent layout (#28175) 2025-11-13 20:16:56 +08:00
4f4911686d fix: update start-worker alias to include additional queues for bette… (#28179) 2025-11-13 20:16:44 +08:00
6d479dcdbb fix: update package manager version to 10.22.0 (#28181) 2025-11-13 20:16:00 +08:00
24348c40a6 feat: enhance start node metadata to be undeletable in chat mode (#28173) 2025-11-13 18:11:15 +08:00
a39b50adbb fix: skip tests if no database run (#28102)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:57:13 +08:00
81832c14ee Fix: Correctly handle merged cells in DOCX tables to prevent content duplication and loss (#27871)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:56:24 +08:00
b86022c64a feat: add draft trigger detection to app model and UI (#28163)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-13 15:43:58 +08:00
45e816a9f6 fix(knowledge-base): regenerate child chunks not working completely (#27934) 2025-11-13 15:36:27 +08:00
667b1c37a3 fix: can still invite when api is pending (#28161) 2025-11-13 15:28:32 +08:00
b75d533f9b fix(moderation): change OpenAI moderation model to omni-moderation-la… (#28119)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:21:44 +08:00
aece55d82f fix: fixed error when clear value of INTEGER and FLOAT type (#27954)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:21:34 +08:00
c432b398f4 fix: missing pipeline_templates.json when HOSTED_FETCH_PIPELINE_TEMPLATES_MODE is builtin (#27946)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-13 15:04:35 +08:00
9cb2645793 fix: update input field width for retry configuration in RetryOnPanel (#28142) 2025-11-13 15:00:22 +08:00
6ac61bd585 fix: correct spelling of "模板" in translation files (#28151) 2025-11-13 14:58:10 +08:00
b02165ffe6 fix: inconsistent behaviour of zoom in button and shortcut (#27944) 2025-11-13 14:37:27 +08:00
6c576e2c66 add doc (#28016)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-13 13:38:45 +09:00
b0e7e7752f refactor(web): reuse the same edit-custom-collection-modal component, and fix the pop up error (#28003) 2025-11-13 11:44:21 +08:00
2799b79e8c fix: app's ai site text to speech api (#28091) 2025-11-13 11:44:04 +08:00
805a1479f9 fix: simplify graph structure validation in WorkflowService (#28146)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-13 10:59:31 +08:00
fe6538b08d chore: disable workflow logs auto-cleanup by default (#28136)
This PR changes the default value of `WORKFLOW_LOG_CLEANUP_ENABLED` from `true` to `false` across all configuration files.

## Motivation

Setting the default to `false` provides safer default behavior by:

- Preventing unintended data loss for new installations
- Giving users explicit control over when to enable log cleanup
- Following the opt-in principle for data deletion features

Users who need automatic cleanup can enable it by setting `WORKFLOW_LOG_CLEANUP_ENABLED=true` in their configuration.
2025-11-12 22:55:02 +08:00
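The opt-in behavior amounts to reading the flag with a `false` default; a hedged sketch of that pattern (the real Dify config class and parsing may differ):

```python
import os


def workflow_log_cleanup_enabled() -> bool:
    # Cleanup stays disabled unless the operator explicitly sets
    # WORKFLOW_LOG_CLEANUP_ENABLED=true in the environment or .env file.
    return os.environ.get("WORKFLOW_LOG_CLEANUP_ENABLED", "false").lower() == "true"
```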
1bbb9d6644 convert to TypeBase (#27935)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-12 21:50:13 +08:00
5c06e285ec test: create some hooks and utils test script, modified clipboard test script (#27928) 2025-11-12 21:47:06 +08:00
19c92fd670 Add file type validation to paste upload (#28017) 2025-11-12 19:27:56 +08:00
6026bd873b fix: variable assigner can't assign float number (#28068) 2025-11-12 19:27:36 +08:00
1369119a0c fix: determine cpu cores determination in baseedpyright-check script on macos (#28058) 2025-11-12 19:27:27 +08:00
b76e17b25d feat: introduce trigger functionality (#27644)
Signed-off-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: Stream <Stream_2@qq.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
Co-authored-by: zhsama <torvalds@linux.do>
Co-authored-by: Harry <xh001x@hotmail.com>
Co-authored-by: lyzno1 <yuanyouhuilyz@gmail.com>
Co-authored-by: yessenia <yessenia.contact@gmail.com>
Co-authored-by: hjlarry <hjlarry@163.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: WTW0313 <twwu@dify.ai>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-12 17:59:37 +08:00
ca7794305b add transform-datasource-credentials command online check (#28124)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Garfield Dai <dai.hai@foxmail.com>
2025-11-12 17:13:44 +08:00
fd255e81e1 feat(api): Introduce WorkflowResumptionContext for pause state management (#28122)
Certain metadata (including but not limited to `InvokeFrom`, `call_depth`, and `streaming`) is required when resuming a paused workflow. However, these fields are not part of `GraphRuntimeState` and were not saved in the previous implementation of `PauseStatePersistenceLayer`.

This commit addresses this limitation by introducing a `WorkflowResumptionContext` model that wraps both the `*GenerateEntity` and `GraphRuntimeState`. This approach provides:

- A structured container for all necessary resumption data
- Better separation of concerns between execution state and persistence
- Enhanced extensibility for future metadata additions
- Clearer naming that distinguishes from `GraphRuntimeState`

The `WorkflowResumptionContext` model makes extending the pause state easier while maintaining backward compatibility and proper version management for the entire execution state ecosystem.

Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-12 17:00:02 +08:00
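A rough sketch of the wrapper described above, assuming dataclasses; only `InvokeFrom`, `call_depth`, `streaming`, and `GraphRuntimeState` are named in the commit text, and the remaining structure is an assumption rather than the actual model:

```python
from dataclasses import dataclass
from enum import Enum
from typing import Any


class InvokeFrom(Enum):
    # Values are illustrative stand-ins for Dify's real InvokeFrom enum.
    SERVICE_API = "service-api"
    WEB_APP = "web-app"
    DEBUGGER = "debugger"


@dataclass
class WorkflowResumptionContext:
    # Version tag so the persisted pause state can evolve without breaking
    # older snapshots (hypothetical field).
    version: str
    # The *GenerateEntity that started the run and the engine's
    # GraphRuntimeState snapshot, wrapped together for persistence.
    generate_entity: Any
    graph_runtime_state: Any
    # Metadata that is required on resume but lives outside GraphRuntimeState.
    invoke_from: InvokeFrom
    call_depth: int = 0
    streaming: bool = True
```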
09d31d1263 chore: improve the user experience of not login into apps (#28120) 2025-11-12 16:47:45 +08:00
47dc26f011 fix document index test (#28113) 2025-11-12 16:00:10 +08:00
123bb3ec08 When graph_engine worker run exception, keep the node_id for deep res… (#26205)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
2025-11-12 15:03:45 +08:00
90f77282e3 chore: not SaaS version can query long log time range (#28109) 2025-11-12 14:45:56 +08:00
5208867ccc fix document enable (#28081) 2025-11-11 17:50:45 +08:00
edc7ccc795 chore: add type-check to pre-commit (#28005) 2025-11-11 16:14:39 +08:00
c9798f6425 fix(api): Trace Hierarchy, Span Status, and Broken Workflow for Arize & Phoenix Integration (#27937)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-11-11 11:49:19 +08:00
20ecf7f1d0 chore: remove unused enterprise bot from the readme (#28073)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-11 10:52:27 +08:00
9dcb780fcb chore: translate i18n files and update type definitions (#28054)
Co-authored-by: iamjoel <2120155+iamjoel@users.noreply.github.com>
2025-11-11 09:32:53 +08:00
1cb7b09933 chore: Remove trailing space from migration filename (#28040) 2025-11-11 09:32:42 +08:00
2c62a77cf4 Chore: change query log time range (#28052)
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-10 18:39:12 +08:00
b9bc48d8dd feat(api): Introduce Broadcast Channel (#27835)
This PR introduces a `BroadcastChannel` abstraction with broadcasting and at-most once delivery semantics, serving as the communication component between celery worker and API server.

It also includes a reference implementation backed by Redis PubSub.

Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-11-10 17:23:21 +08:00
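A rough sketch of a broadcast channel with at-most-once delivery backed by Redis Pub/Sub via redis-py; the actual `BroadcastChannel` interface in this PR may differ, and the class and method names here are illustrative:

```python
from abc import ABC, abstractmethod
from collections.abc import Iterator

import redis


class BroadcastChannel(ABC):
    """Fan-out messaging between the API server and Celery workers."""

    @abstractmethod
    def publish(self, topic: str, payload: bytes) -> None: ...

    @abstractmethod
    def subscribe(self, topic: str) -> Iterator[bytes]: ...


class RedisBroadcastChannel(BroadcastChannel):
    def __init__(self, client: redis.Redis) -> None:
        self._client = client

    def publish(self, topic: str, payload: bytes) -> None:
        # Redis Pub/Sub is at-most-once: subscribers that are not connected
        # when the message is published never receive it.
        self._client.publish(topic, payload)

    def subscribe(self, topic: str) -> Iterator[bytes]:
        pubsub = self._client.pubsub()
        pubsub.subscribe(topic)
        for message in pubsub.listen():
            if message["type"] == "message":
                yield message["data"]
```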
ed234e311b fix workflow default updated_at (#28047) 2025-11-10 18:20:38 +09:00
9843fec393 fix: elasticsearch_vector version (#28028)
Co-authored-by: huangzhuo <huangzhuo1@xiaomi.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-10 13:17:13 +09:00
aa4cabdeb5 feat: Add Audio Content Support for MCP Tools (#27979) 2025-11-10 10:12:11 +08:00
eea713b668 Fix typo in weaviate comment, improve time test precision, and add security tests for get-icon utility (#27919)
Signed-off-by: NeatGuyCoding <15627489+NeatGuyCoding@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-10 10:11:54 +08:00
fc62538a94 chore(deps): bump scipy-stubs from 1.16.2.3 to 1.16.3.0 in /api (#28025)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 09:54:56 +08:00
7994144df7 add onupdate=func.current_timestamp() (#28014)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2025-11-10 01:48:52 +09:00
e153c483b6 fix: the model list encountered two children with the same key (#27956)
Co-authored-by: haokai <haokai@shuwen.com>
2025-11-09 21:39:59 +08:00
422bb4d4bb fix: fix https://github.com/langgenius/dify/issues/27939 (#27985) 2025-11-09 21:39:05 +08:00
87a80d7613 docs: clarify how to obtain workflow_id for version execution (#28007)
Signed-off-by: OneZero-Y <aukovyps@163.com>
2025-11-09 21:38:06 +08:00
e91105ca87 fix: bump brotli to 1.2.0 resloved CVE-2025-6176 (#27950)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-11-07 15:57:29 +08:00
37903722fe refactor: implement tenant self queue for rag tasks (#27559)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
2025-11-06 21:25:50 +08:00
f4c82d0010 fix(api): fix VariablePool.get adding unexpected keys to variable_dictionary (#26767)
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-11-06 18:30:35 +08:00
fe50093c18 fix: prevent fetch version info in enterprise edition (#27923) 2025-11-06 17:59:53 +08:00
4317af1e90 fix jina reader transform (#27922) 2025-11-06 17:35:53 +08:00
1980 changed files with 144103 additions and 43559 deletions

.cursorrules (new file, +6)

@@ -0,0 +1,6 @@
# Cursor Rules for Dify Project
## Automated Test Generation
- Use `web/testing/testing.md` as the canonical instruction set for generating frontend automated tests.
- When proposing or saving tests, re-read that document and follow every requirement.


@@ -6,11 +6,10 @@ cd web && pnpm install
pipx install uv
echo "alias start-api=\"cd $WORKSPACE_ROOT/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug\"" >> ~/.bashrc
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage\"" >> ~/.bashrc
echo "alias start-worker=\"cd $WORKSPACE_ROOT/api && uv run python -m celery -A app.celery worker -P threads -c 1 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor\"" >> ~/.bashrc
echo "alias start-web=\"cd $WORKSPACE_ROOT/web && pnpm dev\"" >> ~/.bashrc
echo "alias start-web-prod=\"cd $WORKSPACE_ROOT/web && pnpm build && pnpm start\"" >> ~/.bashrc
echo "alias start-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d\"" >> ~/.bashrc
echo "alias stop-containers=\"cd $WORKSPACE_ROOT/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down\"" >> ~/.bashrc
source /home/vscode/.bashrc


@@ -29,7 +29,7 @@ trim_trailing_whitespace = false
# Matches multiple files with brace expansion notation
# Set default charset
[*.{js,tsx}]
[*.{js,jsx,ts,tsx,mjs}]
indent_style = space
indent_size = 2

.github/CODEOWNERS (new file, +226)

@@ -0,0 +1,226 @@
# CODEOWNERS
# This file defines code ownership for the Dify project.
# Each line is a file pattern followed by one or more owners.
# Owners can be @username, @org/team-name, or email addresses.
# For more information, see: https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners
* @crazywoola @laipz8200 @Yeuoly
# Backend (default owner, more specific rules below will override)
api/ @QuantumGhost
# Backend - Workflow - Engine (Core graph execution engine)
api/core/workflow/graph_engine/ @laipz8200 @QuantumGhost
api/core/workflow/runtime/ @laipz8200 @QuantumGhost
api/core/workflow/graph/ @laipz8200 @QuantumGhost
api/core/workflow/graph_events/ @laipz8200 @QuantumGhost
api/core/workflow/node_events/ @laipz8200 @QuantumGhost
api/core/model_runtime/ @laipz8200 @QuantumGhost
# Backend - Workflow - Nodes (Agent, Iteration, Loop, LLM)
api/core/workflow/nodes/agent/ @Nov1c444
api/core/workflow/nodes/iteration/ @Nov1c444
api/core/workflow/nodes/loop/ @Nov1c444
api/core/workflow/nodes/llm/ @Nov1c444
# Backend - RAG (Retrieval Augmented Generation)
api/core/rag/ @JohnJyong
api/services/rag_pipeline/ @JohnJyong
api/services/dataset_service.py @JohnJyong
api/services/knowledge_service.py @JohnJyong
api/services/external_knowledge_service.py @JohnJyong
api/services/hit_testing_service.py @JohnJyong
api/services/metadata_service.py @JohnJyong
api/services/vector_service.py @JohnJyong
api/services/entities/knowledge_entities/ @JohnJyong
api/services/entities/external_knowledge_entities/ @JohnJyong
api/controllers/console/datasets/ @JohnJyong
api/controllers/service_api/dataset/ @JohnJyong
api/models/dataset.py @JohnJyong
api/tasks/rag_pipeline/ @JohnJyong
api/tasks/add_document_to_index_task.py @JohnJyong
api/tasks/batch_clean_document_task.py @JohnJyong
api/tasks/clean_document_task.py @JohnJyong
api/tasks/clean_notion_document_task.py @JohnJyong
api/tasks/document_indexing_task.py @JohnJyong
api/tasks/document_indexing_sync_task.py @JohnJyong
api/tasks/document_indexing_update_task.py @JohnJyong
api/tasks/duplicate_document_indexing_task.py @JohnJyong
api/tasks/recover_document_indexing_task.py @JohnJyong
api/tasks/remove_document_from_index_task.py @JohnJyong
api/tasks/retry_document_indexing_task.py @JohnJyong
api/tasks/sync_website_document_indexing_task.py @JohnJyong
api/tasks/batch_create_segment_to_index_task.py @JohnJyong
api/tasks/create_segment_to_index_task.py @JohnJyong
api/tasks/delete_segment_from_index_task.py @JohnJyong
api/tasks/disable_segment_from_index_task.py @JohnJyong
api/tasks/disable_segments_from_index_task.py @JohnJyong
api/tasks/enable_segment_to_index_task.py @JohnJyong
api/tasks/enable_segments_to_index_task.py @JohnJyong
api/tasks/clean_dataset_task.py @JohnJyong
api/tasks/deal_dataset_index_update_task.py @JohnJyong
api/tasks/deal_dataset_vector_index_task.py @JohnJyong
# Backend - Plugins
api/core/plugin/ @Mairuis @Yeuoly @Stream29
api/services/plugin/ @Mairuis @Yeuoly @Stream29
api/controllers/console/workspace/plugin.py @Mairuis @Yeuoly @Stream29
api/controllers/inner_api/plugin/ @Mairuis @Yeuoly @Stream29
api/tasks/process_tenant_plugin_autoupgrade_check_task.py @Mairuis @Yeuoly @Stream29
# Backend - Trigger/Schedule/Webhook
api/controllers/trigger/ @Mairuis @Yeuoly
api/controllers/console/app/workflow_trigger.py @Mairuis @Yeuoly
api/controllers/console/workspace/trigger_providers.py @Mairuis @Yeuoly
api/core/trigger/ @Mairuis @Yeuoly
api/core/app/layers/trigger_post_layer.py @Mairuis @Yeuoly
api/services/trigger/ @Mairuis @Yeuoly
api/models/trigger.py @Mairuis @Yeuoly
api/fields/workflow_trigger_fields.py @Mairuis @Yeuoly
api/repositories/workflow_trigger_log_repository.py @Mairuis @Yeuoly
api/repositories/sqlalchemy_workflow_trigger_log_repository.py @Mairuis @Yeuoly
api/libs/schedule_utils.py @Mairuis @Yeuoly
api/services/workflow/scheduler.py @Mairuis @Yeuoly
api/schedule/trigger_provider_refresh_task.py @Mairuis @Yeuoly
api/schedule/workflow_schedule_task.py @Mairuis @Yeuoly
api/tasks/trigger_processing_tasks.py @Mairuis @Yeuoly
api/tasks/trigger_subscription_refresh_tasks.py @Mairuis @Yeuoly
api/tasks/workflow_schedule_tasks.py @Mairuis @Yeuoly
api/tasks/workflow_cfs_scheduler/ @Mairuis @Yeuoly
api/events/event_handlers/sync_plugin_trigger_when_app_created.py @Mairuis @Yeuoly
api/events/event_handlers/update_app_triggers_when_app_published_workflow_updated.py @Mairuis @Yeuoly
api/events/event_handlers/sync_workflow_schedule_when_app_published.py @Mairuis @Yeuoly
api/events/event_handlers/sync_webhook_when_app_created.py @Mairuis @Yeuoly
# Backend - Async Workflow
api/services/async_workflow_service.py @Mairuis @Yeuoly
api/tasks/async_workflow_tasks.py @Mairuis @Yeuoly
# Backend - Billing
api/services/billing_service.py @hj24 @zyssyz123
api/controllers/console/billing/ @hj24 @zyssyz123
# Backend - Enterprise
api/configs/enterprise/ @GarfieldDai @GareArc
api/services/enterprise/ @GarfieldDai @GareArc
api/services/feature_service.py @GarfieldDai @GareArc
api/controllers/console/feature.py @GarfieldDai @GareArc
api/controllers/web/feature.py @GarfieldDai @GareArc
# Backend - Database Migrations
api/migrations/ @snakevash @laipz8200
# Frontend
web/ @iamjoel
# Frontend - App - Orchestration
web/app/components/workflow/ @iamjoel @zxhlyh
web/app/components/workflow-app/ @iamjoel @zxhlyh
web/app/components/app/configuration/ @iamjoel @zxhlyh
web/app/components/app/app-publisher/ @iamjoel @zxhlyh
# Frontend - WebApp - Chat
web/app/components/base/chat/ @iamjoel @zxhlyh
# Frontend - WebApp - Completion
web/app/components/share/text-generation/ @iamjoel @zxhlyh
# Frontend - App - List and Creation
web/app/components/apps/ @JzoNgKVO @iamjoel
web/app/components/app/create-app-dialog/ @JzoNgKVO @iamjoel
web/app/components/app/create-app-modal/ @JzoNgKVO @iamjoel
web/app/components/app/create-from-dsl-modal/ @JzoNgKVO @iamjoel
# Frontend - App - API Documentation
web/app/components/develop/ @JzoNgKVO @iamjoel
# Frontend - App - Logs and Annotations
web/app/components/app/workflow-log/ @JzoNgKVO @iamjoel
web/app/components/app/log/ @JzoNgKVO @iamjoel
web/app/components/app/log-annotation/ @JzoNgKVO @iamjoel
web/app/components/app/annotation/ @JzoNgKVO @iamjoel
# Frontend - App - Monitoring
web/app/(commonLayout)/app/(appDetailLayout)/\[appId\]/overview/ @JzoNgKVO @iamjoel
web/app/components/app/overview/ @JzoNgKVO @iamjoel
# Frontend - App - Settings
web/app/components/app-sidebar/ @JzoNgKVO @iamjoel
# Frontend - RAG - Hit Testing
web/app/components/datasets/hit-testing/ @JzoNgKVO @iamjoel
# Frontend - RAG - List and Creation
web/app/components/datasets/list/ @iamjoel @WTW0313
web/app/components/datasets/create/ @iamjoel @WTW0313
web/app/components/datasets/create-from-pipeline/ @iamjoel @WTW0313
web/app/components/datasets/external-knowledge-base/ @iamjoel @WTW0313
# Frontend - RAG - Orchestration (general rule first, specific rules below override)
web/app/components/rag-pipeline/ @iamjoel @WTW0313
web/app/components/rag-pipeline/components/rag-pipeline-main.tsx @iamjoel @zxhlyh
web/app/components/rag-pipeline/store/ @iamjoel @zxhlyh
# Frontend - RAG - Documents List
web/app/components/datasets/documents/list.tsx @iamjoel @WTW0313
web/app/components/datasets/documents/create-from-pipeline/ @iamjoel @WTW0313
# Frontend - RAG - Segments List
web/app/components/datasets/documents/detail/ @iamjoel @WTW0313
# Frontend - RAG - Settings
web/app/components/datasets/settings/ @iamjoel @WTW0313
# Frontend - Ecosystem - Plugins
web/app/components/plugins/ @iamjoel @zhsama
# Frontend - Ecosystem - Tools
web/app/components/tools/ @iamjoel @Yessenia-d
# Frontend - Ecosystem - MarketPlace
web/app/components/plugins/marketplace/ @iamjoel @Yessenia-d
# Frontend - Login and Registration
web/app/signin/ @douxc @iamjoel
web/app/signup/ @douxc @iamjoel
web/app/reset-password/ @douxc @iamjoel
web/app/install/ @douxc @iamjoel
web/app/init/ @douxc @iamjoel
web/app/forgot-password/ @douxc @iamjoel
web/app/account/ @douxc @iamjoel
# Frontend - Service Authentication
web/service/base.ts @douxc @iamjoel
# Frontend - WebApp Authentication and Access Control
web/app/(shareLayout)/components/ @douxc @iamjoel
web/app/(shareLayout)/webapp-signin/ @douxc @iamjoel
web/app/(shareLayout)/webapp-reset-password/ @douxc @iamjoel
web/app/components/app/app-access-control/ @douxc @iamjoel
# Frontend - Explore Page
web/app/components/explore/ @CodingOnStar @iamjoel
# Frontend - Personal Settings
web/app/components/header/account-setting/ @CodingOnStar @iamjoel
web/app/components/header/account-dropdown/ @CodingOnStar @iamjoel
# Frontend - Analytics
web/app/components/base/ga/ @CodingOnStar @iamjoel
# Frontend - Base Components
web/app/components/base/ @iamjoel @zxhlyh
# Frontend - Utils and Hooks
web/utils/classnames.ts @iamjoel @zxhlyh
web/utils/time.ts @iamjoel @zxhlyh
web/utils/format.ts @iamjoel @zxhlyh
web/utils/clipboard.ts @iamjoel @zxhlyh
web/hooks/use-document-title.ts @iamjoel @zxhlyh
# Frontend - Billing and Education
web/app/components/billing/ @iamjoel @zxhlyh
web/app/education-apply/ @iamjoel @zxhlyh
# Frontend - Workspace
web/app/components/header/account-dropdown/workplace-selector/ @iamjoel @zxhlyh

.github/copilot-instructions.md (new file, +12)

@@ -0,0 +1,12 @@
# Copilot Instructions
GitHub Copilot must follow the unified frontend testing requirements documented in `web/testing/testing.md`.
Key reminders:
- Generate tests using the mandated tech stack, naming, and code style (AAA pattern, `fireEvent`, descriptive test names, cleans up mocks).
- Cover rendering, prop combinations, and edge cases by default; extend coverage for hooks, routing, async flows, and domain-specific components when applicable.
- Target >95% line and branch coverage and 100% function/statement coverage.
- Apply the project's mocking conventions for i18n, toast notifications, and Next.js utilities.
Any suggestions from Copilot that conflict with `web/testing/testing.md` should be revised before acceptance.

View File

@ -62,7 +62,7 @@ jobs:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db
db_postgres
redis
sandbox
ssrf_proxy

View File

@ -2,6 +2,8 @@ name: autofix.ci
on:
pull_request:
branches: ["main"]
push:
branches: ["main"]
permissions:
contents: read
@ -26,6 +28,11 @@ jobs:
# Format code
uv run ruff format ..
- name: count migration progress
run: |
cd api
./cnt_base.sh
- name: ast-grep
run: |
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all

View File

@ -8,7 +8,7 @@ concurrency:
cancel-in-progress: true
jobs:
db-migration-test:
db-migration-test-postgres:
runs-on: ubuntu-latest
steps:
@ -45,7 +45,7 @@ jobs:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db
db_postgres
redis
- name: Prepare configs
@ -57,3 +57,60 @@ jobs:
env:
DEBUG: true
run: uv run --directory api flask upgrade-db
db-migration-test-mysql:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Setup UV and Python
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
python-version: "3.12"
cache-dependency-glob: api/uv.lock
- name: Install dependencies
run: uv sync --project api
- name: Ensure Offline migration are supported
run: |
# upgrade
uv run --directory api flask db upgrade 'base:head' --sql
# downgrade
uv run --directory api flask db downgrade 'head:base' --sql
- name: Prepare middleware env for MySQL
run: |
cd docker
cp middleware.env.example middleware.env
sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' middleware.env
sed -i 's/DB_HOST=db_postgres/DB_HOST=db_mysql/' middleware.env
sed -i 's/DB_PORT=5432/DB_PORT=3306/' middleware.env
sed -i 's/DB_USERNAME=postgres/DB_USERNAME=mysql/' middleware.env
- name: Set up Middlewares
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db_mysql
redis
- name: Prepare configs for MySQL
run: |
cd api
cp .env.example .env
sed -i 's/DB_TYPE=postgresql/DB_TYPE=mysql/' .env
sed -i 's/DB_PORT=5432/DB_PORT=3306/' .env
sed -i 's/DB_USERNAME=postgres/DB_USERNAME=root/' .env
- name: Run DB Migration
env:
DEBUG: true
run: uv run --directory api flask upgrade-db

View File

@ -20,22 +20,22 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Check for file changes in i18n/en-US
id: check_files
run: |
recent_commit_sha=$(git rev-parse HEAD)
second_recent_commit_sha=$(git rev-parse HEAD~1)
changed_files=$(git diff --name-only $recent_commit_sha $second_recent_commit_sha -- 'i18n/en-US/*.ts')
git fetch origin "${{ github.event.before }}" || true
git fetch origin "${{ github.sha }}" || true
changed_files=$(git diff --name-only "${{ github.event.before }}" "${{ github.sha }}" -- 'i18n/en-US/*.ts')
echo "Changed files: $changed_files"
if [ -n "$changed_files" ]; then
echo "FILES_CHANGED=true" >> $GITHUB_ENV
file_args=""
for file in $changed_files; do
filename=$(basename "$file" .ts)
file_args="$file_args --file=$filename"
file_args="$file_args --file $filename"
done
echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
echo "File arguments: $file_args"
@ -77,12 +77,15 @@ jobs:
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update i18n files and type definitions based on en-US changes
title: 'chore: translate i18n files and update type definitions'
commit-message: 'chore(i18n): update translations based on en-US changes'
title: 'chore(i18n): translate i18n files and update type definitions'
body: |
This PR was automatically created to update i18n files and TypeScript type definitions based on changes in en-US locale.
**Triggered by:** ${{ github.sha }}
**Changes included:**
- Updated translation files for all locales
- Regenerated TypeScript type definitions for type safety
branch: chore/automated-i18n-updates
branch: chore/automated-i18n-updates-${{ github.sha }}
delete-branch: true

View File

@ -51,13 +51,13 @@ jobs:
- name: Expose Service Ports
run: sh .github/workflows/expose_service_ports.sh
- name: Set up Vector Store (TiDB)
uses: hoverkraft-tech/compose-action@v2.0.2
with:
compose-file: docker/tidb/docker-compose.yaml
services: |
tidb
tiflash
# - name: Set up Vector Store (TiDB)
# uses: hoverkraft-tech/compose-action@v2.0.2
# with:
# compose-file: docker/tidb/docker-compose.yaml
# services: |
# tidb
# tiflash
- name: Set up Vector Stores (Weaviate, Qdrant, PGVector, Milvus, PgVecto-RS, Chroma, MyScale, ElasticSearch, Couchbase, OceanBase)
uses: hoverkraft-tech/compose-action@v2.0.2
@ -83,8 +83,8 @@ jobs:
ls -lah .
cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env
- name: Check VDB Ready (TiDB)
run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
# - name: Check VDB Ready (TiDB)
# run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
- name: Test Vector Stores
run: uv run --project api bash dev/pytest/pytest_vdb.sh

.gitignore vendored
View File

@ -6,6 +6,9 @@ __pycache__/
# C extensions
*.so
# *db files
*.db
# Distribution / packaging
.Python
build/
@ -183,6 +186,8 @@ docker/volumes/couchbase/*
docker/volumes/oceanbase/*
docker/volumes/plugin_daemon/*
docker/volumes/matrixone/*
docker/volumes/mysql/*
docker/volumes/seekdb/*
!docker/volumes/oceanbase/init.d
docker/nginx/conf.d/default.conf
@ -235,4 +240,7 @@ scripts/stress-test/reports/
# mcp
.playwright-mcp/
.serena/
.serena/
# settings
*.local.json

View File

@ -37,7 +37,7 @@
"-c",
"1",
"-Q",
"dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline",
"dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor",
"--loglevel",
"INFO"
],

View File

@ -0,0 +1,5 @@
# Windsurf Testing Rules
- Use `web/testing/testing.md` as the single source of truth for frontend automated testing.
- Honor every requirement in that document when generating or accepting tests.
- When proposing or saving tests, re-read that document and follow every requirement.

View File

@ -77,6 +77,8 @@ How we prioritize:
For setting up the frontend service, please refer to our comprehensive [guide](https://github.com/langgenius/dify/blob/main/web/README.md) in the `web/README.md` file. This document provides detailed instructions to help you set up the frontend environment properly.
**Testing**: All React components must have comprehensive test coverage. See [web/testing/testing.md](https://github.com/langgenius/dify/blob/main/web/testing/testing.md) for the canonical frontend testing guidelines and follow every requirement described there.
#### Backend
For setting up the backend service, kindly refer to our detailed [instructions](https://github.com/langgenius/dify/blob/main/api/README.md) in the `api/README.md` file. This document contains step-by-step guidance to help you get the backend up and running smoothly.

View File

@ -70,6 +70,11 @@ type-check:
@uv run --directory api --dev basedpyright
@echo "✅ Type check complete"
test:
@echo "🧪 Running backend unit tests..."
@uv run --project api --dev dev/pytest/pytest_unit_tests.sh
@echo "✅ Tests complete"
# Build Docker images
build-web:
@echo "Building web Docker image: $(WEB_IMAGE):$(VERSION)..."
@ -119,6 +124,7 @@ help:
@echo " make check - Check code with ruff"
@echo " make lint - Format and fix code with ruff"
@echo " make type-check - Run type checking with basedpyright"
@echo " make test - Run backend unit tests"
@echo ""
@echo "Docker Build Targets:"
@echo " make build-web - Build web Docker image"
@ -128,4 +134,4 @@ help:
@echo " make build-push-all - Build and push all Docker images"
# Phony targets
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check
.PHONY: build-web build-api push-web push-api build-all push-all build-push-all dev-setup prepare-docker prepare-web prepare-api dev-clean help format check lint type-check test

View File

@ -36,6 +36,12 @@
<img alt="Issues closed" src="https://img.shields.io/github/issues-search?query=repo%3Alanggenius%2Fdify%20is%3Aclosed&label=issues%20closed&labelColor=%20%237d89b0&color=%20%235d6b98"></a>
<a href="https://github.com/langgenius/dify/discussions/" target="_blank">
<img alt="Discussion posts" src="https://img.shields.io/github/discussions/langgenius/dify?labelColor=%20%239b8afb&color=%20%237a5af8"></a>
<a href="https://insights.linuxfoundation.org/project/langgenius-dify" target="_blank">
<img alt="LFX Health Score" src="https://insights.linuxfoundation.org/api/badge/health-score?project=langgenius-dify"></a>
<a href="https://insights.linuxfoundation.org/project/langgenius-dify" target="_blank">
<img alt="LFX Contributors" src="https://insights.linuxfoundation.org/api/badge/contributors?project=langgenius-dify"></a>
<a href="https://insights.linuxfoundation.org/project/langgenius-dify" target="_blank">
<img alt="LFX Active Contributors" src="https://insights.linuxfoundation.org/api/badge/active-contributors?project=langgenius-dify"></a>
</p>
<p align="center">
@ -117,7 +123,7 @@ All of Dify's offerings come with corresponding APIs, so you could effortlessly
Use our [documentation](https://docs.dify.ai) for further references and more in-depth instructions.
- **Dify for enterprise / organizations<br/>**
We provide additional enterprise-centric features. [Log your questions for us through this chatbot](https://udify.app/chat/22L1zSxg6yW1cWQg) or [send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss enterprise needs. <br/>
We provide additional enterprise-centric features. [Send us an email](mailto:business@dify.ai?subject=%5BGitHub%5DBusiness%20License%20Inquiry) to discuss your enterprise needs. <br/>
> For startups and small businesses using AWS, check out [Dify Premium on AWS Marketplace](https://aws.amazon.com/marketplace/pp/prodview-t22mebxzwjhu6) and deploy it to your own AWS VPC with one click. It's an affordable AMI offering with the option to create apps with custom logo and branding.

View File

@ -27,12 +27,12 @@ FILES_URL=http://localhost:5001
# Example: INTERNAL_FILES_URL=http://api:5001
INTERNAL_FILES_URL=http://127.0.0.1:5001
# TRIGGER URL
TRIGGER_URL=http://localhost:5001
# The time in seconds after the signature is rejected
FILES_ACCESS_TIMEOUT=300
# Collaboration mode toggle
ENABLE_COLLABORATION_MODE=false
# Access token expiration time in minutes
ACCESS_TOKEN_EXPIRE_MINUTES=60
@ -72,12 +72,15 @@ REDIS_CLUSTERS_PASSWORD=
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
CELERY_BACKEND=redis
# PostgreSQL database configuration
# Database configuration
DB_TYPE=postgresql
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
DB_HOST=localhost
DB_PORT=5432
DB_DATABASE=dify
SQLALCHEMY_POOL_PRE_PING=true
SQLALCHEMY_POOL_TIMEOUT=30
@ -159,12 +162,11 @@ SUPABASE_URL=your-server-url
# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
# Set COOKIE_DOMAIN when the console frontend and API are on different subdomains.
# Provide the registrable domain (e.g. example.com); leading dots are optional.
# When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). Leading dots are optional.
COOKIE_DOMAIN=
# Vector database configuration
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
# Supported values are `weaviate`, `oceanbase`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
VECTOR_STORE=weaviate
# Prefix used to create collection name in vector database
VECTOR_INDEX_NAME_PREFIX=Vector_index
@ -174,6 +176,18 @@ WEAVIATE_ENDPOINT=http://localhost:8080
WEAVIATE_API_KEY=WVF5YThaHlkYwhGUSmCRgsX3tD5ngdN8pkih
WEAVIATE_GRPC_ENABLED=false
WEAVIATE_BATCH_SIZE=100
WEAVIATE_TOKENIZATION=word
# OceanBase Vector configuration
OCEANBASE_VECTOR_HOST=127.0.0.1
OCEANBASE_VECTOR_PORT=2881
OCEANBASE_VECTOR_USER=root@test
OCEANBASE_VECTOR_PASSWORD=difyai123456
OCEANBASE_VECTOR_DATABASE=test
OCEANBASE_MEMORY_LIMIT=6G
OCEANBASE_ENABLE_HYBRID_SEARCH=false
OCEANBASE_FULLTEXT_PARSER=ik
SEEKDB_MEMORY_LIMIT=2G
# Qdrant configuration, use `http://localhost:6333` for local mode or `https://your-qdrant-cluster-url.qdrant.io` for remote mode
QDRANT_URL=http://localhost:6333
@ -340,15 +354,6 @@ LINDORM_PASSWORD=admin
LINDORM_USING_UGC=True
LINDORM_QUERY_TIMEOUT=1
# OceanBase Vector configuration
OCEANBASE_VECTOR_HOST=127.0.0.1
OCEANBASE_VECTOR_PORT=2881
OCEANBASE_VECTOR_USER=root@test
OCEANBASE_VECTOR_PASSWORD=difyai123456
OCEANBASE_VECTOR_DATABASE=test
OCEANBASE_MEMORY_LIMIT=6G
OCEANBASE_ENABLE_HYBRID_SEARCH=false
# AlibabaCloud MySQL Vector configuration
ALIBABACLOUD_MYSQL_HOST=127.0.0.1
ALIBABACLOUD_MYSQL_PORT=3306
@ -469,6 +474,9 @@ HTTP_REQUEST_NODE_MAX_BINARY_SIZE=10485760
HTTP_REQUEST_NODE_MAX_TEXT_SIZE=1048576
HTTP_REQUEST_NODE_SSL_VERIFY=True
# Webhook request configuration
WEBHOOK_REQUEST_BODY_MAX_SIZE=10485760
# Respect X-* headers to redirect clients
RESPECT_XFORWARD_HEADERS_ENABLED=false
@ -524,7 +532,7 @@ API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node
API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
# Workflow log cleanup configuration
# Enable automatic cleanup of workflow run logs to manage database size
WORKFLOW_LOG_CLEANUP_ENABLED=true
WORKFLOW_LOG_CLEANUP_ENABLED=false
# Number of days to retain workflow run logs (default: 30 days)
WORKFLOW_LOG_RETENTION_DAYS=30
# Batch size for workflow log cleanup operations (default: 100)
@ -532,6 +540,7 @@ WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
# App configuration
APP_MAX_EXECUTION_TIME=1200
APP_DEFAULT_ACTIVE_REQUESTS=0
APP_MAX_ACTIVE_REQUESTS=0
# Celery beat configuration
@ -546,6 +555,12 @@ ENABLE_CLEAN_MESSAGES=false
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
ENABLE_DATASETS_QUEUE_MONITOR=false
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK=true
# Interval time in minutes for polling scheduled workflows(default: 1 min)
WORKFLOW_SCHEDULE_POLLER_INTERVAL=1
WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE=100
# Maximum number of scheduled workflows to dispatch per tick (0 for unlimited)
WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK=0
# Position configuration
POSITION_TOOL_PINS=
@ -618,5 +633,8 @@ SWAGGER_UI_PATH=/swagger-ui.html
# Set to false to export dataset IDs as plain text for easier cross-environment import
DSL_EXPORT_ENCRYPT_DATASET_ID=true
# Tenant isolated task queue configuration
TENANT_ISOLATED_TASK_CONCURRENCY=1
# Maximum number of segments for dataset segments API (0 for unlimited)
DATASET_MAX_SEGMENTS_PER_REQUEST=0

View File

@ -16,6 +16,7 @@ layers =
graph
nodes
node_events
runtime
entities
containers =
core.workflow

View File

@ -54,7 +54,7 @@
"--loglevel",
"DEBUG",
"-Q",
"dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline"
"dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor"
]
}
]

api/AGENTS.md Normal file
View File

@ -0,0 +1,62 @@
# Agent Skill Index
Start with the section that best matches your need. Each entry lists the problems it solves plus key files/concepts so you know what to expect before opening it.
______________________________________________________________________
## Platform Foundations
- **[Infrastructure Overview](agent_skills/infra.md)**\
When to read this:
- You need to understand where a feature belongs in the architecture.
- You're wiring storage, Redis, vector stores, or OTEL.
- You're about to add CLI commands or async jobs.\
What it covers: configuration stack (`configs/app_config.py`, remote settings), storage entry points (`extensions/ext_storage.py`, `core/file/file_manager.py`), Redis conventions (`extensions/ext_redis.py`), plugin runtime topology, vector-store factory (`core/rag/datasource/vdb/*`), observability hooks, SSRF proxy usage, and core CLI commands.
- **[Coding Style](agent_skills/coding_style.md)**\
When to read this:
- You're writing or reviewing backend code and need the authoritative checklist.
- You're unsure about Pydantic validators, SQLAlchemy session usage, or logging patterns.
- You want the exact lint/type/test commands used in PRs.\
Includes: Ruff & BasedPyright commands, no-annotation policy, session examples (`with Session(db.engine, ...)`), `@field_validator` usage, logging expectations, and the rule set for file size, helpers, and package management.
______________________________________________________________________
## Plugin & Extension Development
- **[Plugin Systems](agent_skills/plugin.md)**\
When to read this:
- You're building or debugging a marketplace plugin.
- You need to know how manifests, providers, daemons, and migrations fit together.\
What it covers: plugin manifests (`core/plugin/entities/plugin.py`), installation/upgrade flows (`services/plugin/plugin_service.py`, CLI commands), runtime adapters (`core/plugin/impl/*` for tool/model/datasource/trigger/endpoint/agent), daemon coordination (`core/plugin/entities/plugin_daemon.py`), and how provider registries surface capabilities to the rest of the platform.
- **[Plugin OAuth](agent_skills/plugin_oauth.md)**\
When to read this:
- You must integrate OAuth for a plugin or datasource.
- You're handling credential encryption or refresh flows.\
Topics: credential storage, encryption helpers (`core/helper/provider_encryption.py`), OAuth client bootstrap (`services/plugin/oauth_service.py`, `services/plugin/plugin_parameter_service.py`), and how console/API layers expose the flows.
______________________________________________________________________
## Workflow Entry & Execution
- **[Trigger Concepts](agent_skills/trigger.md)**\
When to read this:
- You're debugging why a workflow didn't start.
- You're adding a new trigger type or hook.
- You need to trace async execution, draft debugging, or webhook/schedule pipelines.\
Details: Start-node taxonomy, webhook & schedule internals (`core/workflow/nodes/trigger_*`, `services/trigger/*`), async orchestration (`services/async_workflow_service.py`, Celery queues), debug event bus, and storage/logging interactions.
______________________________________________________________________
## Additional Notes for Agents
- All skill docs assume you follow the coding style guide—run Ruff/BasedPyright/tests listed there before submitting changes.
- When you cannot find an answer in these briefs, search the codebase using the paths referenced (e.g., `core/plugin/impl/tool.py`, `services/dataset_service.py`).
- If you run into cross-cutting concerns (tenancy, configuration, storage), check the infrastructure guide first; it links to most supporting modules.
- Keep multi-tenancy and configuration central: everything flows through `configs.dify_config` and `tenant_id`.
- When touching plugins or triggers, consult both the system overview and the specialised doc to ensure you adjust lifecycle, storage, and observability consistently.

View File

@ -48,6 +48,12 @@ ENV PYTHONIOENCODING=utf-8
WORKDIR /app/api
# Create non-root user
ARG dify_uid=1001
RUN groupadd -r -g ${dify_uid} dify && \
useradd -r -u ${dify_uid} -g ${dify_uid} -s /bin/bash dify && \
chown -R dify:dify /app
RUN \
apt-get update \
# Install dependencies
@ -57,7 +63,7 @@ RUN \
# for gmpy2 \
libgmp-dev libmpfr-dev libmpc-dev \
# For Security
expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
expat libldap-2.5-0=2.5.13+dfsg-5 perl libsqlite3-0=3.40.1-2+deb12u2 zlib1g=1:1.2.13.dfsg-1 \
# install fonts to support the use of tools like pypdfium2
fonts-noto-cjk \
# install a package to improve the accuracy of guessing mime type and file extension
@ -69,24 +75,29 @@ RUN \
# Copy Python environment and packages
ENV VIRTUAL_ENV=/app/api/.venv
COPY --from=packages ${VIRTUAL_ENV} ${VIRTUAL_ENV}
COPY --from=packages --chown=dify:dify ${VIRTUAL_ENV} ${VIRTUAL_ENV}
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
# Download nltk data
RUN python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger')"
RUN mkdir -p /usr/local/share/nltk_data && NLTK_DATA=/usr/local/share/nltk_data python -c "import nltk; nltk.download('punkt'); nltk.download('averaged_perceptron_tagger'); nltk.download('stopwords')" \
&& chmod -R 755 /usr/local/share/nltk_data
ENV TIKTOKEN_CACHE_DIR=/app/api/.tiktoken_cache
RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt2')"
RUN python -c "import tiktoken; tiktoken.encoding_for_model('gpt2')" \
&& chown -R dify:dify ${TIKTOKEN_CACHE_DIR}
# Copy source code
COPY . /app/api/
COPY --chown=dify:dify . /app/api/
# Prepare entrypoint script
COPY --chown=dify:dify --chmod=755 docker/entrypoint.sh /entrypoint.sh
# Copy entrypoint
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
ARG COMMIT_SHA
ENV COMMIT_SHA=${COMMIT_SHA}
ENV NLTK_DATA=/usr/local/share/nltk_data
USER dify
ENTRYPOINT ["/bin/bash", "/entrypoint.sh"]

View File

@ -15,8 +15,8 @@
```bash
cd ../docker
cp middleware.env.example middleware.env
# change the profile to other vector database if you are not using weaviate
docker compose -f docker-compose.middleware.yaml --profile weaviate -p dify up -d
# change the profile to mysql if you are not using postgres; change the vector database profile if you are not using weaviate
docker compose -f docker-compose.middleware.yaml --profile postgresql --profile weaviate -p dify up -d
cd ../api
```
@ -26,6 +26,10 @@
cp .env.example .env
```
> [!IMPORTANT]
>
> When the frontend and backend run on different subdomains, set COOKIE_DOMAIN to the site's top-level domain (e.g., `example.com`). The frontend and backend must be under the same top-level domain in order to share authentication cookies.
1. Generate a `SECRET_KEY` in the `.env` file.
bash for Linux
@ -80,7 +84,7 @@
1. If you need to handle and debug the async tasks (e.g. dataset importing and documents indexing), please start the worker service.
```bash
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,priority_pipeline,pipeline
uv run celery -A app.celery worker -P threads -c 2 --loglevel INFO -Q dataset,priority_dataset,priority_pipeline,pipeline,mail,ops_trace,app_deletion,plugin,workflow_storage,conversation,workflow,schedule_poller,schedule_executor,triggered_workflow_dispatcher,trigger_refresh_executor
```
Additionally, if you want to debug the celery scheduled tasks, you can run the following command in another terminal to start the beat service:

View File

@ -0,0 +1,115 @@
## Linter
- Always follow `.ruff.toml`.
- Run `uv run ruff check --fix --unsafe-fixes`.
- Keep each line under 100 characters (including spaces).
## Code Style
- `snake_case` for variables and functions.
- `PascalCase` for classes.
- `UPPER_CASE` for constants.
## Rules
- Use Pydantic v2 standard.
- Use `uv` for package management.
- Do not override dunder methods like `__init__`, `__iadd__`, etc.
- Never launch services (`uv run app.py`, `flask run`, etc.); running tests under `tests/` is allowed.
- Prefer simple functions over classes for lightweight helpers.
- Keep files below 800 lines; split when necessary.
- Keep code readable—no clever hacks.
- Never use `print`; log with `logger = logging.getLogger(__name__)`.
## Guiding Principles
- Mirror the project's layered architecture: controller → service → core/domain.
- Reuse existing helpers in `core/`, `services/`, and `libs/` before creating new abstractions.
- Optimise for observability: deterministic control flow, clear logging, actionable errors.
## SQLAlchemy Patterns
- Models inherit from `models.base.Base`; never create ad-hoc metadata or engines.
- Open sessions with context managers:
```python
from sqlalchemy import select
from sqlalchemy.orm import Session

with Session(db.engine, expire_on_commit=False) as session:
    stmt = select(Workflow).where(
        Workflow.id == workflow_id,
        Workflow.tenant_id == tenant_id,
    )
    workflow = session.execute(stmt).scalar_one_or_none()
```
- Use SQLAlchemy expressions; avoid raw SQL unless necessary.
- Introduce repository abstractions only for very large tables (e.g., workflow executions) to support alternative storage strategies.
- Always scope queries by `tenant_id` and protect write paths with safeguards (`FOR UPDATE`, row counts, etc.).
## Storage & External IO
- Access storage via `extensions.ext_storage.storage`.
- Use `core.helper.ssrf_proxy` for outbound HTTP fetches.
- Background tasks that touch storage must be idempotent and log the relevant object identifiers.
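A hedged sketch of an idempotent storage-touching task, assuming `storage.exists`/`storage.save` and `ssrf_proxy.get` keep their current call shapes (the key layout is illustrative):
```python
import logging

from core.helper import ssrf_proxy
from extensions.ext_storage import storage

logger = logging.getLogger(__name__)


def cache_remote_file(tenant_id: str, file_id: str, url: str) -> str:
    """Fetch a remote file through the SSRF-safe client and persist it; safe to re-run."""
    key = f"remote_files/{tenant_id}/{file_id}"
    if storage.exists(key):
        # Idempotent: a retried task neither re-downloads nor overwrites the object.
        logger.info("remote file already cached (tenant=%s, key=%s)", tenant_id, key)
        return key
    response = ssrf_proxy.get(url)  # outbound HTTP goes through the SSRF proxy helper
    response.raise_for_status()
    storage.save(key, response.content)
    logger.info("cached remote file (tenant=%s, key=%s)", tenant_id, key)
    return key
```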
## Pydantic Usage
- Define DTOs with Pydantic v2 models and forbid extras by default.
- Use `@field_validator` / `@model_validator` for domain rules.
- Example:
```python
from pydantic import BaseModel, ConfigDict, HttpUrl, field_validator


class TriggerConfig(BaseModel):
    endpoint: HttpUrl
    secret: str

    model_config = ConfigDict(extra="forbid")

    @field_validator("secret")
    @classmethod
    def ensure_secret_prefix(cls, value: str) -> str:
        if not value.startswith("dify_"):
            raise ValueError("secret must start with dify_")
        return value
```
## Generics & Protocols
- Use `typing.Protocol` to define behavioural contracts (e.g., cache interfaces).
- Apply generics (`TypeVar`, `Generic`) for reusable utilities like caches or providers.
- Validate dynamic inputs at runtime when generics cannot enforce safety alone.
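A minimal sketch of combining `Protocol` and `Generic` for a cache contract (the names are illustrative, not an existing interface):
```python
from typing import Generic, Protocol, TypeVar

T = TypeVar("T")


class Cache(Protocol[T]):
    """Behavioural contract; Redis- or memory-backed implementations can satisfy it."""

    def get(self, key: str) -> T | None: ...

    def set(self, key: str, value: T) -> None: ...


class InMemoryCache(Generic[T]):
    """Simple implementation that structurally satisfies Cache[T]."""

    def __init__(self) -> None:
        self._store: dict[str, T] = {}

    def get(self, key: str) -> T | None:
        return self._store.get(key)

    def set(self, key: str, value: T) -> None:
        self._store[key] = value
```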
## Error Handling & Logging
- Raise domain-specific exceptions (`services/errors`, `core/errors`) and translate to HTTP responses in controllers.
- Declare `logger = logging.getLogger(__name__)` at module top.
- Include tenant/app/workflow identifiers in log context.
- Log retryable events at `warning`, terminal failures at `error`.
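A minimal sketch of these conventions (the exception and function are hypothetical; real domain errors live under `services/errors` or `core/errors`):
```python
import logging

logger = logging.getLogger(__name__)  # declared at module top


class WorkflowNotFoundError(Exception):
    """Hypothetical domain-specific error."""


def publish_workflow(tenant_id: str, app_id: str, workflow_id: str) -> None:
    logger.info("publishing workflow %s (tenant=%s, app=%s)", workflow_id, tenant_id, app_id)
    workflow = None  # placeholder for a repository/service lookup
    if workflow is None:
        # Terminal failure: log at error and raise a domain error;
        # the controller layer translates it into an HTTP response.
        logger.error("workflow %s not found (tenant=%s, app=%s)", workflow_id, tenant_id, app_id)
        raise WorkflowNotFoundError(workflow_id)
```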
## Tooling & Checks
- Format/lint: `uv run --project api --dev ruff format ./api` and `uv run --project api --dev ruff check --fix --unsafe-fixes ./api`.
- Type checks: `uv run --directory api --dev basedpyright`.
- Tests: `uv run --project api --dev dev/pytest/pytest_unit_tests.sh`.
- Run all of the above before submitting your work.
## Controllers & Services
- Controllers: parse input via Pydantic, invoke services, return serialised responses; no business logic.
- Services: coordinate repositories, providers, background tasks; keep side effects explicit.
- Avoid repositories unless necessary; direct SQLAlchemy usage is preferred for typical tables.
- Document non-obvious behaviour with concise comments.
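A minimal sketch of the controller/service split (names are hypothetical; real controllers use the existing Flask wiring in `controllers/`):
```python
from pydantic import BaseModel


class RenameAppPayload(BaseModel):
    name: str


def rename_app(tenant_id: str, app_id: str, name: str) -> dict:
    """Service layer: enforce rules, coordinate persistence, keep side effects explicit."""
    if not name.strip():
        raise ValueError("name must not be empty")
    # ...load the app scoped by tenant_id, update it, commit...
    return {"id": app_id, "name": name}


def rename_app_controller(tenant_id: str, app_id: str, raw_body: dict) -> dict:
    """Controller layer: parse input via Pydantic, invoke the service, serialise the response."""
    payload = RenameAppPayload.model_validate(raw_body)
    return rename_app(tenant_id=tenant_id, app_id=app_id, name=payload.name)
```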
## Miscellaneous
- Use `configs.dify_config` for configuration—never read environment variables directly.
- Maintain tenant awareness end-to-end; `tenant_id` must flow through every layer touching shared resources.
- Queue async work through `services/async_workflow_service`; implement tasks under `tasks/` with explicit queue selection.
- Keep experimental scripts under `dev/`; do not ship them in production builds.

api/agent_skills/infra.md Normal file
View File

@ -0,0 +1,96 @@
## Configuration
- Import `configs.dify_config` for every runtime toggle. Do not read environment variables directly.
- Add new settings to the proper mixin inside `configs/` (deployment, feature, middleware, etc.) so they load through `DifyConfig`.
- Remote overrides come from the optional providers in `configs/remote_settings_sources`; keep defaults in code safe when the value is missing.
- Example: logging pulls targets from `extensions/ext_logging.py`, and model provider URLs are assembled in `services/entities/model_provider_entities.py`.
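A minimal sketch of adding a toggle through a settings mixin (the mixin and field are hypothetical; it must also be composed into `DifyConfig`):
```python
from pydantic import Field
from pydantic_settings import BaseSettings


class MyFeatureConfig(BaseSettings):
    """Hypothetical mixin living under configs/; loaded through DifyConfig like the others."""

    MY_FEATURE_ENABLED: bool = Field(
        description="Enable the hypothetical feature",
        default=False,  # keep the default safe when no value is provided
    )


# Consumers read it through the shared config object, never os.environ:
# from configs import dify_config
# if dify_config.MY_FEATURE_ENABLED: ...
```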
## Dependencies
- Runtime dependencies live in `[project].dependencies` inside `pyproject.toml`. Optional clients go into the `storage`, `tools`, or `vdb` groups under `[dependency-groups]`.
- Always pin versions and keep the list alphabetised. Shared tooling (lint, typing, pytest) belongs in the `dev` group.
- When code needs a new package, explain why in the PR and run `uv lock` so the lockfile stays current.
## Storage & Files
- Use `extensions.ext_storage.storage` for all blob IO; it already respects the configured backend.
- Convert files for workflows with helpers in `core/file/file_manager.py`; they handle signed URLs and multimodal payloads.
- When writing controller logic, delegate upload quotas and metadata to `services/file_service.py` instead of touching storage directly.
- All outbound HTTP fetches (webhooks, remote files) must go through the SSRF-safe client in `core/helper/ssrf_proxy.py`; it wraps `httpx` with the allow/deny rules configured for the platform.
## Redis & Shared State
- Access Redis through `extensions.ext_redis.redis_client`. For locking, reuse `redis_client.lock`.
- Prefer higher-level helpers when available: rate limits use `libs.helper.RateLimiter`, provider metadata uses caches in `core/helper/provider_cache.py`.
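A small sketch of the locking convention, assuming `redis_client.lock` exposes the standard redis-py signature (the lock name is illustrative):
```python
from extensions.ext_redis import redis_client


def refresh_provider_metadata(tenant_id: str) -> None:
    # Per-tenant lock; timeout bounds the lock lifetime, blocking_timeout bounds the wait.
    lock = redis_client.lock(f"provider_refresh:{tenant_id}", timeout=60, blocking_timeout=5)
    if not lock.acquire():
        return  # another worker holds the lock; skip this round
    try:
        ...  # perform the refresh
    finally:
        lock.release()
```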
## Models
- SQLAlchemy models sit in `models/` and inherit from the shared declarative `Base` defined in `models/base.py` (metadata configured via `models/engine.py`).
- `models/__init__.py` exposes grouped aggregates: account/tenant models, app and conversation tables, datasets, providers, workflow runs, triggers, etc. Import from there to avoid deep path churn.
- Follow the DDD boundary: persistence objects live in `models/`, repositories under `repositories/` translate them into domain entities, and services consume those repositories.
- When adding a table, create the model class, register it in `models/__init__.py`, wire a repository if needed, and generate an Alembic migration as described below.
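A hedged sketch of a new tenant-scoped table, assuming the SQLAlchemy 2.0 declarative style (the table and columns are hypothetical):
```python
from sqlalchemy import String
from sqlalchemy.orm import Mapped, mapped_column

from models.base import Base


class ExampleSubscription(Base):
    """Hypothetical table; register it in models/__init__.py and add an Alembic migration."""

    __tablename__ = "example_subscriptions"

    id: Mapped[str] = mapped_column(String(36), primary_key=True)
    tenant_id: Mapped[str] = mapped_column(String(36), index=True)  # scope queries by tenant
    provider: Mapped[str] = mapped_column(String(255))
```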
## Vector Stores
- Vector client implementations live in `core/rag/datasource/vdb/<provider>`, with a common factory in `core/rag/datasource/vdb/vector_factory.py` and enums in `core/rag/datasource/vdb/vector_type.py`.
- Retrieval pipelines call these providers through `core/rag/datasource/retrieval_service.py` and dataset ingestion flows in `services/dataset_service.py`.
- The CLI helper `flask vdb-migrate` orchestrates bulk migrations using routines in `commands.py`; reuse that pattern when adding new backend transitions.
- To add another store, mirror the provider layout, register it with the factory, and include any schema changes in Alembic migrations.
## Observability & OTEL
- OpenTelemetry settings live under the observability mixin in `configs/observability`. Toggle exporters and sampling via `dify_config`, not ad-hoc env reads.
- HTTP, Celery, Redis, SQLAlchemy, and httpx instrumentation is initialised in `extensions/ext_app_metrics.py` and `extensions/ext_request_logging.py`; reuse these hooks when adding new workers or entrypoints.
- When creating background tasks or external calls, propagate tracing context with helpers in the existing instrumented clients (e.g. use the shared `httpx` session from `core/helper/http_client_pooling.py`).
- If you add a new external integration, ensure spans and metrics are emitted by wiring the appropriate OTEL instrumentation package in `pyproject.toml` and configuring it in `extensions/`.
## Ops Integrations
- Langfuse support and other tracing bridges live under `core/ops/opik_trace`. Config toggles sit in `configs/observability`, while exporters are initialised in the OTEL extensions mentioned above.
- External monitoring services should follow this pattern: keep client code in `core/ops`, expose switches via `dify_config`, and hook initialisation in `extensions/ext_app_metrics.py` or sibling modules.
- Before instrumenting new code paths, check whether existing context helpers (e.g. `extensions/ext_request_logging.py`) already capture the necessary metadata.
## Controllers, Services, Core
- Controllers only parse HTTP input and call a service method. Keep business rules in `services/`.
- Services enforce tenant rules, quotas, and orchestration, then call into `core/` engines (workflow execution, tools, LLMs).
- When adding a new endpoint, search for an existing service to extend before introducing a new layer. Example: workflow APIs pipe through `services/workflow_service.py` into `core/workflow`.
## Plugins, Tools, Providers
- In Dify a plugin is a tenant-installable bundle that declares one or more providers (tool, model, datasource, trigger, endpoint, agent strategy) plus its resource needs and version metadata. The manifest (`core/plugin/entities/plugin.py`) mirrors what you see in the marketplace documentation.
- Installation, upgrades, and migrations are orchestrated by `services/plugin/plugin_service.py` together with helpers such as `services/plugin/plugin_migration.py`.
- Runtime loading happens through the implementations under `core/plugin/impl/*` (tool/model/datasource/trigger/endpoint/agent). These modules normalise plugin providers so that downstream systems (`core/tools/tool_manager.py`, `services/model_provider_service.py`, `services/trigger/*`) can treat builtin and plugin capabilities the same way.
- For remote execution, plugin daemons (`core/plugin/entities/plugin_daemon.py`, `core/plugin/impl/plugin.py`) manage lifecycle hooks, credential forwarding, and background workers that keep plugin processes in sync with the main application.
- Acquire tool implementations through `core/tools/tool_manager.py`; it resolves builtin, plugin, and workflow-as-tool providers uniformly, injecting the right context (tenant, credentials, runtime config).
- To add a new plugin capability, extend the relevant `core/plugin/entities` schema and register the implementation in the matching `core/plugin/impl` module rather than importing the provider directly.
## Async Workloads
See `agent_skills/trigger.md` for more detailed documentation.
- Enqueue background work through `services/async_workflow_service.py`. It routes jobs to the tiered Celery queues defined in `tasks/`.
- Workers boot from `celery_entrypoint.py` and execute functions in `tasks/workflow_execution_tasks.py`, `tasks/trigger_processing_tasks.py`, etc.
- Scheduled workflows poll from `schedule/workflow_schedule_tasks.py`. Follow the same pattern if you need new periodic jobs.
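A hedged sketch of a task module under `tasks/` with explicit queue selection (the task and queue choice are illustrative; production jobs are enqueued via `services/async_workflow_service.py`):
```python
import logging

from celery import shared_task

logger = logging.getLogger(__name__)


@shared_task(queue="workflow")  # the queue must be listed in the worker's -Q argument
def resume_workflow_task(tenant_id: str, workflow_run_id: str) -> None:
    logger.info("resuming workflow run %s (tenant=%s)", workflow_run_id, tenant_id)
    ...  # call into services/core here; keep the task idempotent
```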
## Database & Migrations
- SQLAlchemy models live under `models/` and map directly to migration files in `migrations/versions`.
- Generate migrations with `uv run --project api flask db revision --autogenerate -m "<summary>"`, then review the diff; never hand-edit the database outside Alembic.
- Apply migrations locally using `uv run --project api flask db upgrade`; production deploys expect the same history.
- If you add tenant-scoped data, confirm the upgrade includes tenant filters or defaults consistent with the service logic touching those tables.
## CLI Commands
- Maintenance commands from `commands.py` are registered on the Flask CLI. Run them via `uv run --project api flask <command>`.
- Use the built-in `db` commands from Flask-Migrate for schema operations (`flask db upgrade`, `flask db stamp`, etc.). Only fall back to custom helpers if you need their extra behaviour.
- Custom entries such as `flask reset-password`, `flask reset-email`, and `flask vdb-migrate` handle self-hosted account recovery and vector database migrations.
- Before adding a new command, check whether an existing service can be reused and ensure the command guards edition-specific behaviour (many enforce `SELF_HOSTED`). Document any additions in the PR.
- Ruff helpers are run directly with `uv`: `uv run --project api --dev ruff format ./api` for formatting and `uv run --project api --dev ruff check ./api` (add `--fix` if you want automatic fixes).
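A minimal sketch of a maintenance command (the name and option are hypothetical); once registered on the Flask CLI it would run via `uv run --project api flask example-maintenance`:
```python
import click


@click.command("example-maintenance", help="Hypothetical maintenance command.")
@click.option("--tenant-id", prompt=True, help="Tenant to operate on")
def example_maintenance(tenant_id: str) -> None:
    # Reuse an existing service rather than duplicating its logic here.
    click.echo(click.style(f"Running maintenance for tenant {tenant_id}", fg="green"))
```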
## When You Add Features
- Check for an existing helper or service before writing a new util.
- Uphold tenancy: every service method should receive the tenant ID from controller wrappers such as `controllers/console/wraps.py`.
- Update or create tests alongside behaviour changes (`tests/unit_tests` for fast coverage, `tests/integration_tests` when touching orchestrations).
- Run `uv run --project api --dev ruff check ./api`, `uv run --directory api --dev basedpyright`, and `uv run --project api --dev dev/pytest/pytest_unit_tests.sh` before submitting changes.

View File

@ -0,0 +1 @@
// TBD

View File

@ -0,0 +1 @@
// TBD

View File

@ -0,0 +1,53 @@
## Overview
Trigger is a collection of nodes that we call `Start` nodes; the concept of `Start` is the same as `RootNode` in the workflow engine `core/workflow/graph_engine`. A `Start` node is the entry point of a workflow: every workflow run always starts from a `Start` node.
## Trigger nodes
- `UserInput`
- `Trigger Webhook`
- `Trigger Schedule`
- `Trigger Plugin`
### UserInput
Before the `Trigger` concept was introduced, this was what we called the `Start` node; to avoid confusion it has been renamed to the `UserInput` node. It has a strong relation with `ServiceAPI` in `controllers/service_api/app`.
1. The `UserInput` node introduces a list of arguments that must be provided by the user; these are finally converted into variables in the workflow variable pool.
1. `ServiceAPI` accepts those arguments and passes them through to the `UserInput` node.
1. For the detailed implementation, please refer to `core/workflow/nodes/start`.
### Trigger Webhook
Inside the Webhook node, Dify provides a UI panel that lets the user define an HTTP manifest (`core/workflow/nodes/trigger_webhook/entities.py`.`WebhookData`). Dify also generates a random webhook id for each `Trigger Webhook` node; this is implemented in `core/trigger/utils/endpoint.py`. `webhook-debug` is the debug mode for webhooks, which you can find in `controllers/trigger/webhook.py`.
Finally, requests to the `webhook` endpoint are converted into variables in the workflow variable pool during workflow execution.
### Trigger Schedule
The `Trigger Schedule` node allows the user to define a schedule that triggers the workflow; the detailed manifest is in `core/workflow/nodes/trigger_schedule/entities.py`. We have a poller and an executor to handle millions of schedules; see `docker/entrypoint.sh` and `schedule/workflow_schedule_task.py` for help.
To achieve this, a `WorkflowSchedulePlan` model was introduced in `models/trigger.py`, and `events/event_handlers/sync_workflow_schedule_when_app_published.py` is used to sync workflow schedule plans when an app is published.
### Trigger Plugin
The `Trigger Plugin` node allows users to define their own distributed trigger plugins: whenever a request is received, Dify forwards it to the plugin and waits for the parsed variables.
1. Requests are saved in storage by `services/trigger/trigger_request_service.py`, referenced by `services/trigger/trigger_service.py`.`TriggerService`.`process_endpoint`.
1. Plugins accept those requests and parse variables from them; see `core/plugin/impl/trigger.py` for details.
Dify also introduces a `subscription` concept: an endpoint address from Dify is bound to a third-party webhook service such as `Github`, `Slack`, `Linear`, `GoogleDrive`, or `Gmail`. Once a subscription is created, Dify continually receives requests from these platforms and handles them one by one.
## Worker Pool / Async Task
All events that trigger a new workflow run are handled in async mode; a unified entrypoint can be found in `services/async_workflow_service.py`.`AsyncWorkflowService`.`trigger_workflow_async`.
The infrastructure we use is `celery`, already configured in `docker/entrypoint.sh`; the consumers are in `tasks/async_workflow_tasks.py`. Three queues handle different tiers of users: `PROFESSIONAL_QUEUE`, `TEAM_QUEUE`, and `SANDBOX_QUEUE`.
## Debug Strategy
Dify divides users into two groups: builders and end users.
Builders are the users who create workflows. At this stage, debugging a workflow becomes a critical part of the development process. As the start nodes of workflows, trigger nodes can `listen` to events from `WebhookDebug`, `Schedule`, and `Plugin`; the debugging process is implemented in `controllers/console/app/workflow.py`.`DraftWorkflowTriggerNodeApi`.
A polling process can be considered a combination of single `poll` operations: each `poll` operation fetches events cached in `Redis` and returns `None` if no event is found (see the sketch below). In more detail, `core/trigger/debug/event_bus.py` handles the polling process, and `core/trigger/debug/event_selectors.py` selects the event poller based on the trigger type.
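A simplified sketch of a single `poll` operation against Redis (the key layout and helper are illustrative, not the actual event-bus API):
```python
import json

from extensions.ext_redis import redis_client


def poll_debug_event(node_id: str) -> dict | None:
    """Pop one cached debug event for a trigger node; return None when nothing has arrived."""
    raw = redis_client.lpop(f"trigger_debug:{node_id}")
    if raw is None:
        return None
    return json.loads(raw)
```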

View File

@ -1,24 +1,17 @@
import os
import sys
def is_db_command():
def is_db_command() -> bool:
if len(sys.argv) > 1 and sys.argv[0].endswith("flask") and sys.argv[1] == "db":
return True
return False
# create app
celery = None
flask_app = None
socketio_app = None
if is_db_command():
from app_factory import create_migrations_app
app = create_migrations_app()
socketio_app = app
flask_app = app
else:
# Gunicorn and Celery handle monkey patching automatically in production by
# specifying the `gevent` worker class. Manual monkey patching is not required here.
@ -29,15 +22,8 @@ else:
from app_factory import create_app
socketio_app, flask_app = create_app()
app = flask_app
celery = flask_app.extensions["celery"]
app = create_app()
celery = app.extensions["celery"]
if __name__ == "__main__":
from gevent import pywsgi
from geventwebsocket.handler import WebSocketHandler
host = os.environ.get("HOST", "0.0.0.0")
port = int(os.environ.get("PORT", 5001))
server = pywsgi.WSGIServer((host, port), socketio_app, handler_class=WebSocketHandler)
server.serve_forever()
app.run(host="0.0.0.0", port=5001)

View File

@ -18,6 +18,7 @@ def create_flask_app_with_configs() -> DifyApp:
"""
dify_app = DifyApp(__name__)
dify_app.config.from_mapping(dify_config.model_dump())
dify_app.config["RESTX_INCLUDE_ALL_MODELS"] = True
# add before request hook
@dify_app.before_request
@ -31,22 +32,14 @@ def create_flask_app_with_configs() -> DifyApp:
return dify_app
def create_app() -> tuple[any, DifyApp]:
def create_app() -> DifyApp:
start_time = time.perf_counter()
app = create_flask_app_with_configs()
initialize_extensions(app)
import socketio
from extensions.ext_socketio import sio
sio.app = app
socketio_app = socketio.WSGIApp(sio, app)
end_time = time.perf_counter()
if dify_config.DEBUG:
logger.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
return socketio_app, app
return app
def initialize_extensions(app: DifyApp):

api/cnt_base.sh Executable file
View File

@ -0,0 +1,7 @@
#!/bin/bash
set -euxo pipefail
for pattern in "Base" "TypeBase"; do
printf "%s " "$pattern"
grep "($pattern):" -r --include='*.py' --exclude-dir=".venv" --exclude-dir="tests" . | wc -l
done

View File

@ -15,12 +15,12 @@ from sqlalchemy.orm import sessionmaker
from configs import dify_config
from constants.languages import languages
from core.helper import encrypter
from core.plugin.entities.plugin_daemon import CredentialType
from core.plugin.impl.plugin import PluginInstaller
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.index_processor.constant.built_in_field import BuiltInField
from core.rag.models.document import Document
from core.tools.entities.tool_entities import CredentialType
from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
from events.app_event import app_was_created
from extensions.ext_database import db
@ -1229,6 +1229,55 @@ def setup_system_tool_oauth_client(provider, client_params):
click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green"))
@click.command("setup-system-trigger-oauth-client", help="Setup system trigger oauth client.")
@click.option("--provider", prompt=True, help="Provider name")
@click.option("--client-params", prompt=True, help="Client Params")
def setup_system_trigger_oauth_client(provider, client_params):
"""
Setup system trigger oauth client
"""
from models.provider_ids import TriggerProviderID
from models.trigger import TriggerOAuthSystemClient
provider_id = TriggerProviderID(provider)
provider_name = provider_id.provider_name
plugin_id = provider_id.plugin_id
try:
# json validate
click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
click.echo(click.style("Client params validated successfully.", fg="green"))
click.echo(click.style(f"Encrypting client params: {client_params}", fg="yellow"))
click.echo(click.style(f"Using SECRET_KEY: `{dify_config.SECRET_KEY}`", fg="yellow"))
oauth_client_params = encrypt_system_oauth_params(client_params_dict)
click.echo(click.style("Client params encrypted successfully.", fg="green"))
except Exception as e:
click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
return
deleted_count = (
db.session.query(TriggerOAuthSystemClient)
.filter_by(
provider=provider_name,
plugin_id=plugin_id,
)
.delete()
)
if deleted_count > 0:
click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))
oauth_client = TriggerOAuthSystemClient(
provider=provider_name,
plugin_id=plugin_id,
encrypted_oauth_params=oauth_client_params,
)
db.session.add(oauth_client)
db.session.commit()
click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green"))
def _find_orphaned_draft_variables(batch_size: int = 1000) -> list[str]:
"""
Find draft variables that reference non-existent apps.
@ -1422,7 +1471,10 @@ def setup_datasource_oauth_client(provider, client_params):
@click.command("transform-datasource-credentials", help="Transform datasource credentials.")
def transform_datasource_credentials():
@click.option(
"--environment", prompt=True, help="the environment to transform datasource credentials", default="online"
)
def transform_datasource_credentials(environment: str):
"""
Transform datasource credentials
"""
@ -1433,9 +1485,14 @@ def transform_datasource_credentials():
notion_plugin_id = "langgenius/notion_datasource"
firecrawl_plugin_id = "langgenius/firecrawl_datasource"
jina_plugin_id = "langgenius/jina_datasource"
notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id) # pyright: ignore[reportPrivateUsage]
firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id) # pyright: ignore[reportPrivateUsage]
jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id) # pyright: ignore[reportPrivateUsage]
if environment == "online":
notion_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(notion_plugin_id) # pyright: ignore[reportPrivateUsage]
firecrawl_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(firecrawl_plugin_id) # pyright: ignore[reportPrivateUsage]
jina_plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(jina_plugin_id) # pyright: ignore[reportPrivateUsage]
else:
notion_plugin_unique_identifier = None
firecrawl_plugin_unique_identifier = None
jina_plugin_unique_identifier = None
oauth_credential_type = CredentialType.OAUTH2
api_key_credential_type = CredentialType.API_KEY

View File

@ -73,14 +73,14 @@ class AppExecutionConfig(BaseSettings):
description="Maximum allowed execution time for the application in seconds",
default=1200,
)
APP_DEFAULT_ACTIVE_REQUESTS: NonNegativeInt = Field(
description="Default number of concurrent active requests per app (0 for unlimited)",
default=0,
)
APP_MAX_ACTIVE_REQUESTS: NonNegativeInt = Field(
description="Maximum number of concurrent active requests per app (0 for unlimited)",
default=0,
)
APP_DAILY_RATE_LIMIT: NonNegativeInt = Field(
description="Maximum number of requests per app per day",
default=5000,
)
class CodeExecutionSandboxConfig(BaseSettings):
@ -174,6 +174,33 @@ class CodeExecutionSandboxConfig(BaseSettings):
)
class TriggerConfig(BaseSettings):
"""
Configuration for trigger
"""
WEBHOOK_REQUEST_BODY_MAX_SIZE: PositiveInt = Field(
description="Maximum allowed size for webhook request bodies in bytes",
default=10485760,
)
class AsyncWorkflowConfig(BaseSettings):
"""
Configuration for async workflow
"""
ASYNC_WORKFLOW_SCHEDULER_GRANULARITY: int = Field(
description="Granularity for async workflow scheduler, "
"sometime, few users could block the queue due to some time-consuming tasks, "
"to avoid this, workflow can be suspended if needed, to achieve"
"this, a time-based checker is required, every granularity seconds, "
"the checker will check the workflow queue and suspend the workflow",
default=120,
ge=1,
)
class PluginConfig(BaseSettings):
"""
Plugin configs
@ -263,6 +290,8 @@ class EndpointConfig(BaseSettings):
description="Template url for endpoint plugin", default="http://localhost:5002/e/{hook_id}"
)
TRIGGER_URL: str = Field(description="Template url for triggers", default="http://localhost:5001")
class FileAccessConfig(BaseSettings):
"""
@ -871,16 +900,6 @@ class MailConfig(BaseSettings):
default=None,
)
ENABLE_TRIAL_APP: bool = Field(
description="Enable trial app",
default=False,
)
ENABLE_EXPLORE_BANNER: bool = Field(
description="Enable explore banner",
default=False,
)
class RagEtlConfig(BaseSettings):
"""
@ -1035,6 +1054,44 @@ class CeleryScheduleTasksConfig(BaseSettings):
description="Enable check upgradable plugin task",
default=True,
)
ENABLE_WORKFLOW_SCHEDULE_POLLER_TASK: bool = Field(
description="Enable workflow schedule poller task",
default=True,
)
WORKFLOW_SCHEDULE_POLLER_INTERVAL: int = Field(
description="Workflow schedule poller interval in minutes",
default=1,
)
WORKFLOW_SCHEDULE_POLLER_BATCH_SIZE: int = Field(
description="Maximum number of schedules to process in each poll batch",
default=100,
)
WORKFLOW_SCHEDULE_MAX_DISPATCH_PER_TICK: int = Field(
description="Maximum schedules to dispatch per tick (0=unlimited, circuit breaker)",
default=0,
)
# Trigger provider refresh (simple version)
ENABLE_TRIGGER_PROVIDER_REFRESH_TASK: bool = Field(
description="Enable trigger provider refresh poller",
default=True,
)
TRIGGER_PROVIDER_REFRESH_INTERVAL: int = Field(
description="Trigger provider refresh poller interval in minutes",
default=1,
)
TRIGGER_PROVIDER_REFRESH_BATCH_SIZE: int = Field(
description="Max trigger subscriptions to process per tick",
default=200,
)
TRIGGER_PROVIDER_CREDENTIAL_THRESHOLD_SECONDS: int = Field(
description="Proactive credential refresh threshold in seconds",
default=60 * 60,
)
TRIGGER_PROVIDER_SUBSCRIPTION_THRESHOLD_SECONDS: int = Field(
description="Proactive subscription refresh threshold in seconds",
default=60 * 60,
)
class PositionConfig(BaseSettings):
@ -1093,13 +1150,6 @@ class PositionConfig(BaseSettings):
return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
class CollaborationConfig(BaseSettings):
ENABLE_COLLABORATION_MODE: bool = Field(
description="Whether to enable collaboration mode features across the workspace",
default=False,
)
class LoginConfig(BaseSettings):
ENABLE_EMAIL_CODE_LOGIN: bool = Field(
description="whether to enable email code login",
@ -1140,7 +1190,7 @@ class AccountConfig(BaseSettings):
class WorkflowLogConfig(BaseSettings):
WORKFLOW_LOG_CLEANUP_ENABLED: bool = Field(default=True, description="Enable workflow run log cleanup")
WORKFLOW_LOG_CLEANUP_ENABLED: bool = Field(default=False, description="Enable workflow run log cleanup")
WORKFLOW_LOG_RETENTION_DAYS: int = Field(default=30, description="Retention days for workflow run logs")
WORKFLOW_LOG_CLEANUP_BATCH_SIZE: int = Field(
default=100, description="Batch size for workflow run log cleanup operations"
@ -1159,9 +1209,9 @@ class SwaggerUIConfig(BaseSettings):
)
class TenantSelfTaskQueueConfig(BaseSettings):
TENANT_SELF_TASK_QUEUE_PULL_SIZE: int = Field(
description="Default batch size for tenant self task queue pull operations",
class TenantIsolatedTaskQueueConfig(BaseSettings):
TENANT_ISOLATED_TASK_CONCURRENCY: int = Field(
description="Number of tasks allowed to be delivered concurrently from isolated queue per tenant",
default=1,
)
@ -1172,6 +1222,8 @@ class FeatureConfig(
AuthConfig, # Changed from OAuthConfig to AuthConfig
BillingConfig,
CodeExecutionSandboxConfig,
TriggerConfig,
AsyncWorkflowConfig,
PluginConfig,
MarketplaceConfig,
DataSetConfig,
@ -1190,13 +1242,12 @@ class FeatureConfig(
RagEtlConfig,
RepositoryConfig,
SecurityConfig,
TenantSelfTaskQueueConfig,
TenantIsolatedTaskQueueConfig,
ToolConfig,
UpdateConfig,
WorkflowConfig,
WorkflowNodeExecutionConfig,
WorkspaceConfig,
CollaborationConfig,
LoginConfig,
AccountConfig,
SwaggerUIConfig,

View File

@ -8,11 +8,6 @@ class HostedCreditConfig(BaseSettings):
default="",
)
HOSTED_POOL_CREDITS: int = Field(
description="Pool credits for hosted service",
default=200,
)
def get_model_credits(self, model_name: str) -> int:
"""
Get credit value for a specific model name.
@ -65,46 +60,19 @@ class HostedOpenAiConfig(BaseSettings):
HOSTED_OPENAI_TRIAL_MODELS: str = Field(
description="Comma-separated list of available models for trial access",
default="gpt-4,"
"gpt-4-turbo-preview,"
"gpt-4-turbo-2024-04-09,"
"gpt-4-1106-preview,"
"gpt-4-0125-preview,"
"gpt-4-turbo,"
"gpt-4.1,"
"gpt-4.1-2025-04-14,"
"gpt-4.1-mini,"
"gpt-4.1-mini-2025-04-14,"
"gpt-4.1-nano,"
"gpt-4.1-nano-2025-04-14,"
"gpt-3.5-turbo,"
default="gpt-3.5-turbo,"
"gpt-3.5-turbo-1106,"
"gpt-3.5-turbo-instruct,"
"gpt-3.5-turbo-16k,"
"gpt-3.5-turbo-16k-0613,"
"gpt-3.5-turbo-1106,"
"gpt-3.5-turbo-0613,"
"gpt-3.5-turbo-0125,"
"gpt-3.5-turbo-instruct,"
"text-davinci-003,"
"chatgpt-4o-latest,"
"gpt-4o,"
"gpt-4o-2024-05-13,"
"gpt-4o-2024-08-06,"
"gpt-4o-2024-11-20,"
"gpt-4o-audio-preview,"
"gpt-4o-audio-preview-2025-06-03,"
"gpt-4o-mini,"
"gpt-4o-mini-2024-07-18,"
"o3-mini,"
"o3-mini-2025-01-31,"
"gpt-5-mini-2025-08-07,"
"gpt-5-mini,"
"o4-mini,"
"o4-mini-2025-04-16,"
"gpt-5-chat-latest,"
"gpt-5,"
"gpt-5-2025-08-07,"
"gpt-5-nano,"
"gpt-5-nano-2025-08-07",
"text-davinci-003",
)
HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
description="Quota limit for hosted OpenAI service usage",
default=200,
)
HOSTED_OPENAI_PAID_ENABLED: bool = Field(
@ -119,13 +87,6 @@ class HostedOpenAiConfig(BaseSettings):
"gpt-4-turbo-2024-04-09,"
"gpt-4-1106-preview,"
"gpt-4-0125-preview,"
"gpt-4-turbo,"
"gpt-4.1,"
"gpt-4.1-2025-04-14,"
"gpt-4.1-mini,"
"gpt-4.1-mini-2025-04-14,"
"gpt-4.1-nano,"
"gpt-4.1-nano-2025-04-14,"
"gpt-3.5-turbo,"
"gpt-3.5-turbo-16k,"
"gpt-3.5-turbo-16k-0613,"
@ -133,150 +94,7 @@ class HostedOpenAiConfig(BaseSettings):
"gpt-3.5-turbo-0613,"
"gpt-3.5-turbo-0125,"
"gpt-3.5-turbo-instruct,"
"text-davinci-003,"
"chatgpt-4o-latest,"
"gpt-4o,"
"gpt-4o-2024-05-13,"
"gpt-4o-2024-08-06,"
"gpt-4o-2024-11-20,"
"gpt-4o-audio-preview,"
"gpt-4o-audio-preview-2025-06-03,"
"gpt-4o-mini,"
"gpt-4o-mini-2024-07-18,"
"o3-mini,"
"o3-mini-2025-01-31,"
"gpt-5-mini-2025-08-07,"
"gpt-5-mini,"
"o4-mini,"
"o4-mini-2025-04-16,"
"gpt-5-chat-latest,"
"gpt-5,"
"gpt-5-2025-08-07,"
"gpt-5-nano,"
"gpt-5-nano-2025-08-07",
)
class HostedGeminiConfig(BaseSettings):
"""
Configuration for fetching Gemini service
"""
HOSTED_GEMINI_API_KEY: str | None = Field(
description="API key for hosted Gemini service",
default=None,
)
HOSTED_GEMINI_API_BASE: str | None = Field(
description="Base URL for hosted Gemini API",
default=None,
)
HOSTED_GEMINI_API_ORGANIZATION: str | None = Field(
description="Organization ID for hosted Gemini service",
default=None,
)
HOSTED_GEMINI_TRIAL_ENABLED: bool = Field(
description="Enable trial access to hosted Gemini service",
default=False,
)
HOSTED_GEMINI_TRIAL_MODELS: str = Field(
description="Comma-separated list of available models for trial access",
default="gemini-2.5-flash,gemini-2.0-flash,gemini-2.0-flash-lite,",
)
HOSTED_GEMINI_PAID_ENABLED: bool = Field(
description="Enable paid access to hosted gemini service",
default=False,
)
HOSTED_GEMINI_PAID_MODELS: str = Field(
description="Comma-separated list of available models for paid access",
default="gemini-2.5-flash,gemini-2.0-flash,gemini-2.0-flash-lite,",
)
class HostedXAIConfig(BaseSettings):
"""
Configuration for fetching XAI service
"""
HOSTED_XAI_API_KEY: str | None = Field(
description="API key for hosted XAI service",
default=None,
)
HOSTED_XAI_API_BASE: str | None = Field(
description="Base URL for hosted XAI API",
default=None,
)
HOSTED_XAI_API_ORGANIZATION: str | None = Field(
description="Organization ID for hosted XAI service",
default=None,
)
HOSTED_XAI_TRIAL_ENABLED: bool = Field(
description="Enable trial access to hosted XAI service",
default=False,
)
HOSTED_XAI_TRIAL_MODELS: str = Field(
description="Comma-separated list of available models for trial access",
default="grok-3,grok-3-mini,grok-3-mini-fast",
)
HOSTED_XAI_PAID_ENABLED: bool = Field(
description="Enable paid access to hosted XAI service",
default=False,
)
HOSTED_XAI_PAID_MODELS: str = Field(
description="Comma-separated list of available models for paid access",
default="grok-3,grok-3-mini,grok-3-mini-fast",
)
class HostedDeepseekConfig(BaseSettings):
"""
Configuration for fetching Deepseek service
"""
HOSTED_DEEPSEEK_API_KEY: str | None = Field(
description="API key for hosted Deepseek service",
default=None,
)
HOSTED_DEEPSEEK_API_BASE: str | None = Field(
description="Base URL for hosted Deepseek API",
default=None,
)
HOSTED_DEEPSEEK_API_ORGANIZATION: str | None = Field(
description="Organization ID for hosted Deepseek service",
default=None,
)
HOSTED_DEEPSEEK_TRIAL_ENABLED: bool = Field(
description="Enable trial access to hosted Deepseek service",
default=False,
)
HOSTED_DEEPSEEK_TRIAL_MODELS: str = Field(
description="Comma-separated list of available models for trial access",
default="deepseek-chat,deepseek-reasoner",
)
HOSTED_DEEPSEEK_PAID_ENABLED: bool = Field(
description="Enable paid access to hosted XAI service",
default=False,
)
HOSTED_DEEPSEEK_PAID_MODELS: str = Field(
description="Comma-separated list of available models for paid access",
default="deepseek-chat,deepseek-reasoner",
"text-davinci-003",
)
@ -326,30 +144,16 @@ class HostedAnthropicConfig(BaseSettings):
default=False,
)
HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field(
description="Quota limit for hosted Anthropic service usage",
default=600000,
)
HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field(
description="Enable paid access to hosted Anthropic service",
default=False,
)
HOSTED_ANTHROPIC_TRIAL_MODELS: str = Field(
description="Comma-separated list of available models for paid access",
default="claude-opus-4-20250514,"
"claude-sonnet-4-20250514,"
"claude-3-5-haiku-20241022,"
"claude-3-opus-20240229,"
"claude-3-7-sonnet-20250219,"
"claude-3-haiku-20240307",
)
HOSTED_ANTHROPIC_PAID_MODELS: str = Field(
description="Comma-separated list of available models for paid access",
default="claude-opus-4-20250514,"
"claude-sonnet-4-20250514,"
"claude-3-5-haiku-20241022,"
"claude-3-opus-20240229,"
"claude-3-7-sonnet-20250219,"
"claude-3-haiku-20240307",
)
class HostedMinmaxConfig(BaseSettings):
"""
@ -446,8 +250,5 @@ class HostedServiceConfig(
HostedModerationConfig,
# credit config
HostedCreditConfig,
HostedGeminiConfig,
HostedXAIConfig,
HostedDeepseekConfig,
):
pass
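
The trimmed trial and paid model settings above remain plain comma-separated strings. A minimal sketch, assuming a hypothetical parse_model_list helper that is not part of this diff, of how such a value is typically consumed:

def parse_model_list(raw: str) -> list[str]:
    # Split the comma-separated setting, dropping whitespace and empty entries
    # (some of the defaults end with a trailing comma).
    return [name.strip() for name in raw.split(",") if name.strip()]

parse_model_list("gpt-3.5-turbo,gpt-3.5-turbo-1106,text-davinci-003")
# ['gpt-3.5-turbo', 'gpt-3.5-turbo-1106', 'text-davinci-003']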

View File

@ -105,6 +105,12 @@ class KeywordStoreConfig(BaseSettings):
class DatabaseConfig(BaseSettings):
# Database type selector
DB_TYPE: Literal["postgresql", "mysql", "oceanbase"] = Field(
description="Database type to use. OceanBase is MySQL-compatible.",
default="postgresql",
)
DB_HOST: str = Field(
description="Hostname or IP address of the database server.",
default="localhost",
@ -140,10 +146,10 @@ class DatabaseConfig(BaseSettings):
default="",
)
SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
description="Database URI scheme for SQLAlchemy connection.",
default="postgresql",
)
@computed_field # type: ignore[prop-decorator]
@property
def SQLALCHEMY_DATABASE_URI_SCHEME(self) -> str:
return "postgresql" if self.DB_TYPE == "postgresql" else "mysql+pymysql"
@computed_field # type: ignore[prop-decorator]
@property
@ -204,15 +210,15 @@ class DatabaseConfig(BaseSettings):
# Parse DB_EXTRAS for 'options'
db_extras_dict = dict(parse_qsl(self.DB_EXTRAS))
options = db_extras_dict.get("options", "")
# Always include timezone
timezone_opt = "-c timezone=UTC"
if options:
# Merge user options and timezone
merged_options = f"{options} {timezone_opt}"
else:
merged_options = timezone_opt
connect_args = {"options": merged_options}
connect_args = {}
# Use the dynamic SQLALCHEMY_DATABASE_URI_SCHEME property
if self.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"):
timezone_opt = "-c timezone=UTC"
if options:
merged_options = f"{options} {timezone_opt}"
else:
merged_options = timezone_opt
connect_args = {"options": merged_options}
return {
"pool_size": self.SQLALCHEMY_POOL_SIZE,

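The hunk above replaces the static SQLALCHEMY_DATABASE_URI_SCHEME setting with a property computed from DB_TYPE, and only attaches the UTC timezone option when the resulting scheme is PostgreSQL. A reduced, self-contained sketch of that behaviour; the build_connect_args helper here is illustrative and stands in for the full engine-options logic:

from typing import Literal

from pydantic import Field, computed_field
from pydantic_settings import BaseSettings


class DatabaseSchemeConfig(BaseSettings):
    DB_TYPE: Literal["postgresql", "mysql", "oceanbase"] = Field(default="postgresql")

    @computed_field  # type: ignore[prop-decorator]
    @property
    def SQLALCHEMY_DATABASE_URI_SCHEME(self) -> str:
        # OceanBase is MySQL-compatible, so anything other than PostgreSQL
        # falls back to the mysql+pymysql driver scheme.
        return "postgresql" if self.DB_TYPE == "postgresql" else "mysql+pymysql"

    def build_connect_args(self, options: str = "") -> dict[str, str]:
        # "-c timezone=UTC" is a libpq option, so it is only added when the
        # computed scheme is PostgreSQL.
        if not self.SQLALCHEMY_DATABASE_URI_SCHEME.startswith("postgresql"):
            return {}
        timezone_opt = "-c timezone=UTC"
        return {"options": f"{options} {timezone_opt}".strip()}


assert DatabaseSchemeConfig(DB_TYPE="oceanbase").SQLALCHEMY_DATABASE_URI_SCHEME == "mysql+pymysql"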
View File

@ -31,3 +31,8 @@ class WeaviateConfig(BaseSettings):
description="Number of objects to be processed in a single batch operation (default is 100)",
default=100,
)
WEAVIATE_TOKENIZATION: str | None = Field(
description="Tokenization for Weaviate (default is word)",
default="word",
)
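
The new WEAVIATE_TOKENIZATION setting typically ends up on the text properties of the collection schema. A hedged sketch of that wiring; the property dict follows Weaviate's REST schema format and this is not the project's vector-store code:

def build_text_property(name: str, tokenization: str | None = "word") -> dict:
    # Weaviate accepts a tokenization mode per text property; "word" matches
    # the default declared above.
    prop: dict = {"name": name, "dataType": ["text"]}
    if tokenization:
        prop["tokenization"] = tokenization
    return prop

build_text_property("page_content")
# {'name': 'page_content', 'dataType': ['text'], 'tokenization': 'word'}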

File diff suppressed because one or more lines are too long

View File

@ -9,6 +9,7 @@ if TYPE_CHECKING:
from core.model_runtime.entities.model_entities import AIModelEntity
from core.plugin.entities.plugin_daemon import PluginModelProviderEntity
from core.tools.plugin_tool.provider import PluginToolProviderController
from core.trigger.provider import PluginTriggerProviderController
"""
@ -41,3 +42,11 @@ datasource_plugin_providers: RecyclableContextVar[dict[str, "DatasourcePluginPro
datasource_plugin_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(
ContextVar("datasource_plugin_providers_lock")
)
plugin_trigger_providers: RecyclableContextVar[dict[str, "PluginTriggerProviderController"]] = RecyclableContextVar(
ContextVar("plugin_trigger_providers")
)
plugin_trigger_providers_lock: RecyclableContextVar[Lock] = RecyclableContextVar(
ContextVar("plugin_trigger_providers_lock")
)
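
The new plugin_trigger_providers / plugin_trigger_providers_lock pair mirrors the existing datasource provider cache. A generic illustration of that per-context cache guarded by a lock, using only stdlib contextvars; this is not the RecyclableContextVar API, and get_cached_trigger_provider is a hypothetical helper:

from contextvars import ContextVar
from threading import Lock
from typing import Callable

_providers: ContextVar[dict[str, object]] = ContextVar("demo_trigger_providers")
_providers_lock: ContextVar[Lock] = ContextVar("demo_trigger_providers_lock")


def get_cached_trigger_provider(name: str, factory: Callable[[], object]) -> object:
    # Lazily create the lock and the cache for the current context, then
    # populate the cache under the lock so concurrent lookups stay consistent.
    try:
        lock = _providers_lock.get()
    except LookupError:
        lock = Lock()
        _providers_lock.set(lock)
    with lock:
        try:
            cache = _providers.get()
        except LookupError:
            cache = {}
            _providers.set(cache)
        if name not in cache:
            cache[name] = factory()
        return cache[name]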

View File

@ -58,16 +58,15 @@ from .app import (
mcp_server,
message,
model_config,
online_user,
ops_trace,
site,
statistic,
workflow,
workflow_app_log,
workflow_comment,
workflow_draft_variable,
workflow_run,
workflow_statistic,
workflow_trigger,
)
# Import auth controllers
@ -108,12 +107,10 @@ from .datasets.rag_pipeline import (
# Import explore controllers
from .explore import (
banner,
installed_app,
parameter,
recommended_app,
saved_message,
trial,
)
# Import tag controllers
@ -130,6 +127,7 @@ from .workspace import (
models,
plugin,
tool_providers,
trigger_providers,
workspace,
)
@ -147,7 +145,6 @@ __all__ = [
"apikey",
"app",
"audio",
"banner",
"billing",
"bp",
"completion",
@ -201,7 +198,7 @@ __all__ = [
"statistic",
"tags",
"tool_providers",
"trial",
"trigger_providers",
"version",
"website",
"workflow",
@ -209,5 +206,6 @@ __all__ = [
"workflow_draft_variable",
"workflow_run",
"workflow_statistic",
"workflow_trigger",
"workspace",
]

View File

@ -12,11 +12,11 @@ P = ParamSpec("P")
R = TypeVar("R")
from configs import dify_config
from constants.languages import supported_language
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.wraps import only_edition_cloud
from extensions.ext_database import db
from libs.token import extract_access_token
from models.model import App, ExporleBanner, InstalledApp, RecommendedApp, TrialApp
from models.model import App, InstalledApp, RecommendedApp
def admin_required(view: Callable[P, R]):
@ -38,10 +38,10 @@ def admin_required(view: Callable[P, R]):
@console_ns.route("/admin/insert-explore-apps")
class InsertExploreAppListApi(Resource):
@api.doc("insert_explore_app")
@api.doc(description="Insert or update an app in the explore list")
@api.expect(
api.model(
@console_ns.doc("insert_explore_app")
@console_ns.doc(description="Insert or update an app in the explore list")
@console_ns.expect(
console_ns.model(
"InsertExploreAppRequest",
{
"app_id": fields.String(required=True, description="Application ID"),
@ -52,14 +52,12 @@ class InsertExploreAppListApi(Resource):
"language": fields.String(required=True, description="Language code"),
"category": fields.String(required=True, description="App category"),
"position": fields.Integer(required=True, description="Display position"),
"can_trial": fields.Boolean(required=True, description="Can trial"),
"trial_limit": fields.Integer(required=True, description="Trial limit"),
},
)
)
@api.response(200, "App updated successfully")
@api.response(201, "App inserted successfully")
@api.response(404, "App not found")
@console_ns.response(200, "App updated successfully")
@console_ns.response(201, "App inserted successfully")
@console_ns.response(404, "App not found")
@only_edition_cloud
@admin_required
def post(self):
@ -73,8 +71,6 @@ class InsertExploreAppListApi(Resource):
.add_argument("language", type=supported_language, required=True, nullable=False, location="json")
.add_argument("category", type=str, required=True, nullable=False, location="json")
.add_argument("position", type=int, required=True, nullable=False, location="json")
.add_argument("can_trial", type=bool, required=True, nullable=False, location="json")
.add_argument("trial_limit", type=int, required=True, nullable=False, location="json")
)
args = parser.parse_args()
@ -112,20 +108,6 @@ class InsertExploreAppListApi(Resource):
)
db.session.add(recommended_app)
if args["can_trial"]:
trial_app = db.session.execute(
select(TrialApp).where(TrialApp.app_id == args["app_id"])
).scalar_one_or_none()
if not trial_app:
db.session.add(
TrialApp(
app_id=args["app_id"],
tenant_id=app.tenant_id,
trial_limit=args["trial_limit"],
)
)
else:
trial_app.trial_limit = args["trial_limit"]
app.is_public = True
db.session.commit()
@ -140,20 +122,6 @@ class InsertExploreAppListApi(Resource):
recommended_app.category = args["category"]
recommended_app.position = args["position"]
if args["can_trial"]:
trial_app = db.session.execute(
select(TrialApp).where(TrialApp.app_id == args["app_id"])
).scalar_one_or_none()
if not trial_app:
db.session.add(
TrialApp(
app_id=args["app_id"],
tenant_id=app.tenant_id,
trial_limit=args["trial_limit"],
)
)
else:
trial_app.trial_limit = args["trial_limit"]
app.is_public = True
db.session.commit()
@ -163,10 +131,10 @@ class InsertExploreAppListApi(Resource):
@console_ns.route("/admin/insert-explore-apps/<uuid:app_id>")
class InsertExploreAppApi(Resource):
@api.doc("delete_explore_app")
@api.doc(description="Remove an app from the explore list")
@api.doc(params={"app_id": "Application ID to remove"})
@api.response(204, "App removed successfully")
@console_ns.doc("delete_explore_app")
@console_ns.doc(description="Remove an app from the explore list")
@console_ns.doc(params={"app_id": "Application ID to remove"})
@console_ns.response(204, "App removed successfully")
@only_edition_cloud
@admin_required
def delete(self, app_id):
@ -199,83 +167,7 @@ class InsertExploreAppApi(Resource):
for installed_app in installed_apps:
session.delete(installed_app)
trial_app = session.execute(
select(TrialApp).where(TrialApp.app_id == recommended_app.app_id)
).scalar_one_or_none()
if trial_app:
session.delete(trial_app)
db.session.delete(recommended_app)
db.session.commit()
return {"result": "success"}, 204
@console_ns.route("/admin/insert-explore-banner")
class InsertExploreBanner(Resource):
@api.doc("insert_explore_banner")
@api.doc(description="Insert an explore banner")
@api.expect(
api.model(
"InsertExploreBannerRequest",
{
"content": fields.String(required=True, description="Banner content"),
"link": fields.String(required=True, description="Banner link"),
"sort": fields.Integer(required=True, description="Banner sort"),
},
)
)
@api.response(200, "Banner inserted successfully")
@admin_required
@only_edition_cloud
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("category", type=str, required=True, nullable=False, location="json")
parser.add_argument("title", type=str, required=True, nullable=False, location="json")
parser.add_argument("description", type=str, required=True, nullable=False, location="json")
parser.add_argument("img-src", type=str, required=True, nullable=False, location="json")
parser.add_argument("language", type=str, required=True, nullable=False, location="json")
parser.add_argument("link", type=str, required=True, nullable=False, location="json")
parser.add_argument("sort", type=int, required=True, nullable=False, location="json")
args = parser.parse_args()
content = {
"category": args["category"],
"title": args["title"],
"description": args["description"],
"img-src": args["img-src"],
}
if not args["language"]:
args["language"] = "en-US"
banner = ExporleBanner(
content=content,
link=args["link"],
sort=args["sort"],
language=args["language"],
)
db.session.add(banner)
db.session.commit()
return {"result": "success"}, 200
@console_ns.route("/admin/delete-explore-banner/<uuid:banner_id>")
class DeleteExploreBanner(Resource):
@api.doc("delete_explore_banner")
@api.doc(description="Delete an explore banner")
@api.response(204, "Banner deleted successfully")
@admin_required
@only_edition_cloud
def delete(self, banner_id):
banner = db.session.execute(select(ExporleBanner).where(ExporleBanner.id == banner_id)).scalar_one_or_none()
if not banner:
raise NotFound(f"Banner '{banner_id}' is not found")
db.session.delete(banner)
db.session.commit()
return {"result": "success"}, 204
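
The decorator changes in this file move the Swagger metadata from the global api object onto the console_ns namespace. A minimal, self-contained flask_restx sketch of that shape; the route, model, and names are illustrative only:

from flask import Flask
from flask_restx import Api, Namespace, Resource, fields

app = Flask(__name__)
api = Api(app)
console_ns = Namespace("console", path="/console")
api.add_namespace(console_ns)

result_model = console_ns.model("ResultResponse", {"result": fields.String})


@console_ns.route("/ping")
class PingApi(Resource):
    @console_ns.doc("ping")
    @console_ns.doc(description="Example endpoint documented on the namespace")
    @console_ns.response(200, "Success", result_model)
    def get(self):
        return {"result": "success"}, 200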

View File

@ -11,7 +11,7 @@ from libs.login import current_account_with_tenant, login_required
from models.dataset import Dataset
from models.model import ApiToken, App
from . import api, console_ns
from . import console_ns
from .wraps import account_initialization_required, edit_permission_required, setup_required
api_key_fields = {
@ -24,6 +24,12 @@ api_key_fields = {
api_key_list = {"data": fields.List(fields.Nested(api_key_fields), attribute="items")}
api_key_item_model = console_ns.model("ApiKeyItem", api_key_fields)
api_key_list_model = console_ns.model(
"ApiKeyList", {"data": fields.List(fields.Nested(api_key_item_model), attribute="items")}
)
def _get_resource(resource_id, tenant_id, resource_model):
if resource_model == App:
@ -52,7 +58,7 @@ class BaseApiKeyListResource(Resource):
token_prefix: str | None = None
max_keys = 10
@marshal_with(api_key_list)
@marshal_with(api_key_list_model)
def get(self, resource_id):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
@ -66,7 +72,7 @@ class BaseApiKeyListResource(Resource):
).all()
return {"items": keys}
@marshal_with(api_key_fields)
@marshal_with(api_key_item_model)
@edit_permission_required
def post(self, resource_id):
assert self.resource_id_field is not None, "resource_id_field must be set"
@ -104,14 +110,11 @@ class BaseApiKeyResource(Resource):
resource_model: type | None = None
resource_id_field: str | None = None
def delete(self, resource_id, api_key_id):
def delete(self, resource_id: str, api_key_id: str):
assert self.resource_id_field is not None, "resource_id_field must be set"
resource_id = str(resource_id)
api_key_id = str(api_key_id)
current_user, current_tenant_id = current_account_with_tenant()
_get_resource(resource_id, current_tenant_id, self.resource_model)
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
@ -136,20 +139,20 @@ class BaseApiKeyResource(Resource):
@console_ns.route("/apps/<uuid:resource_id>/api-keys")
class AppApiKeyListResource(BaseApiKeyListResource):
@api.doc("get_app_api_keys")
@api.doc(description="Get all API keys for an app")
@api.doc(params={"resource_id": "App ID"})
@api.response(200, "Success", api_key_list)
def get(self, resource_id):
@console_ns.doc("get_app_api_keys")
@console_ns.doc(description="Get all API keys for an app")
@console_ns.doc(params={"resource_id": "App ID"})
@console_ns.response(200, "Success", api_key_list_model)
def get(self, resource_id): # type: ignore
"""Get all API keys for an app"""
return super().get(resource_id)
@api.doc("create_app_api_key")
@api.doc(description="Create a new API key for an app")
@api.doc(params={"resource_id": "App ID"})
@api.response(201, "API key created successfully", api_key_fields)
@api.response(400, "Maximum keys exceeded")
def post(self, resource_id):
@console_ns.doc("create_app_api_key")
@console_ns.doc(description="Create a new API key for an app")
@console_ns.doc(params={"resource_id": "App ID"})
@console_ns.response(201, "API key created successfully", api_key_item_model)
@console_ns.response(400, "Maximum keys exceeded")
def post(self, resource_id): # type: ignore
"""Create a new API key for an app"""
return super().post(resource_id)
@ -161,10 +164,10 @@ class AppApiKeyListResource(BaseApiKeyListResource):
@console_ns.route("/apps/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
class AppApiKeyResource(BaseApiKeyResource):
@api.doc("delete_app_api_key")
@api.doc(description="Delete an API key for an app")
@api.doc(params={"resource_id": "App ID", "api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
@console_ns.doc("delete_app_api_key")
@console_ns.doc(description="Delete an API key for an app")
@console_ns.doc(params={"resource_id": "App ID", "api_key_id": "API key ID"})
@console_ns.response(204, "API key deleted successfully")
def delete(self, resource_id, api_key_id):
"""Delete an API key for an app"""
return super().delete(resource_id, api_key_id)
@ -176,20 +179,20 @@ class AppApiKeyResource(BaseApiKeyResource):
@console_ns.route("/datasets/<uuid:resource_id>/api-keys")
class DatasetApiKeyListResource(BaseApiKeyListResource):
@api.doc("get_dataset_api_keys")
@api.doc(description="Get all API keys for a dataset")
@api.doc(params={"resource_id": "Dataset ID"})
@api.response(200, "Success", api_key_list)
def get(self, resource_id):
@console_ns.doc("get_dataset_api_keys")
@console_ns.doc(description="Get all API keys for a dataset")
@console_ns.doc(params={"resource_id": "Dataset ID"})
@console_ns.response(200, "Success", api_key_list_model)
def get(self, resource_id): # type: ignore
"""Get all API keys for a dataset"""
return super().get(resource_id)
@api.doc("create_dataset_api_key")
@api.doc(description="Create a new API key for a dataset")
@api.doc(params={"resource_id": "Dataset ID"})
@api.response(201, "API key created successfully", api_key_fields)
@api.response(400, "Maximum keys exceeded")
def post(self, resource_id):
@console_ns.doc("create_dataset_api_key")
@console_ns.doc(description="Create a new API key for a dataset")
@console_ns.doc(params={"resource_id": "Dataset ID"})
@console_ns.response(201, "API key created successfully", api_key_item_model)
@console_ns.response(400, "Maximum keys exceeded")
def post(self, resource_id): # type: ignore
"""Create a new API key for a dataset"""
return super().post(resource_id)
@ -201,10 +204,10 @@ class DatasetApiKeyListResource(BaseApiKeyListResource):
@console_ns.route("/datasets/<uuid:resource_id>/api-keys/<uuid:api_key_id>")
class DatasetApiKeyResource(BaseApiKeyResource):
@api.doc("delete_dataset_api_key")
@api.doc(description="Delete an API key for a dataset")
@api.doc(params={"resource_id": "Dataset ID", "api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
@console_ns.doc("delete_dataset_api_key")
@console_ns.doc(description="Delete an API key for a dataset")
@console_ns.doc(params={"resource_id": "Dataset ID", "api_key_id": "API key ID"})
@console_ns.response(204, "API key deleted successfully")
def delete(self, resource_id, api_key_id):
"""Delete an API key for a dataset"""
return super().delete(resource_id, api_key_id)
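
The api_key_item_model / api_key_list_model registrations above exist so marshal_with serialises against a named Swagger model rather than a bare field dict. A small sketch of that pattern with illustrative field names:

from flask_restx import Namespace, fields, marshal

ns = Namespace("demo")
item_model = ns.model("ApiKeyItemDemo", {"id": fields.String, "token": fields.String})
list_model = ns.model(
    "ApiKeyListDemo", {"data": fields.List(fields.Nested(item_model), attribute="items")}
)

payload = {"items": [{"id": "1", "token": "app-xxxxxxxx"}]}
marshal(payload, list_model)
# {'data': [{'id': '1', 'token': 'app-xxxxxxxx'}]}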

View File

@ -1,37 +1,32 @@
from flask_restx import Resource, fields, reqparse
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.advanced_prompt_template_service import AdvancedPromptTemplateService
parser = (
reqparse.RequestParser()
.add_argument("app_mode", type=str, required=True, location="args", help="Application mode")
.add_argument("model_mode", type=str, required=True, location="args", help="Model mode")
.add_argument("has_context", type=str, required=False, default="true", location="args", help="Whether has context")
.add_argument("model_name", type=str, required=True, location="args", help="Model name")
)
@console_ns.route("/app/prompt-templates")
class AdvancedPromptTemplateList(Resource):
@api.doc("get_advanced_prompt_templates")
@api.doc(description="Get advanced prompt templates based on app mode and model configuration")
@api.expect(
api.parser()
.add_argument("app_mode", type=str, required=True, location="args", help="Application mode")
.add_argument("model_mode", type=str, required=True, location="args", help="Model mode")
.add_argument("has_context", type=str, default="true", location="args", help="Whether has context")
.add_argument("model_name", type=str, required=True, location="args", help="Model name")
)
@api.response(
@console_ns.doc("get_advanced_prompt_templates")
@console_ns.doc(description="Get advanced prompt templates based on app mode and model configuration")
@console_ns.expect(parser)
@console_ns.response(
200, "Prompt templates retrieved successfully", fields.List(fields.Raw(description="Prompt template data"))
)
@api.response(400, "Invalid request parameters")
@console_ns.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
def get(self):
parser = (
reqparse.RequestParser()
.add_argument("app_mode", type=str, required=True, location="args")
.add_argument("model_mode", type=str, required=True, location="args")
.add_argument("has_context", type=str, required=False, default="true", location="args")
.add_argument("model_name", type=str, required=True, location="args")
)
args = parser.parse_args()
return AdvancedPromptTemplateService.get_prompt(args)

View File

@ -1,6 +1,6 @@
from flask_restx import Resource, fields, reqparse
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from libs.helper import uuid_value
@ -8,31 +8,29 @@ from libs.login import login_required
from models.model import AppMode
from services.agent_service import AgentService
parser = (
reqparse.RequestParser()
.add_argument("message_id", type=uuid_value, required=True, location="args", help="Message UUID")
.add_argument("conversation_id", type=uuid_value, required=True, location="args", help="Conversation UUID")
)
@console_ns.route("/apps/<uuid:app_id>/agent/logs")
class AgentLogApi(Resource):
@api.doc("get_agent_logs")
@api.doc(description="Get agent execution logs for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("message_id", type=str, required=True, location="args", help="Message UUID")
.add_argument("conversation_id", type=str, required=True, location="args", help="Conversation UUID")
@console_ns.doc("get_agent_logs")
@console_ns.doc(description="Get agent execution logs for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(
200, "Agent logs retrieved successfully", fields.List(fields.Raw(description="Agent log entries"))
)
@api.response(200, "Agent logs retrieved successfully", fields.List(fields.Raw(description="Agent log entries")))
@api.response(400, "Invalid request parameters")
@console_ns.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.AGENT_CHAT])
def get(self, app_model):
"""Get agent logs"""
parser = (
reqparse.RequestParser()
.add_argument("message_id", type=uuid_value, required=True, location="args")
.add_argument("conversation_id", type=uuid_value, required=True, location="args")
)
args = parser.parse_args()
return AgentService.get_agent_logs(app_model, args["conversation_id"], args["message_id"])

View File

@ -4,7 +4,7 @@ from flask import request
from flask_restx import Resource, fields, marshal, marshal_with, reqparse
from controllers.common.errors import NoFileUploadedError, TooManyFilesError
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
@ -15,6 +15,7 @@ from extensions.ext_redis import redis_client
from fields.annotation_fields import (
annotation_fields,
annotation_hit_history_fields,
build_annotation_model,
)
from libs.helper import uuid_value
from libs.login import login_required
@ -23,11 +24,11 @@ from services.annotation_service import AppAnnotationService
@console_ns.route("/apps/<uuid:app_id>/annotation-reply/<string:action>")
class AnnotationReplyActionApi(Resource):
@api.doc("annotation_reply_action")
@api.doc(description="Enable or disable annotation reply for an app")
@api.doc(params={"app_id": "Application ID", "action": "Action to perform (enable/disable)"})
@api.expect(
api.model(
@console_ns.doc("annotation_reply_action")
@console_ns.doc(description="Enable or disable annotation reply for an app")
@console_ns.doc(params={"app_id": "Application ID", "action": "Action to perform (enable/disable)"})
@console_ns.expect(
console_ns.model(
"AnnotationReplyActionRequest",
{
"score_threshold": fields.Float(required=True, description="Score threshold for annotation matching"),
@ -36,8 +37,8 @@ class AnnotationReplyActionApi(Resource):
},
)
)
@api.response(200, "Action completed successfully")
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Action completed successfully")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -61,11 +62,11 @@ class AnnotationReplyActionApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotation-setting")
class AppAnnotationSettingDetailApi(Resource):
@api.doc("get_annotation_setting")
@api.doc(description="Get annotation settings for an app")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Annotation settings retrieved successfully")
@api.response(403, "Insufficient permissions")
@console_ns.doc("get_annotation_setting")
@console_ns.doc(description="Get annotation settings for an app")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Annotation settings retrieved successfully")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -78,11 +79,11 @@ class AppAnnotationSettingDetailApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotation-settings/<uuid:annotation_setting_id>")
class AppAnnotationSettingUpdateApi(Resource):
@api.doc("update_annotation_setting")
@api.doc(description="Update annotation settings for an app")
@api.doc(params={"app_id": "Application ID", "annotation_setting_id": "Annotation setting ID"})
@api.expect(
api.model(
@console_ns.doc("update_annotation_setting")
@console_ns.doc(description="Update annotation settings for an app")
@console_ns.doc(params={"app_id": "Application ID", "annotation_setting_id": "Annotation setting ID"})
@console_ns.expect(
console_ns.model(
"AnnotationSettingUpdateRequest",
{
"score_threshold": fields.Float(required=True, description="Score threshold"),
@ -91,8 +92,8 @@ class AppAnnotationSettingUpdateApi(Resource):
},
)
)
@api.response(200, "Settings updated successfully")
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Settings updated successfully")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -110,11 +111,11 @@ class AppAnnotationSettingUpdateApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>")
class AnnotationReplyActionStatusApi(Resource):
@api.doc("get_annotation_reply_action_status")
@api.doc(description="Get status of annotation reply action job")
@api.doc(params={"app_id": "Application ID", "job_id": "Job ID", "action": "Action type"})
@api.response(200, "Job status retrieved successfully")
@api.response(403, "Insufficient permissions")
@console_ns.doc("get_annotation_reply_action_status")
@console_ns.doc(description="Get status of annotation reply action job")
@console_ns.doc(params={"app_id": "Application ID", "job_id": "Job ID", "action": "Action type"})
@console_ns.response(200, "Job status retrieved successfully")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -138,17 +139,17 @@ class AnnotationReplyActionStatusApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotations")
class AnnotationApi(Resource):
@api.doc("list_annotations")
@api.doc(description="Get annotations for an app with pagination")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
@console_ns.doc("list_annotations")
@console_ns.doc(description="Get annotations for an app with pagination")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.parser()
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size")
.add_argument("keyword", type=str, location="args", default="", help="Search keyword")
)
@api.response(200, "Annotations retrieved successfully")
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Annotations retrieved successfully")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -169,11 +170,11 @@ class AnnotationApi(Resource):
}
return response, 200
@api.doc("create_annotation")
@api.doc(description="Create a new annotation for an app")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("create_annotation")
@console_ns.doc(description="Create a new annotation for an app")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"CreateAnnotationRequest",
{
"message_id": fields.String(description="Message ID (optional)"),
@ -184,8 +185,8 @@ class AnnotationApi(Resource):
},
)
)
@api.response(201, "Annotation created successfully", annotation_fields)
@api.response(403, "Insufficient permissions")
@console_ns.response(201, "Annotation created successfully", build_annotation_model(console_ns))
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -235,11 +236,15 @@ class AnnotationApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotations/export")
class AnnotationExportApi(Resource):
@api.doc("export_annotations")
@api.doc(description="Export all annotations for an app")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Annotations exported successfully", fields.List(fields.Nested(annotation_fields)))
@api.response(403, "Insufficient permissions")
@console_ns.doc("export_annotations")
@console_ns.doc(description="Export all annotations for an app")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(
200,
"Annotations exported successfully",
console_ns.model("AnnotationList", {"data": fields.List(fields.Nested(build_annotation_model(console_ns)))}),
)
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -251,14 +256,22 @@ class AnnotationExportApi(Resource):
return response, 200
parser = (
reqparse.RequestParser()
.add_argument("question", required=True, type=str, location="json")
.add_argument("answer", required=True, type=str, location="json")
)
@console_ns.route("/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
class AnnotationUpdateDeleteApi(Resource):
@api.doc("update_delete_annotation")
@api.doc(description="Update or delete an annotation")
@api.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"})
@api.response(200, "Annotation updated successfully", annotation_fields)
@api.response(204, "Annotation deleted successfully")
@api.response(403, "Insufficient permissions")
@console_ns.doc("update_delete_annotation")
@console_ns.doc(description="Update or delete an annotation")
@console_ns.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"})
@console_ns.response(200, "Annotation updated successfully", build_annotation_model(console_ns))
@console_ns.response(204, "Annotation deleted successfully")
@console_ns.response(403, "Insufficient permissions")
@console_ns.expect(parser)
@setup_required
@login_required
@account_initialization_required
@ -268,11 +281,6 @@ class AnnotationUpdateDeleteApi(Resource):
def post(self, app_id, annotation_id):
app_id = str(app_id)
annotation_id = str(annotation_id)
parser = (
reqparse.RequestParser()
.add_argument("question", required=True, type=str, location="json")
.add_argument("answer", required=True, type=str, location="json")
)
args = parser.parse_args()
annotation = AppAnnotationService.update_app_annotation_directly(args, app_id, annotation_id)
return annotation
@ -290,12 +298,12 @@ class AnnotationUpdateDeleteApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotations/batch-import")
class AnnotationBatchImportApi(Resource):
@api.doc("batch_import_annotations")
@api.doc(description="Batch import annotations from CSV file")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Batch import started successfully")
@api.response(403, "Insufficient permissions")
@api.response(400, "No file uploaded or too many files")
@console_ns.doc("batch_import_annotations")
@console_ns.doc(description="Batch import annotations from CSV file")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Batch import started successfully")
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(400, "No file uploaded or too many files")
@setup_required
@login_required
@account_initialization_required
@ -320,11 +328,11 @@ class AnnotationBatchImportApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotations/batch-import-status/<uuid:job_id>")
class AnnotationBatchImportStatusApi(Resource):
@api.doc("get_batch_import_status")
@api.doc(description="Get status of batch import job")
@api.doc(params={"app_id": "Application ID", "job_id": "Job ID"})
@api.response(200, "Job status retrieved successfully")
@api.response(403, "Insufficient permissions")
@console_ns.doc("get_batch_import_status")
@console_ns.doc(description="Get status of batch import job")
@console_ns.doc(params={"app_id": "Application ID", "job_id": "Job ID"})
@console_ns.response(200, "Job status retrieved successfully")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@ -347,18 +355,27 @@ class AnnotationBatchImportStatusApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotations/<uuid:annotation_id>/hit-histories")
class AnnotationHitHistoryListApi(Resource):
@api.doc("list_annotation_hit_histories")
@api.doc(description="Get hit histories for an annotation")
@api.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"})
@api.expect(
api.parser()
@console_ns.doc("list_annotation_hit_histories")
@console_ns.doc(description="Get hit histories for an annotation")
@console_ns.doc(params={"app_id": "Application ID", "annotation_id": "Annotation ID"})
@console_ns.expect(
console_ns.parser()
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size")
)
@api.response(
200, "Hit histories retrieved successfully", fields.List(fields.Nested(annotation_hit_history_fields))
@console_ns.response(
200,
"Hit histories retrieved successfully",
console_ns.model(
"AnnotationHitHistoryList",
{
"data": fields.List(
fields.Nested(console_ns.model("AnnotationHitHistoryItem", annotation_hit_history_fields))
)
},
),
)
@api.response(403, "Insufficient permissions")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required

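The build_annotation_model(console_ns) calls above suggest a factory that registers the shared annotation fields on whichever namespace needs them. A hedged sketch of what such a factory generally looks like; this is an assumption, not the actual fields/annotation_fields.py source, and the field set is illustrative:

from flask_restx import Namespace, fields

demo_annotation_fields = {
    "id": fields.String,
    "question": fields.String,
    "answer": fields.String,
}


def build_annotation_model(ns: Namespace):
    # Namespace.models acts as a registry, so repeated calls return the model
    # already registered under the same Swagger name instead of redefining it.
    existing = ns.models.get("Annotation")
    if existing is not None:
        return existing
    return ns.model("Annotation", demo_annotation_fields)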
View File

@ -3,23 +3,33 @@ import uuid
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import BadRequest, Forbidden, abort
from werkzeug.exceptions import BadRequest, abort
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
edit_permission_required,
enterprise_license_required,
is_admin_or_owner_required,
setup_required,
)
from core.ops.ops_trace_manager import OpsTraceManager
from core.workflow.enums import NodeType
from extensions.ext_database import db
from fields.app_fields import app_detail_fields, app_detail_fields_with_site, app_pagination_fields
from fields.app_fields import (
deleted_tool_fields,
model_config_fields,
model_config_partial_fields,
site_fields,
tag_fields,
)
from fields.workflow_fields import workflow_partial_fields as _workflow_partial_fields_dict
from libs.helper import AppIconUrlField, TimestampField
from libs.login import current_account_with_tenant, login_required
from libs.validators import validate_description_length
from models import App
from models import App, Workflow
from services.app_dsl_service import AppDslService, ImportMode
from services.app_service import AppService
from services.enterprise.enterprise_service import EnterpriseService
@ -27,13 +37,118 @@ from services.feature_service import FeatureService
ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"]
# Register models for flask_restx to avoid dict type issues in Swagger
# Register base models first
tag_model = console_ns.model("Tag", tag_fields)
workflow_partial_model = console_ns.model("WorkflowPartial", _workflow_partial_fields_dict)
model_config_model = console_ns.model("ModelConfig", model_config_fields)
model_config_partial_model = console_ns.model("ModelConfigPartial", model_config_partial_fields)
deleted_tool_model = console_ns.model("DeletedTool", deleted_tool_fields)
site_model = console_ns.model("Site", site_fields)
app_partial_model = console_ns.model(
"AppPartial",
{
"id": fields.String,
"name": fields.String,
"max_active_requests": fields.Raw(),
"description": fields.String(attribute="desc_or_prompt"),
"mode": fields.String(attribute="mode_compatible_with_agent"),
"icon_type": fields.String,
"icon": fields.String,
"icon_background": fields.String,
"icon_url": AppIconUrlField,
"model_config": fields.Nested(model_config_partial_model, attribute="app_model_config", allow_null=True),
"workflow": fields.Nested(workflow_partial_model, allow_null=True),
"use_icon_as_answer_icon": fields.Boolean,
"created_by": fields.String,
"created_at": TimestampField,
"updated_by": fields.String,
"updated_at": TimestampField,
"tags": fields.List(fields.Nested(tag_model)),
"access_mode": fields.String,
"create_user_name": fields.String,
"author_name": fields.String,
"has_draft_trigger": fields.Boolean,
},
)
app_detail_model = console_ns.model(
"AppDetail",
{
"id": fields.String,
"name": fields.String,
"description": fields.String,
"mode": fields.String(attribute="mode_compatible_with_agent"),
"icon": fields.String,
"icon_background": fields.String,
"enable_site": fields.Boolean,
"enable_api": fields.Boolean,
"model_config": fields.Nested(model_config_model, attribute="app_model_config", allow_null=True),
"workflow": fields.Nested(workflow_partial_model, allow_null=True),
"tracing": fields.Raw,
"use_icon_as_answer_icon": fields.Boolean,
"created_by": fields.String,
"created_at": TimestampField,
"updated_by": fields.String,
"updated_at": TimestampField,
"access_mode": fields.String,
"tags": fields.List(fields.Nested(tag_model)),
},
)
app_detail_with_site_model = console_ns.model(
"AppDetailWithSite",
{
"id": fields.String,
"name": fields.String,
"description": fields.String,
"mode": fields.String(attribute="mode_compatible_with_agent"),
"icon_type": fields.String,
"icon": fields.String,
"icon_background": fields.String,
"icon_url": AppIconUrlField,
"enable_site": fields.Boolean,
"enable_api": fields.Boolean,
"model_config": fields.Nested(model_config_model, attribute="app_model_config", allow_null=True),
"workflow": fields.Nested(workflow_partial_model, allow_null=True),
"api_base_url": fields.String,
"use_icon_as_answer_icon": fields.Boolean,
"max_active_requests": fields.Integer,
"created_by": fields.String,
"created_at": TimestampField,
"updated_by": fields.String,
"updated_at": TimestampField,
"deleted_tools": fields.List(fields.Nested(deleted_tool_model)),
"access_mode": fields.String,
"tags": fields.List(fields.Nested(tag_model)),
"site": fields.Nested(site_model),
},
)
app_pagination_model = console_ns.model(
"AppPagination",
{
"page": fields.Integer,
"limit": fields.Integer(attribute="per_page"),
"total": fields.Integer,
"has_more": fields.Boolean(attribute="has_next"),
"data": fields.List(fields.Nested(app_partial_model), attribute="items"),
},
)
@console_ns.route("/apps")
class AppListApi(Resource):
@api.doc("list_apps")
@api.doc(description="Get list of applications with pagination and filtering")
@api.expect(
api.parser()
@console_ns.doc("list_apps")
@console_ns.doc(description="Get list of applications with pagination and filtering")
@console_ns.expect(
console_ns.parser()
.add_argument("page", type=int, location="args", help="Page number (1-99999)", default=1)
.add_argument("limit", type=int, location="args", help="Page size (1-100)", default=20)
.add_argument(
@ -48,7 +163,7 @@ class AppListApi(Resource):
.add_argument("tag_ids", type=str, location="args", help="Comma-separated tag IDs")
.add_argument("is_created_by_me", type=bool, location="args", help="Filter by creator")
)
@api.response(200, "Success", app_pagination_fields)
@console_ns.response(200, "Success", app_pagination_model)
@setup_required
@login_required
@account_initialization_required
@ -106,12 +221,41 @@ class AppListApi(Resource):
if str(app.id) in res:
app.access_mode = res[str(app.id)].access_mode
return marshal(app_pagination, app_pagination_fields), 200
workflow_capable_app_ids = [
str(app.id) for app in app_pagination.items if app.mode in {"workflow", "advanced-chat"}
]
draft_trigger_app_ids: set[str] = set()
if workflow_capable_app_ids:
draft_workflows = (
db.session.execute(
select(Workflow).where(
Workflow.version == Workflow.VERSION_DRAFT,
Workflow.app_id.in_(workflow_capable_app_ids),
)
)
.scalars()
.all()
)
trigger_node_types = {
NodeType.TRIGGER_WEBHOOK,
NodeType.TRIGGER_SCHEDULE,
NodeType.TRIGGER_PLUGIN,
}
for workflow in draft_workflows:
for _, node_data in workflow.walk_nodes():
if node_data.get("type") in trigger_node_types:
draft_trigger_app_ids.add(str(workflow.app_id))
break
@api.doc("create_app")
@api.doc(description="Create a new application")
@api.expect(
api.model(
for app in app_pagination.items:
app.has_draft_trigger = str(app.id) in draft_trigger_app_ids
return marshal(app_pagination, app_pagination_model), 200
@console_ns.doc("create_app")
@console_ns.doc(description="Create a new application")
@console_ns.expect(
console_ns.model(
"CreateAppRequest",
{
"name": fields.String(required=True, description="App name"),
@ -123,13 +267,13 @@ class AppListApi(Resource):
},
)
)
@api.response(201, "App created successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(400, "Invalid request parameters")
@console_ns.response(201, "App created successfully", app_detail_model)
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_detail_fields)
@marshal_with(app_detail_model)
@cloud_edition_billing_resource_check("apps")
@edit_permission_required
def post(self):
@ -157,16 +301,16 @@ class AppListApi(Resource):
@console_ns.route("/apps/<uuid:app_id>")
class AppApi(Resource):
@api.doc("get_app_detail")
@api.doc(description="Get application details")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Success", app_detail_fields_with_site)
@console_ns.doc("get_app_detail")
@console_ns.doc(description="Get application details")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Success", app_detail_with_site_model)
@setup_required
@login_required
@account_initialization_required
@enterprise_license_required
@get_app_model
@marshal_with(app_detail_fields_with_site)
@marshal_with(app_detail_with_site_model)
def get(self, app_model):
"""Get app detail"""
app_service = AppService()
@ -179,11 +323,11 @@ class AppApi(Resource):
return app_model
@api.doc("update_app")
@api.doc(description="Update application details")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_app")
@console_ns.doc(description="Update application details")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"UpdateAppRequest",
{
"name": fields.String(required=True, description="App name"),
@ -196,15 +340,15 @@ class AppApi(Resource):
},
)
)
@api.response(200, "App updated successfully", app_detail_fields_with_site)
@api.response(403, "Insufficient permissions")
@api.response(400, "Invalid request parameters")
@console_ns.response(200, "App updated successfully", app_detail_with_site_model)
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@get_app_model
@edit_permission_required
@marshal_with(app_detail_fields_with_site)
@marshal_with(app_detail_with_site_model)
def put(self, app_model):
"""Update app"""
parser = (
@ -220,10 +364,8 @@ class AppApi(Resource):
args = parser.parse_args()
app_service = AppService()
# Construct ArgsDict from parsed arguments
from services.app_service import AppService as AppServiceType
args_dict: AppServiceType.ArgsDict = {
args_dict: AppService.ArgsDict = {
"name": args["name"],
"description": args.get("description", ""),
"icon_type": args.get("icon_type", ""),
@ -236,11 +378,11 @@ class AppApi(Resource):
return app_model
@api.doc("delete_app")
@api.doc(description="Delete application")
@api.doc(params={"app_id": "Application ID"})
@api.response(204, "App deleted successfully")
@api.response(403, "Insufficient permissions")
@console_ns.doc("delete_app")
@console_ns.doc(description="Delete application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(204, "App deleted successfully")
@console_ns.response(403, "Insufficient permissions")
@get_app_model
@setup_required
@login_required
@ -256,11 +398,11 @@ class AppApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/copy")
class AppCopyApi(Resource):
@api.doc("copy_app")
@api.doc(description="Create a copy of an existing application")
@api.doc(params={"app_id": "Application ID to copy"})
@api.expect(
api.model(
@console_ns.doc("copy_app")
@console_ns.doc(description="Create a copy of an existing application")
@console_ns.doc(params={"app_id": "Application ID to copy"})
@console_ns.expect(
console_ns.model(
"CopyAppRequest",
{
"name": fields.String(description="Name for the copied app"),
@ -271,14 +413,14 @@ class AppCopyApi(Resource):
},
)
)
@api.response(201, "App copied successfully", app_detail_fields_with_site)
@api.response(403, "Insufficient permissions")
@console_ns.response(201, "App copied successfully", app_detail_with_site_model)
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@get_app_model
@edit_permission_required
@marshal_with(app_detail_fields_with_site)
@marshal_with(app_detail_with_site_model)
def post(self, app_model):
"""Copy app"""
# The role of the current user in the ta table must be admin, owner, or editor
@ -317,20 +459,20 @@ class AppCopyApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/export")
class AppExportApi(Resource):
@api.doc("export_app")
@api.doc(description="Export application configuration as DSL")
@api.doc(params={"app_id": "Application ID to export"})
@api.expect(
api.parser()
@console_ns.doc("export_app")
@console_ns.doc(description="Export application configuration as DSL")
@console_ns.doc(params={"app_id": "Application ID to export"})
@console_ns.expect(
console_ns.parser()
.add_argument("include_secret", type=bool, location="args", default=False, help="Include secrets in export")
.add_argument("workflow_id", type=str, location="args", help="Specific workflow ID to export")
)
@api.response(
@console_ns.response(
200,
"App exported successfully",
api.model("AppExportResponse", {"data": fields.String(description="DSL export data")}),
console_ns.model("AppExportResponse", {"data": fields.String(description="DSL export data")}),
)
@api.response(403, "Insufficient permissions")
@console_ns.response(403, "Insufficient permissions")
@get_app_model
@setup_required
@login_required
@ -353,21 +495,23 @@ class AppExportApi(Resource):
}
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json", help="Name to check")
@console_ns.route("/apps/<uuid:app_id>/name")
class AppNameApi(Resource):
@api.doc("check_app_name")
@api.doc(description="Check if app name is available")
@api.doc(params={"app_id": "Application ID"})
@api.expect(api.parser().add_argument("name", type=str, required=True, location="args", help="Name to check"))
@api.response(200, "Name availability checked")
@console_ns.doc("check_app_name")
@console_ns.doc(description="Check if app name is available")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(200, "Name availability checked")
@setup_required
@login_required
@account_initialization_required
@get_app_model
@marshal_with(app_detail_fields)
@marshal_with(app_detail_model)
@edit_permission_required
def post(self, app_model):
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
args = parser.parse_args()
app_service = AppService()
@ -378,11 +522,11 @@ class AppNameApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/icon")
class AppIconApi(Resource):
@api.doc("update_app_icon")
@api.doc(description="Update application icon")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_app_icon")
@console_ns.doc(description="Update application icon")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"AppIconRequest",
{
"icon": fields.String(required=True, description="Icon data"),
@ -391,13 +535,13 @@ class AppIconApi(Resource):
},
)
)
@api.response(200, "Icon updated successfully")
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Icon updated successfully")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@get_app_model
@marshal_with(app_detail_fields)
@marshal_with(app_detail_model)
@edit_permission_required
def post(self, app_model):
parser = (
@ -415,21 +559,21 @@ class AppIconApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/site-enable")
class AppSiteStatus(Resource):
@api.doc("update_app_site_status")
@api.doc(description="Enable or disable app site")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_app_site_status")
@console_ns.doc(description="Enable or disable app site")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"AppSiteStatusRequest", {"enable_site": fields.Boolean(required=True, description="Enable or disable site")}
)
)
@api.response(200, "Site status updated successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Site status updated successfully", app_detail_model)
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@get_app_model
@marshal_with(app_detail_fields)
@marshal_with(app_detail_model)
@edit_permission_required
def post(self, app_model):
parser = reqparse.RequestParser().add_argument("enable_site", type=bool, required=True, location="json")
@ -443,27 +587,23 @@ class AppSiteStatus(Resource):
@console_ns.route("/apps/<uuid:app_id>/api-enable")
class AppApiStatus(Resource):
@api.doc("update_app_api_status")
@api.doc(description="Enable or disable app API")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_app_api_status")
@console_ns.doc(description="Enable or disable app API")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"AppApiStatusRequest", {"enable_api": fields.Boolean(required=True, description="Enable or disable API")}
)
)
@api.response(200, "API status updated successfully", app_detail_fields)
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "API status updated successfully", app_detail_model)
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
@get_app_model
@marshal_with(app_detail_fields)
@marshal_with(app_detail_model)
def post(self, app_model):
# The role of the current user in the ta table must be admin or owner
current_user, _ = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
parser = reqparse.RequestParser().add_argument("enable_api", type=bool, required=True, location="json")
args = parser.parse_args()
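
AppApiStatus now relies on the is_admin_or_owner_required wrap instead of the inline Forbidden check removed here. A hedged sketch of what such a wrap typically does; this is an assumption about controllers/console/wraps.py, not its actual source:

from functools import wraps

from werkzeug.exceptions import Forbidden

from libs.login import current_account_with_tenant


def is_admin_or_owner_required(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        # The role of the current user in the ta table must be admin or owner.
        current_user, _ = current_account_with_tenant()
        if not current_user.is_admin_or_owner:
            raise Forbidden()
        return view(*args, **kwargs)

    return decorated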
@ -475,10 +615,10 @@ class AppApiStatus(Resource):
@console_ns.route("/apps/<uuid:app_id>/trace")
class AppTraceApi(Resource):
@api.doc("get_app_trace")
@api.doc(description="Get app tracing configuration")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Trace configuration retrieved successfully")
@console_ns.doc("get_app_trace")
@console_ns.doc(description="Get app tracing configuration")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Trace configuration retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -488,11 +628,11 @@ class AppTraceApi(Resource):
return app_trace_config
@api.doc("update_app_trace")
@api.doc(description="Update app tracing configuration")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_app_trace")
@console_ns.doc(description="Update app tracing configuration")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"AppTraceRequest",
{
"enabled": fields.Boolean(required=True, description="Enable or disable tracing"),
@ -500,8 +640,8 @@ class AppTraceApi(Resource):
},
)
)
@api.response(200, "Trace configuration updated successfully")
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Trace configuration updated successfully")
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required

View File

@ -1,4 +1,4 @@
from flask_restx import Resource, marshal_with, reqparse
from flask_restx import Resource, fields, marshal_with, reqparse
from sqlalchemy.orm import Session
from controllers.console.app.wraps import get_app_model
@ -9,7 +9,11 @@ from controllers.console.wraps import (
setup_required,
)
from extensions.ext_database import db
from fields.app_fields import app_import_check_dependencies_fields, app_import_fields
from fields.app_fields import (
app_import_check_dependencies_fields,
app_import_fields,
leaked_dependency_fields,
)
from libs.login import current_account_with_tenant, login_required
from models.model import App
from services.app_dsl_service import AppDslService, ImportStatus
@ -18,30 +22,45 @@ from services.feature_service import FeatureService
from .. import console_ns
# Register models for flask_restx to avoid dict type issues in Swagger
# Register base model first
leaked_dependency_model = console_ns.model("LeakedDependency", leaked_dependency_fields)
app_import_model = console_ns.model("AppImport", app_import_fields)
# For nested models, need to replace nested dict with registered model
app_import_check_dependencies_fields_copy = app_import_check_dependencies_fields.copy()
app_import_check_dependencies_fields_copy["leaked_dependencies"] = fields.List(fields.Nested(leaked_dependency_model))
app_import_check_dependencies_model = console_ns.model(
"AppImportCheckDependencies", app_import_check_dependencies_fields_copy
)
parser = (
reqparse.RequestParser()
.add_argument("mode", type=str, required=True, location="json")
.add_argument("yaml_content", type=str, location="json")
.add_argument("yaml_url", type=str, location="json")
.add_argument("name", type=str, location="json")
.add_argument("description", type=str, location="json")
.add_argument("icon_type", type=str, location="json")
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
.add_argument("app_id", type=str, location="json")
)
@console_ns.route("/apps/imports")
class AppImportApi(Resource):
@console_ns.expect(parser)
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_import_fields)
@marshal_with(app_import_model)
@cloud_edition_billing_resource_check("apps")
@edit_permission_required
def post(self):
# Check user role first
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("mode", type=str, required=True, location="json")
.add_argument("yaml_content", type=str, location="json")
.add_argument("yaml_url", type=str, location="json")
.add_argument("name", type=str, location="json")
.add_argument("description", type=str, location="json")
.add_argument("icon_type", type=str, location="json")
.add_argument("icon", type=str, location="json")
.add_argument("icon_background", type=str, location="json")
.add_argument("app_id", type=str, location="json")
)
args = parser.parse_args()
# Create service with session
@ -79,7 +98,7 @@ class AppImportConfirmApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_import_fields)
@marshal_with(app_import_model)
@edit_permission_required
def post(self, import_id):
# Check user role first
@ -105,7 +124,7 @@ class AppImportCheckDependenciesApi(Resource):
@login_required
@get_app_model
@account_initialization_required
@marshal_with(app_import_check_dependencies_fields)
@marshal_with(app_import_check_dependencies_model)
@edit_permission_required
def get(self, app_model: App):
with Session(db.engine) as session:

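app_import.py shows the other half of the migration: shared field dicts from fields/app_fields.py become named namespace models, and for nested structures the leaf model is registered first, then the parent dict is copied and its raw nested dict swapped for fields.Nested(leaf_model) before the parent is registered. A compact sketch of that copy-and-swap pattern, with stand-in field names (the real definitions live in fields/app_fields.py):

from flask_restx import Namespace, fields

ns = Namespace("example")

# 1. Register the leaf model that will be nested elsewhere.
leaked_dependency_model = ns.model(
    "LeakedDependencyExample",
    {"type": fields.String, "value": fields.Raw},  # stand-in fields
)

# 2. Plain dicts still marshal fine, but Swagger needs a registered model for
#    nesting, so copy the shared parent dict and replace the nested entry.
check_dependencies_fields = {
    "leaked_dependencies": fields.List(fields.Raw),  # placeholder for the shared definition
}
check_dependencies_fields = check_dependencies_fields.copy()  # shared with other modules in the real code
check_dependencies_fields["leaked_dependencies"] = fields.List(fields.Nested(leaked_dependency_model))
check_dependencies_model = ns.model("AppImportCheckDependenciesExample", check_dependencies_fields)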
View File

@ -5,7 +5,7 @@ from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import InternalServerError
import services
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.error import (
AppUnavailableError,
AudioTooLargeError,
@ -36,16 +36,16 @@ logger = logging.getLogger(__name__)
@console_ns.route("/apps/<uuid:app_id>/audio-to-text")
class ChatMessageAudioApi(Resource):
@api.doc("chat_message_audio_transcript")
@api.doc(description="Transcribe audio to text for chat messages")
@api.doc(params={"app_id": "App ID"})
@api.response(
@console_ns.doc("chat_message_audio_transcript")
@console_ns.doc(description="Transcript audio to text for chat messages")
@console_ns.doc(params={"app_id": "App ID"})
@console_ns.response(
200,
"Audio transcription successful",
api.model("AudioTranscriptResponse", {"text": fields.String(description="Transcribed text from audio")}),
console_ns.model("AudioTranscriptResponse", {"text": fields.String(description="Transcribed text from audio")}),
)
@api.response(400, "Bad request - No audio uploaded or unsupported type")
@api.response(413, "Audio file too large")
@console_ns.response(400, "Bad request - No audio uploaded or unsupported type")
@console_ns.response(413, "Audio file too large")
@setup_required
@login_required
@account_initialization_required
@ -89,11 +89,11 @@ class ChatMessageAudioApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/text-to-audio")
class ChatMessageTextApi(Resource):
@api.doc("chat_message_text_to_speech")
@api.doc(description="Convert text to speech for chat messages")
@api.doc(params={"app_id": "App ID"})
@api.expect(
api.model(
@console_ns.doc("chat_message_text_to_speech")
@console_ns.doc(description="Convert text to speech for chat messages")
@console_ns.doc(params={"app_id": "App ID"})
@console_ns.expect(
console_ns.model(
"TextToSpeechRequest",
{
"message_id": fields.String(description="Message ID"),
@ -103,8 +103,8 @@ class ChatMessageTextApi(Resource):
},
)
)
@api.response(200, "Text to speech conversion successful")
@api.response(400, "Bad request - Invalid parameters")
@console_ns.response(200, "Text to speech conversion successful")
@console_ns.response(400, "Bad request - Invalid parameters")
@get_app_model
@setup_required
@login_required
@ -156,12 +156,16 @@ class ChatMessageTextApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/text-to-audio/voices")
class TextModesApi(Resource):
@api.doc("get_text_to_speech_voices")
@api.doc(description="Get available TTS voices for a specific language")
@api.doc(params={"app_id": "App ID"})
@api.expect(api.parser().add_argument("language", type=str, required=True, location="args", help="Language code"))
@api.response(200, "TTS voices retrieved successfully", fields.List(fields.Raw(description="Available voices")))
@api.response(400, "Invalid language parameter")
@console_ns.doc("get_text_to_speech_voices")
@console_ns.doc(description="Get available TTS voices for a specific language")
@console_ns.doc(params={"app_id": "App ID"})
@console_ns.expect(
console_ns.parser().add_argument("language", type=str, required=True, location="args", help="Language code")
)
@console_ns.response(
200, "TTS voices retrieved successfully", fields.List(fields.Raw(description="Available voices"))
)
@console_ns.response(400, "Invalid language parameter")
@get_app_model
@setup_required
@login_required

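For query-string endpoints such as the TTS voices route, the same idea applies to parsers: console_ns.parser() produces a RequestParser whose arguments feed both the Swagger docs (via @console_ns.expect) and runtime parsing. A small sketch with only public flask_restx calls; the route and argument names are illustrative:

from flask_restx import Namespace, Resource

ns = Namespace("example")

voices_parser = ns.parser().add_argument(
    "language", type=str, required=True, location="args", help="Language code"
)


@ns.route("/text-to-audio/voices")
class ExampleVoicesApi(Resource):
    @ns.expect(voices_parser)
    @ns.response(200, "TTS voices retrieved successfully")
    def get(self):
        args = voices_parser.parse_args()
        # A real handler would ask the TTS model for the voices available in args["language"].
        return [{"name": "default", "language": args["language"]}]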
View File

@ -5,7 +5,7 @@ from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import InternalServerError, NotFound
import services
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.error import (
AppUnavailableError,
CompletionRequestError,
@ -17,7 +17,6 @@ from controllers.console.app.error import (
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import (
ModelCurrentlyNotSupportError,
@ -32,6 +31,7 @@ from libs.login import current_user, login_required
from models import Account
from models.model import AppMode
from services.app_generate_service import AppGenerateService
from services.app_task_service import AppTaskService
from services.errors.llm import InvokeRateLimitError
logger = logging.getLogger(__name__)
@ -40,11 +40,11 @@ logger = logging.getLogger(__name__)
# define completion message api for user
@console_ns.route("/apps/<uuid:app_id>/completion-messages")
class CompletionMessageApi(Resource):
@api.doc("create_completion_message")
@api.doc(description="Generate completion message for debugging")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("create_completion_message")
@console_ns.doc(description="Generate completion message for debugging")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"CompletionMessageRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
@ -56,9 +56,9 @@ class CompletionMessageApi(Resource):
},
)
)
@api.response(200, "Completion generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(404, "App not found")
@console_ns.response(200, "Completion generated successfully")
@console_ns.response(400, "Invalid request parameters")
@console_ns.response(404, "App not found")
@setup_required
@login_required
@account_initialization_required
@ -110,10 +110,10 @@ class CompletionMessageApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/completion-messages/<string:task_id>/stop")
class CompletionMessageStopApi(Resource):
@api.doc("stop_completion_message")
@api.doc(description="Stop a running completion message generation")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"})
@api.response(200, "Task stopped successfully")
@console_ns.doc("stop_completion_message")
@console_ns.doc(description="Stop a running completion message generation")
@console_ns.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"})
@console_ns.response(200, "Task stopped successfully")
@setup_required
@login_required
@account_initialization_required
@ -121,18 +121,24 @@ class CompletionMessageStopApi(Resource):
def post(self, app_model, task_id):
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
AppTaskService.stop_task(
task_id=task_id,
invoke_from=InvokeFrom.DEBUGGER,
user_id=current_user.id,
app_mode=AppMode.value_of(app_model.mode),
)
return {"result": "success"}, 200
@console_ns.route("/apps/<uuid:app_id>/chat-messages")
class ChatMessageApi(Resource):
@api.doc("create_chat_message")
@api.doc(description="Generate chat message for debugging")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("create_chat_message")
@console_ns.doc(description="Generate chat message for debugging")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"ChatMessageRequest",
{
"inputs": fields.Raw(required=True, description="Input variables"),
@ -146,9 +152,9 @@ class ChatMessageApi(Resource):
},
)
)
@api.response(200, "Chat message generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(404, "App or conversation not found")
@console_ns.response(200, "Chat message generated successfully")
@console_ns.response(400, "Invalid request parameters")
@console_ns.response(404, "App or conversation not found")
@setup_required
@login_required
@account_initialization_required
@ -209,10 +215,10 @@ class ChatMessageApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/chat-messages/<string:task_id>/stop")
class ChatMessageStopApi(Resource):
@api.doc("stop_chat_message")
@api.doc(description="Stop a running chat message generation")
@api.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"})
@api.response(200, "Task stopped successfully")
@console_ns.doc("stop_chat_message")
@console_ns.doc(description="Stop a running chat message generation")
@console_ns.doc(params={"app_id": "Application ID", "task_id": "Task ID to stop"})
@console_ns.response(200, "Task stopped successfully")
@setup_required
@login_required
@account_initialization_required
@ -220,6 +226,12 @@ class ChatMessageStopApi(Resource):
def post(self, app_model, task_id):
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
AppTaskService.stop_task(
task_id=task_id,
invoke_from=InvokeFrom.DEBUGGER,
user_id=current_user.id,
app_mode=AppMode.value_of(app_model.mode),
)
return {"result": "success"}, 200

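Besides the decorator migration, both stop endpoints in this file swap the direct AppQueueManager.set_stop_flag call for AppTaskService.stop_task, passing the app mode, presumably so the service can pick the right stop mechanism per mode. A sketch of the call shape exactly as it appears in the hunks above (AppTaskService, AppMode and InvokeFrom are Dify-internal; no arguments beyond the ones shown are assumed):

from core.app.entities.app_invoke_entities import InvokeFrom
from models.model import AppMode
from services.app_task_service import AppTaskService


def stop_debug_task(app_model, task_id: str, user_id: str) -> dict:
    # Mirrors the new handler bodies: resolve the app mode and delegate the stop
    # request to the task service instead of setting the queue flag directly.
    AppTaskService.stop_task(
        task_id=task_id,
        invoke_from=InvokeFrom.DEBUGGER,
        user_id=user_id,
        app_mode=AppMode.value_of(app_model.mode),
    )
    return {"result": "success"}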
View File

@ -1,38 +1,290 @@
import sqlalchemy as sa
from flask import abort
from flask_restx import Resource, marshal_with, reqparse
from flask_restx import Resource, fields, marshal_with, reqparse
from flask_restx.inputs import int_range
from sqlalchemy import func, or_
from sqlalchemy.orm import joinedload
from werkzeug.exceptions import NotFound
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
from extensions.ext_database import db
from fields.conversation_fields import (
conversation_detail_fields,
conversation_message_detail_fields,
conversation_pagination_fields,
conversation_with_summary_pagination_fields,
)
from fields.conversation_fields import MessageTextField
from fields.raws import FilesContainedField
from libs.datetime_utils import naive_utc_now, parse_time_range
from libs.helper import DatetimeString
from libs.helper import DatetimeString, TimestampField
from libs.login import current_account_with_tenant, login_required
from models import Conversation, EndUser, Message, MessageAnnotation
from models.model import AppMode
from services.conversation_service import ConversationService
from services.errors.conversation import ConversationNotExistsError
# Register models for flask_restx to avoid dict type issues in Swagger
# Register in dependency order: base models first, then dependent models
# Base models
simple_account_model = console_ns.model(
"SimpleAccount",
{
"id": fields.String,
"name": fields.String,
"email": fields.String,
},
)
feedback_stat_model = console_ns.model(
"FeedbackStat",
{
"like": fields.Integer,
"dislike": fields.Integer,
},
)
status_count_model = console_ns.model(
"StatusCount",
{
"success": fields.Integer,
"failed": fields.Integer,
"partial_success": fields.Integer,
},
)
message_file_model = console_ns.model(
"MessageFile",
{
"id": fields.String,
"filename": fields.String,
"type": fields.String,
"url": fields.String,
"mime_type": fields.String,
"size": fields.Integer,
"transfer_method": fields.String,
"belongs_to": fields.String(default="user"),
"upload_file_id": fields.String(default=None),
},
)
agent_thought_model = console_ns.model(
"AgentThought",
{
"id": fields.String,
"chain_id": fields.String,
"message_id": fields.String,
"position": fields.Integer,
"thought": fields.String,
"tool": fields.String,
"tool_labels": fields.Raw,
"tool_input": fields.String,
"created_at": TimestampField,
"observation": fields.String,
"files": fields.List(fields.String),
},
)
simple_model_config_model = console_ns.model(
"SimpleModelConfig",
{
"model": fields.Raw(attribute="model_dict"),
"pre_prompt": fields.String,
},
)
model_config_model = console_ns.model(
"ModelConfig",
{
"opening_statement": fields.String,
"suggested_questions": fields.Raw,
"model": fields.Raw,
"user_input_form": fields.Raw,
"pre_prompt": fields.String,
"agent_mode": fields.Raw,
},
)
# Models that depend on simple_account_model
feedback_model = console_ns.model(
"Feedback",
{
"rating": fields.String,
"content": fields.String,
"from_source": fields.String,
"from_end_user_id": fields.String,
"from_account": fields.Nested(simple_account_model, allow_null=True),
},
)
annotation_model = console_ns.model(
"Annotation",
{
"id": fields.String,
"question": fields.String,
"content": fields.String,
"account": fields.Nested(simple_account_model, allow_null=True),
"created_at": TimestampField,
},
)
annotation_hit_history_model = console_ns.model(
"AnnotationHitHistory",
{
"annotation_id": fields.String(attribute="id"),
"annotation_create_account": fields.Nested(simple_account_model, allow_null=True),
"created_at": TimestampField,
},
)
# Simple message detail model
simple_message_detail_model = console_ns.model(
"SimpleMessageDetail",
{
"inputs": FilesContainedField,
"query": fields.String,
"message": MessageTextField,
"answer": fields.String,
},
)
# Message detail model that depends on multiple models
message_detail_model = console_ns.model(
"MessageDetail",
{
"id": fields.String,
"conversation_id": fields.String,
"inputs": FilesContainedField,
"query": fields.String,
"message": fields.Raw,
"message_tokens": fields.Integer,
"answer": fields.String(attribute="re_sign_file_url_answer"),
"answer_tokens": fields.Integer,
"provider_response_latency": fields.Float,
"from_source": fields.String,
"from_end_user_id": fields.String,
"from_account_id": fields.String,
"feedbacks": fields.List(fields.Nested(feedback_model)),
"workflow_run_id": fields.String,
"annotation": fields.Nested(annotation_model, allow_null=True),
"annotation_hit_history": fields.Nested(annotation_hit_history_model, allow_null=True),
"created_at": TimestampField,
"agent_thoughts": fields.List(fields.Nested(agent_thought_model)),
"message_files": fields.List(fields.Nested(message_file_model)),
"metadata": fields.Raw(attribute="message_metadata_dict"),
"status": fields.String,
"error": fields.String,
"parent_message_id": fields.String,
},
)
# Conversation models
conversation_fields_model = console_ns.model(
"Conversation",
{
"id": fields.String,
"status": fields.String,
"from_source": fields.String,
"from_end_user_id": fields.String,
"from_end_user_session_id": fields.String(),
"from_account_id": fields.String,
"from_account_name": fields.String,
"read_at": TimestampField,
"created_at": TimestampField,
"updated_at": TimestampField,
"annotation": fields.Nested(annotation_model, allow_null=True),
"model_config": fields.Nested(simple_model_config_model),
"user_feedback_stats": fields.Nested(feedback_stat_model),
"admin_feedback_stats": fields.Nested(feedback_stat_model),
"message": fields.Nested(simple_message_detail_model, attribute="first_message"),
},
)
conversation_pagination_model = console_ns.model(
"ConversationPagination",
{
"page": fields.Integer,
"limit": fields.Integer(attribute="per_page"),
"total": fields.Integer,
"has_more": fields.Boolean(attribute="has_next"),
"data": fields.List(fields.Nested(conversation_fields_model), attribute="items"),
},
)
conversation_message_detail_model = console_ns.model(
"ConversationMessageDetail",
{
"id": fields.String,
"status": fields.String,
"from_source": fields.String,
"from_end_user_id": fields.String,
"from_account_id": fields.String,
"created_at": TimestampField,
"model_config": fields.Nested(model_config_model),
"message": fields.Nested(message_detail_model, attribute="first_message"),
},
)
conversation_with_summary_model = console_ns.model(
"ConversationWithSummary",
{
"id": fields.String,
"status": fields.String,
"from_source": fields.String,
"from_end_user_id": fields.String,
"from_end_user_session_id": fields.String,
"from_account_id": fields.String,
"from_account_name": fields.String,
"name": fields.String,
"summary": fields.String(attribute="summary_or_query"),
"read_at": TimestampField,
"created_at": TimestampField,
"updated_at": TimestampField,
"annotated": fields.Boolean,
"model_config": fields.Nested(simple_model_config_model),
"message_count": fields.Integer,
"user_feedback_stats": fields.Nested(feedback_stat_model),
"admin_feedback_stats": fields.Nested(feedback_stat_model),
"status_count": fields.Nested(status_count_model),
},
)
conversation_with_summary_pagination_model = console_ns.model(
"ConversationWithSummaryPagination",
{
"page": fields.Integer,
"limit": fields.Integer(attribute="per_page"),
"total": fields.Integer,
"has_more": fields.Boolean(attribute="has_next"),
"data": fields.List(fields.Nested(conversation_with_summary_model), attribute="items"),
},
)
conversation_detail_model = console_ns.model(
"ConversationDetail",
{
"id": fields.String,
"status": fields.String,
"from_source": fields.String,
"from_end_user_id": fields.String,
"from_account_id": fields.String,
"created_at": TimestampField,
"updated_at": TimestampField,
"annotated": fields.Boolean,
"introduction": fields.String,
"model_config": fields.Nested(model_config_model),
"message_count": fields.Integer,
"user_feedback_stats": fields.Nested(feedback_stat_model),
"admin_feedback_stats": fields.Nested(feedback_stat_model),
},
)
@console_ns.route("/apps/<uuid:app_id>/completion-conversations")
class CompletionConversationApi(Resource):
@api.doc("list_completion_conversations")
@api.doc(description="Get completion conversations with pagination and filtering")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
@console_ns.doc("list_completion_conversations")
@console_ns.doc(description="Get completion conversations with pagination and filtering")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.parser()
.add_argument("keyword", type=str, location="args", help="Search keyword")
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
@ -47,13 +299,13 @@ class CompletionConversationApi(Resource):
.add_argument("page", type=int, location="args", default=1, help="Page number")
.add_argument("limit", type=int, location="args", default=20, help="Page size (1-100)")
)
@api.response(200, "Success", conversation_pagination_fields)
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Success", conversation_pagination_model)
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_pagination_fields)
@marshal_with(conversation_pagination_model)
@edit_permission_required
def get(self, app_model):
current_user, _ = current_account_with_tenant()
@ -122,29 +374,29 @@ class CompletionConversationApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/completion-conversations/<uuid:conversation_id>")
class CompletionConversationDetailApi(Resource):
@api.doc("get_completion_conversation")
@api.doc(description="Get completion conversation details with messages")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(200, "Success", conversation_message_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@console_ns.doc("get_completion_conversation")
@console_ns.doc(description="Get completion conversation details with messages")
@console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@console_ns.response(200, "Success", conversation_message_detail_model)
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.COMPLETION)
@marshal_with(conversation_message_detail_fields)
@marshal_with(conversation_message_detail_model)
@edit_permission_required
def get(self, app_model, conversation_id):
conversation_id = str(conversation_id)
return _get_conversation(app_model, conversation_id)
@api.doc("delete_completion_conversation")
@api.doc(description="Delete a completion conversation")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(204, "Conversation deleted successfully")
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@console_ns.doc("delete_completion_conversation")
@console_ns.doc(description="Delete a completion conversation")
@console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@console_ns.response(204, "Conversation deleted successfully")
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@ -164,11 +416,11 @@ class CompletionConversationDetailApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/chat-conversations")
class ChatConversationApi(Resource):
@api.doc("list_chat_conversations")
@api.doc(description="Get chat conversations with pagination, filtering and summary")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
@console_ns.doc("list_chat_conversations")
@console_ns.doc(description="Get chat conversations with pagination, filtering and summary")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.parser()
.add_argument("keyword", type=str, location="args", help="Search keyword")
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
@ -192,13 +444,13 @@ class ChatConversationApi(Resource):
help="Sort field and direction",
)
)
@api.response(200, "Success", conversation_with_summary_pagination_fields)
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Success", conversation_with_summary_pagination_model)
@console_ns.response(403, "Insufficient permissions")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_with_summary_pagination_fields)
@marshal_with(conversation_with_summary_pagination_model)
@edit_permission_required
def get(self, app_model):
current_user, _ = current_account_with_tenant()
@ -322,29 +574,29 @@ class ChatConversationApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/chat-conversations/<uuid:conversation_id>")
class ChatConversationDetailApi(Resource):
@api.doc("get_chat_conversation")
@api.doc(description="Get chat conversation details")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(200, "Success", conversation_detail_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@console_ns.doc("get_chat_conversation")
@console_ns.doc(description="Get chat conversation details")
@console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@console_ns.response(200, "Success", conversation_detail_model)
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(404, "Conversation not found")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(conversation_detail_fields)
@marshal_with(conversation_detail_model)
@edit_permission_required
def get(self, app_model, conversation_id):
conversation_id = str(conversation_id)
return _get_conversation(app_model, conversation_id)
@api.doc("delete_chat_conversation")
@api.doc(description="Delete a chat conversation")
@api.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@api.response(204, "Conversation deleted successfully")
@api.response(403, "Insufficient permissions")
@api.response(404, "Conversation not found")
@console_ns.doc("delete_chat_conversation")
@console_ns.doc(description="Delete a chat conversation")
@console_ns.doc(params={"app_id": "Application ID", "conversation_id": "Conversation ID"})
@console_ns.response(204, "Conversation deleted successfully")
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(404, "Conversation not found")
@setup_required
@login_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])

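conversation.py now defines its whole response-model tree on console_ns. The ordering matters: a model must be registered before any model that nests it, and @marshal_with accepts the registered model just as it accepted the plain fields dict before. A minimal ordering example with made-up field sets:

from flask_restx import Namespace, Resource, fields, marshal_with

ns = Namespace("example")

# Leaf model first...
account_model = ns.model("AccountExample", {"id": fields.String, "name": fields.String})

# ...then the model that nests it, so the Swagger reference can be resolved.
feedback_model = ns.model(
    "FeedbackExample",
    {"rating": fields.String, "from_account": fields.Nested(account_model, allow_null=True)},
)


@ns.route("/feedback-demo")
class FeedbackDemo(Resource):
    @marshal_with(feedback_model)
    def get(self):
        # marshal_with behaves as it did with the raw dicts: extra keys are
        # dropped and missing ones come back as None.
        return {"rating": "like", "from_account": {"id": "1", "name": "Alice"}}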
View File

@ -1,33 +1,49 @@
from flask_restx import Resource, marshal_with, reqparse
from flask_restx import Resource, fields, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.conversation_variable_fields import paginated_conversation_variable_fields
from fields.conversation_variable_fields import (
conversation_variable_fields,
paginated_conversation_variable_fields,
)
from libs.login import login_required
from models import ConversationVariable
from models.model import AppMode
# Register models for flask_restx to avoid dict type issues in Swagger
# Register base model first
conversation_variable_model = console_ns.model("ConversationVariable", conversation_variable_fields)
# For nested models, need to replace nested dict with registered model
paginated_conversation_variable_fields_copy = paginated_conversation_variable_fields.copy()
paginated_conversation_variable_fields_copy["data"] = fields.List(
fields.Nested(conversation_variable_model), attribute="data"
)
paginated_conversation_variable_model = console_ns.model(
"PaginatedConversationVariable", paginated_conversation_variable_fields_copy
)
@console_ns.route("/apps/<uuid:app_id>/conversation-variables")
class ConversationVariablesApi(Resource):
@api.doc("get_conversation_variables")
@api.doc(description="Get conversation variables for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser().add_argument(
@console_ns.doc("get_conversation_variables")
@console_ns.doc(description="Get conversation variables for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.parser().add_argument(
"conversation_id", type=str, location="args", help="Conversation ID to filter variables"
)
)
@api.response(200, "Conversation variables retrieved successfully", paginated_conversation_variable_fields)
@console_ns.response(200, "Conversation variables retrieved successfully", paginated_conversation_variable_model)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.ADVANCED_CHAT)
@marshal_with(paginated_conversation_variable_fields)
@marshal_with(paginated_conversation_variable_model)
def get(self, app_model):
parser = reqparse.RequestParser().add_argument("conversation_id", type=str, location="args")
args = parser.parse_args()

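The same copy-and-swap appears once more for the paginated conversation-variable wrapper. If the pattern keeps spreading, it could be factored into a tiny helper; the function below is purely illustrative and does not exist in the repository:

from flask_restx import Namespace, fields


def register_paginated_model(ns: Namespace, name: str, item_model, base_fields: dict):
    # Copy the shared pagination dict, replace its raw "data" entry with a typed
    # list of the already-registered item model, then register the copy.
    paginated_fields = base_fields.copy()
    paginated_fields["data"] = fields.List(fields.Nested(item_model), attribute="data")
    return ns.model(name, paginated_fields)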
View File

@ -115,9 +115,3 @@ class InvokeRateLimitError(BaseHTTPException):
error_code = "rate_limit_error"
description = "Rate Limit Error"
code = 429
class NeedAddIdsError(BaseHTTPException):
error_code = "need_add_ids"
description = "Need to add ids."
code = 400

View File

@ -2,7 +2,7 @@ from collections.abc import Sequence
from flask_restx import Resource, fields, reqparse
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.error import (
CompletionRequestError,
ProviderModelCurrentlyNotSupportError,
@ -11,6 +11,7 @@ from controllers.console.app.error import (
)
from controllers.console.wraps import account_initialization_required, setup_required
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.helper.code_executor.code_node_provider import CodeNodeProvider
from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider
from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider
from core.llm_generator.llm_generator import LLMGenerator
@ -23,10 +24,10 @@ from services.workflow_service import WorkflowService
@console_ns.route("/rule-generate")
class RuleGenerateApi(Resource):
@api.doc("generate_rule_config")
@api.doc(description="Generate rule configuration using LLM")
@api.expect(
api.model(
@console_ns.doc("generate_rule_config")
@console_ns.doc(description="Generate rule configuration using LLM")
@console_ns.expect(
console_ns.model(
"RuleGenerateRequest",
{
"instruction": fields.String(required=True, description="Rule generation instruction"),
@ -35,9 +36,9 @@ class RuleGenerateApi(Resource):
},
)
)
@api.response(200, "Rule configuration generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(402, "Provider quota exceeded")
@console_ns.response(200, "Rule configuration generated successfully")
@console_ns.response(400, "Invalid request parameters")
@console_ns.response(402, "Provider quota exceeded")
@setup_required
@login_required
@account_initialization_required
@ -72,10 +73,10 @@ class RuleGenerateApi(Resource):
@console_ns.route("/rule-code-generate")
class RuleCodeGenerateApi(Resource):
@api.doc("generate_rule_code")
@api.doc(description="Generate code rules using LLM")
@api.expect(
api.model(
@console_ns.doc("generate_rule_code")
@console_ns.doc(description="Generate code rules using LLM")
@console_ns.expect(
console_ns.model(
"RuleCodeGenerateRequest",
{
"instruction": fields.String(required=True, description="Code generation instruction"),
@ -87,9 +88,9 @@ class RuleCodeGenerateApi(Resource):
},
)
)
@api.response(200, "Code rules generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(402, "Provider quota exceeded")
@console_ns.response(200, "Code rules generated successfully")
@console_ns.response(400, "Invalid request parameters")
@console_ns.response(402, "Provider quota exceeded")
@setup_required
@login_required
@account_initialization_required
@ -125,10 +126,10 @@ class RuleCodeGenerateApi(Resource):
@console_ns.route("/rule-structured-output-generate")
class RuleStructuredOutputGenerateApi(Resource):
@api.doc("generate_structured_output")
@api.doc(description="Generate structured output rules using LLM")
@api.expect(
api.model(
@console_ns.doc("generate_structured_output")
@console_ns.doc(description="Generate structured output rules using LLM")
@console_ns.expect(
console_ns.model(
"StructuredOutputGenerateRequest",
{
"instruction": fields.String(required=True, description="Structured output generation instruction"),
@ -136,9 +137,9 @@ class RuleStructuredOutputGenerateApi(Resource):
},
)
)
@api.response(200, "Structured output generated successfully")
@api.response(400, "Invalid request parameters")
@api.response(402, "Provider quota exceeded")
@console_ns.response(200, "Structured output generated successfully")
@console_ns.response(400, "Invalid request parameters")
@console_ns.response(402, "Provider quota exceeded")
@setup_required
@login_required
@account_initialization_required
@ -171,10 +172,10 @@ class RuleStructuredOutputGenerateApi(Resource):
@console_ns.route("/instruction-generate")
class InstructionGenerateApi(Resource):
@api.doc("generate_instruction")
@api.doc(description="Generate instruction for workflow nodes or general use")
@api.expect(
api.model(
@console_ns.doc("generate_instruction")
@console_ns.doc(description="Generate instruction for workflow nodes or general use")
@console_ns.expect(
console_ns.model(
"InstructionGenerateRequest",
{
"flow_id": fields.String(required=True, description="Workflow/Flow ID"),
@ -187,9 +188,9 @@ class InstructionGenerateApi(Resource):
},
)
)
@api.response(200, "Instruction generated successfully")
@api.response(400, "Invalid request parameters or flow/workflow not found")
@api.response(402, "Provider quota exceeded")
@console_ns.response(200, "Instruction generated successfully")
@console_ns.response(400, "Invalid request parameters or flow/workflow not found")
@console_ns.response(402, "Provider quota exceeded")
@setup_required
@login_required
@account_initialization_required
@ -206,13 +207,11 @@ class InstructionGenerateApi(Resource):
)
args = parser.parse_args()
_, current_tenant_id = current_account_with_tenant()
code_template = (
Python3CodeProvider.get_default_code()
if args["language"] == "python"
else (JavascriptCodeProvider.get_default_code())
if args["language"] == "javascript"
else ""
providers: list[type[CodeNodeProvider]] = [Python3CodeProvider, JavascriptCodeProvider]
code_provider: type[CodeNodeProvider] | None = next(
(p for p in providers if p.is_accept_language(args["language"])), None
)
code_template = code_provider.get_default_code() if code_provider else ""
try:
# Generate from nothing for a workflow node
if (args["current"] == code_template or args["current"] == "") and args["node_id"] != "":
@ -284,10 +283,10 @@ class InstructionGenerateApi(Resource):
@console_ns.route("/instruction-generate/template")
class InstructionGenerationTemplateApi(Resource):
@api.doc("get_instruction_template")
@api.doc(description="Get instruction generation template")
@api.expect(
api.model(
@console_ns.doc("get_instruction_template")
@console_ns.doc(description="Get instruction generation template")
@console_ns.expect(
console_ns.model(
"InstructionTemplateRequest",
{
"instruction": fields.String(required=True, description="Template instruction"),
@ -295,8 +294,8 @@ class InstructionGenerationTemplateApi(Resource):
},
)
)
@api.response(200, "Template retrieved successfully")
@api.response(400, "Invalid request parameters")
@console_ns.response(200, "Template retrieved successfully")
@console_ns.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required

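One behavioural change hides among the decorator edits: InstructionGenerateApi no longer hard-codes a python/javascript ternary for the default code template; it asks each CodeNodeProvider whether it accepts the requested language and takes the first match. The selection shape in isolation, with a stub standing in for Python3CodeProvider and JavascriptCodeProvider:

class _PythonProviderStub:
    @staticmethod
    def is_accept_language(language: str) -> bool:
        return language == "python"

    @staticmethod
    def get_default_code() -> str:
        return "def main():\n    pass\n"


def default_code_for(language: str, providers: list) -> str:
    # The first provider that accepts the language wins; unknown languages fall back to "".
    provider = next((p for p in providers if p.is_accept_language(language)), None)
    return provider.get_default_code() if provider else ""


print(default_code_for("python", [_PythonProviderStub]))  # the stub's default snippet
print(default_code_for("ruby", [_PythonProviderStub]))    # ""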
View File

@ -4,7 +4,7 @@ from enum import StrEnum
from flask_restx import Resource, fields, marshal_with, reqparse
from werkzeug.exceptions import NotFound
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from extensions.ext_database import db
@ -12,6 +12,9 @@ from fields.app_fields import app_server_fields
from libs.login import current_account_with_tenant, login_required
from models.model import AppMCPServer
# Register model for flask_restx to avoid dict type issues in Swagger
app_server_model = console_ns.model("AppServer", app_server_fields)
class AppMCPServerStatus(StrEnum):
ACTIVE = "active"
@ -20,24 +23,24 @@ class AppMCPServerStatus(StrEnum):
@console_ns.route("/apps/<uuid:app_id>/server")
class AppMCPServerController(Resource):
@api.doc("get_app_mcp_server")
@api.doc(description="Get MCP server configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "MCP server configuration retrieved successfully", app_server_fields)
@console_ns.doc("get_app_mcp_server")
@console_ns.doc(description="Get MCP server configuration for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "MCP server configuration retrieved successfully", app_server_model)
@login_required
@account_initialization_required
@setup_required
@get_app_model
@marshal_with(app_server_fields)
@marshal_with(app_server_model)
def get(self, app_model):
server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == app_model.id).first()
return server
@api.doc("create_app_mcp_server")
@api.doc(description="Create MCP server configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("create_app_mcp_server")
@console_ns.doc(description="Create MCP server configuration for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"MCPServerCreateRequest",
{
"description": fields.String(description="Server description"),
@ -45,13 +48,13 @@ class AppMCPServerController(Resource):
},
)
)
@api.response(201, "MCP server configuration created successfully", app_server_fields)
@api.response(403, "Insufficient permissions")
@console_ns.response(201, "MCP server configuration created successfully", app_server_model)
@console_ns.response(403, "Insufficient permissions")
@account_initialization_required
@get_app_model
@login_required
@setup_required
@marshal_with(app_server_fields)
@marshal_with(app_server_model)
@edit_permission_required
def post(self, app_model):
_, current_tenant_id = current_account_with_tenant()
@ -79,11 +82,11 @@ class AppMCPServerController(Resource):
db.session.commit()
return server
@api.doc("update_app_mcp_server")
@api.doc(description="Update MCP server configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_app_mcp_server")
@console_ns.doc(description="Update MCP server configuration for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"MCPServerUpdateRequest",
{
"id": fields.String(required=True, description="Server ID"),
@ -93,14 +96,14 @@ class AppMCPServerController(Resource):
},
)
)
@api.response(200, "MCP server configuration updated successfully", app_server_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Server not found")
@console_ns.response(200, "MCP server configuration updated successfully", app_server_model)
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(404, "Server not found")
@get_app_model
@login_required
@setup_required
@account_initialization_required
@marshal_with(app_server_fields)
@marshal_with(app_server_model)
@edit_permission_required
def put(self, app_model):
parser = (
@ -134,16 +137,16 @@ class AppMCPServerController(Resource):
@console_ns.route("/apps/<uuid:server_id>/server/refresh")
class AppMCPServerRefreshController(Resource):
@api.doc("refresh_app_mcp_server")
@api.doc(description="Refresh MCP server configuration and regenerate server code")
@api.doc(params={"server_id": "Server ID"})
@api.response(200, "MCP server refreshed successfully", app_server_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "Server not found")
@console_ns.doc("refresh_app_mcp_server")
@console_ns.doc(description="Refresh MCP server configuration and regenerate server code")
@console_ns.doc(params={"server_id": "Server ID"})
@console_ns.response(200, "MCP server refreshed successfully", app_server_model)
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(404, "Server not found")
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_server_fields)
@marshal_with(app_server_model)
@edit_permission_required
def get(self, server_id):
_, current_tenant_id = current_account_with_tenant()

View File

@ -5,7 +5,7 @@ from flask_restx.inputs import int_range
from sqlalchemy import exists, select
from werkzeug.exceptions import InternalServerError, NotFound
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.error import (
CompletionRequestError,
ProviderModelCurrentlyNotSupportError,
@ -23,8 +23,8 @@ from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from fields.conversation_fields import message_detail_fields
from libs.helper import uuid_value
from fields.raws import FilesContainedField
from libs.helper import TimestampField, uuid_value
from libs.infinite_scroll_pagination import InfiniteScrollPagination
from libs.login import current_account_with_tenant, login_required
from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback
@ -34,31 +34,142 @@ from services.message_service import MessageService
logger = logging.getLogger(__name__)
# Register models for flask_restx to avoid dict type issues in Swagger
# Register in dependency order: base models first, then dependent models
# Base models
simple_account_model = console_ns.model(
"SimpleAccount",
{
"id": fields.String,
"name": fields.String,
"email": fields.String,
},
)
message_file_model = console_ns.model(
"MessageFile",
{
"id": fields.String,
"filename": fields.String,
"type": fields.String,
"url": fields.String,
"mime_type": fields.String,
"size": fields.Integer,
"transfer_method": fields.String,
"belongs_to": fields.String(default="user"),
"upload_file_id": fields.String(default=None),
},
)
agent_thought_model = console_ns.model(
"AgentThought",
{
"id": fields.String,
"chain_id": fields.String,
"message_id": fields.String,
"position": fields.Integer,
"thought": fields.String,
"tool": fields.String,
"tool_labels": fields.Raw,
"tool_input": fields.String,
"created_at": TimestampField,
"observation": fields.String,
"files": fields.List(fields.String),
},
)
# Models that depend on simple_account_model
feedback_model = console_ns.model(
"Feedback",
{
"rating": fields.String,
"content": fields.String,
"from_source": fields.String,
"from_end_user_id": fields.String,
"from_account": fields.Nested(simple_account_model, allow_null=True),
},
)
annotation_model = console_ns.model(
"Annotation",
{
"id": fields.String,
"question": fields.String,
"content": fields.String,
"account": fields.Nested(simple_account_model, allow_null=True),
"created_at": TimestampField,
},
)
annotation_hit_history_model = console_ns.model(
"AnnotationHitHistory",
{
"annotation_id": fields.String(attribute="id"),
"annotation_create_account": fields.Nested(simple_account_model, allow_null=True),
"created_at": TimestampField,
},
)
# Message detail model that depends on multiple models
message_detail_model = console_ns.model(
"MessageDetail",
{
"id": fields.String,
"conversation_id": fields.String,
"inputs": FilesContainedField,
"query": fields.String,
"message": fields.Raw,
"message_tokens": fields.Integer,
"answer": fields.String(attribute="re_sign_file_url_answer"),
"answer_tokens": fields.Integer,
"provider_response_latency": fields.Float,
"from_source": fields.String,
"from_end_user_id": fields.String,
"from_account_id": fields.String,
"feedbacks": fields.List(fields.Nested(feedback_model)),
"workflow_run_id": fields.String,
"annotation": fields.Nested(annotation_model, allow_null=True),
"annotation_hit_history": fields.Nested(annotation_hit_history_model, allow_null=True),
"created_at": TimestampField,
"agent_thoughts": fields.List(fields.Nested(agent_thought_model)),
"message_files": fields.List(fields.Nested(message_file_model)),
"metadata": fields.Raw(attribute="message_metadata_dict"),
"status": fields.String,
"error": fields.String,
"parent_message_id": fields.String,
},
)
# Message infinite scroll pagination model
message_infinite_scroll_pagination_model = console_ns.model(
"MessageInfiniteScrollPagination",
{
"limit": fields.Integer,
"has_more": fields.Boolean,
"data": fields.List(fields.Nested(message_detail_model)),
},
)
@console_ns.route("/apps/<uuid:app_id>/chat-messages")
class ChatMessageListApi(Resource):
message_infinite_scroll_pagination_fields = {
"limit": fields.Integer,
"has_more": fields.Boolean,
"data": fields.List(fields.Nested(message_detail_fields)),
}
@api.doc("list_chat_messages")
@api.doc(description="Get chat messages for a conversation with pagination")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
@console_ns.doc("list_chat_messages")
@console_ns.doc(description="Get chat messages for a conversation with pagination")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.parser()
.add_argument("conversation_id", type=str, required=True, location="args", help="Conversation ID")
.add_argument("first_id", type=str, location="args", help="First message ID for pagination")
.add_argument("limit", type=int, location="args", default=20, help="Number of messages to return (1-100)")
)
@api.response(200, "Success", message_infinite_scroll_pagination_fields)
@api.response(404, "Conversation not found")
@console_ns.response(200, "Success", message_infinite_scroll_pagination_model)
@console_ns.response(404, "Conversation not found")
@login_required
@account_initialization_required
@setup_required
@get_app_model(mode=[AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT])
@marshal_with(message_infinite_scroll_pagination_fields)
@marshal_with(message_infinite_scroll_pagination_model)
@edit_permission_required
def get(self, app_model):
parser = (
@ -132,11 +243,11 @@ class ChatMessageListApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/feedbacks")
class MessageFeedbackApi(Resource):
@api.doc("create_message_feedback")
@api.doc(description="Create or update message feedback (like/dislike)")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("create_message_feedback")
@console_ns.doc(description="Create or update message feedback (like/dislike)")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"MessageFeedbackRequest",
{
"message_id": fields.String(required=True, description="Message ID"),
@ -144,9 +255,9 @@ class MessageFeedbackApi(Resource):
},
)
)
@api.response(200, "Feedback updated successfully")
@api.response(404, "Message not found")
@api.response(403, "Insufficient permissions")
@console_ns.response(200, "Feedback updated successfully")
@console_ns.response(404, "Message not found")
@console_ns.response(403, "Insufficient permissions")
@get_app_model
@setup_required
@login_required
@ -194,13 +305,13 @@ class MessageFeedbackApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/annotations/count")
class MessageAnnotationCountApi(Resource):
@api.doc("get_annotation_count")
@api.doc(description="Get count of message annotations for the app")
@api.doc(params={"app_id": "Application ID"})
@api.response(
@console_ns.doc("get_annotation_count")
@console_ns.doc(description="Get count of message annotations for the app")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(
200,
"Annotation count retrieved successfully",
api.model("AnnotationCountResponse", {"count": fields.Integer(description="Number of annotations")}),
console_ns.model("AnnotationCountResponse", {"count": fields.Integer(description="Number of annotations")}),
)
@get_app_model
@setup_required
@ -214,15 +325,17 @@ class MessageAnnotationCountApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/chat-messages/<uuid:message_id>/suggested-questions")
class MessageSuggestedQuestionApi(Resource):
@api.doc("get_message_suggested_questions")
@api.doc(description="Get suggested questions for a message")
@api.doc(params={"app_id": "Application ID", "message_id": "Message ID"})
@api.response(
@console_ns.doc("get_message_suggested_questions")
@console_ns.doc(description="Get suggested questions for a message")
@console_ns.doc(params={"app_id": "Application ID", "message_id": "Message ID"})
@console_ns.response(
200,
"Suggested questions retrieved successfully",
api.model("SuggestedQuestionsResponse", {"data": fields.List(fields.String(description="Suggested question"))}),
console_ns.model(
"SuggestedQuestionsResponse", {"data": fields.List(fields.String(description="Suggested question"))}
),
)
@api.response(404, "Message or conversation not found")
@console_ns.response(404, "Message or conversation not found")
@setup_required
@login_required
@account_initialization_required
@ -256,18 +369,70 @@ class MessageSuggestedQuestionApi(Resource):
return {"data": questions}
@console_ns.route("/apps/<uuid:app_id>/messages/<uuid:message_id>")
class MessageApi(Resource):
@api.doc("get_message")
@api.doc(description="Get message details by ID")
@api.doc(params={"app_id": "Application ID", "message_id": "Message ID"})
@api.response(200, "Message retrieved successfully", message_detail_fields)
@api.response(404, "Message not found")
# Shared parser for feedback export (used for both documentation and runtime parsing)
feedback_export_parser = (
console_ns.parser()
.add_argument("from_source", type=str, choices=["user", "admin"], location="args", help="Filter by feedback source")
.add_argument("rating", type=str, choices=["like", "dislike"], location="args", help="Filter by rating")
.add_argument("has_comment", type=bool, location="args", help="Only include feedback with comments")
.add_argument("start_date", type=str, location="args", help="Start date (YYYY-MM-DD)")
.add_argument("end_date", type=str, location="args", help="End date (YYYY-MM-DD)")
.add_argument("format", type=str, choices=["csv", "json"], default="csv", location="args", help="Export format")
)
@console_ns.route("/apps/<uuid:app_id>/feedbacks/export")
class MessageFeedbackExportApi(Resource):
@console_ns.doc("export_feedbacks")
@console_ns.doc(description="Export user feedback data for Google Sheets")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(feedback_export_parser)
@console_ns.response(200, "Feedback data exported successfully")
@console_ns.response(400, "Invalid parameters")
@console_ns.response(500, "Internal server error")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@marshal_with(message_detail_fields)
def get(self, app_model):
args = feedback_export_parser.parse_args()
# Import the service function
from services.feedback_service import FeedbackService
try:
export_data = FeedbackService.export_feedbacks(
app_id=app_model.id,
from_source=args.get("from_source"),
rating=args.get("rating"),
has_comment=args.get("has_comment"),
start_date=args.get("start_date"),
end_date=args.get("end_date"),
format_type=args.get("format", "csv"),
)
return export_data
except ValueError as e:
logger.exception("Parameter validation error in feedback export")
return {"error": f"Parameter validation error: {str(e)}"}, 400
except Exception as e:
logger.exception("Error exporting feedback data")
raise InternalServerError(str(e))
@console_ns.route("/apps/<uuid:app_id>/messages/<uuid:message_id>")
class MessageApi(Resource):
@console_ns.doc("get_message")
@console_ns.doc(description="Get message details by ID")
@console_ns.doc(params={"app_id": "Application ID", "message_id": "Message ID"})
@console_ns.response(200, "Message retrieved successfully", message_detail_model)
@console_ns.response(404, "Message not found")
@get_app_model
@setup_required
@login_required
@account_initialization_required
@marshal_with(message_detail_model)
def get(self, app_model, message_id: str):
message_id = str(message_id)

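message.py also gains a brand-new route, /apps/<app_id>/feedbacks/export, documented through the shared feedback_export_parser and backed by FeedbackService.export_feedbacks. A hedged client-side sketch of calling it; the base URL, token and app id are placeholders, and the query values simply mirror the parser choices above:

import requests

CONSOLE_API = "https://dify.example.com/console/api"   # placeholder
APP_ID = "00000000-0000-0000-0000-000000000000"        # placeholder

resp = requests.get(
    f"{CONSOLE_API}/apps/{APP_ID}/feedbacks/export",
    headers={"Authorization": "Bearer <console-access-token>"},  # placeholder token
    params={
        "from_source": "user",   # "user" or "admin"
        "rating": "like",        # "like" or "dislike"
        "has_comment": "true",   # only feedback that carries a comment
        "format": "csv",         # "csv" or "json"
    },
    timeout=30,
)
resp.raise_for_status()
print(resp.text)  # whatever FeedbackService.export_feedbacks produced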
View File

@ -3,11 +3,10 @@ from typing import cast
from flask import request
from flask_restx import Resource, fields
from werkzeug.exceptions import Forbidden
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from core.agent.entities import AgentToolEntity
from core.tools.tool_manager import ToolManager
from core.tools.utils.configuration import ToolParameterConfigurationManager
@ -21,11 +20,11 @@ from services.app_model_config_service import AppModelConfigService
@console_ns.route("/apps/<uuid:app_id>/model-config")
class ModelConfigResource(Resource):
@api.doc("update_app_model_config")
@api.doc(description="Update application model configuration")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_app_model_config")
@console_ns.doc(description="Update application model configuration")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"ModelConfigRequest",
{
"provider": fields.String(description="Model provider"),
@ -43,20 +42,17 @@ class ModelConfigResource(Resource):
},
)
)
@api.response(200, "Model configuration updated successfully")
@api.response(400, "Invalid configuration")
@api.response(404, "App not found")
@console_ns.response(200, "Model configuration updated successfully")
@console_ns.response(400, "Invalid configuration")
@console_ns.response(404, "App not found")
@setup_required
@login_required
@edit_permission_required
@account_initialization_required
@get_app_model(mode=[AppMode.AGENT_CHAT, AppMode.CHAT, AppMode.COMPLETION])
def post(self, app_model):
"""Modify app model config"""
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
# validate config
model_configuration = AppModelConfigService.validate_configuration(
tenant_id=current_tenant_id,

View File
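model_config.py drops the inline has_edit_permission check in favour of the @edit_permission_required decorator from controllers/console/wraps.py. For orientation, a guard decorator of this kind usually looks roughly like the sketch below; this is an illustration built from the helpers visible in this diff, not the project's actual implementation:

from functools import wraps

from werkzeug.exceptions import Forbidden

from libs.login import current_account_with_tenant  # helper used throughout this diff


def edit_permission_required_sketch(view):
    @wraps(view)
    def wrapper(*args, **kwargs):
        # Assumes the (account, tenant_id) tuple and the has_edit_permission
        # flag seen in the handlers above.
        current_user, _ = current_account_with_tenant()
        if not current_user.has_edit_permission:
            raise Forbidden()
        return view(*args, **kwargs)

    return wrapper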

@ -1,339 +0,0 @@
import json
import time
from werkzeug.wrappers import Request as WerkzeugRequest
from extensions.ext_redis import redis_client
from extensions.ext_socketio import sio
from libs.passport import PassportService
from libs.token import extract_access_token
from services.account_service import AccountService
SESSION_STATE_TTL_SECONDS = 3600
WORKFLOW_ONLINE_USERS_PREFIX = "workflow_online_users:"
WORKFLOW_LEADER_PREFIX = "workflow_leader:"
WS_SID_MAP_PREFIX = "ws_sid_map:"
def _workflow_key(workflow_id: str) -> str:
return f"{WORKFLOW_ONLINE_USERS_PREFIX}{workflow_id}"
def _leader_key(workflow_id: str) -> str:
return f"{WORKFLOW_LEADER_PREFIX}{workflow_id}"
def _sid_key(sid: str) -> str:
return f"{WS_SID_MAP_PREFIX}{sid}"
def _refresh_session_state(workflow_id: str, sid: str) -> None:
"""
Refresh TTLs for workflow + session keys so healthy sessions do not linger forever after crashes.
"""
workflow_key = _workflow_key(workflow_id)
sid_key = _sid_key(sid)
if redis_client.exists(workflow_key):
redis_client.expire(workflow_key, SESSION_STATE_TTL_SECONDS)
if redis_client.exists(sid_key):
redis_client.expire(sid_key, SESSION_STATE_TTL_SECONDS)
@sio.on("connect")
def socket_connect(sid, environ, auth):
"""
WebSocket connect event, do authentication here.
"""
token = None
if auth and isinstance(auth, dict):
token = auth.get("token")
if not token:
try:
request_environ = WerkzeugRequest(environ)
token = extract_access_token(request_environ)
except Exception:
token = None
if not token:
return False
try:
decoded = PassportService().verify(token)
user_id = decoded.get("user_id")
if not user_id:
return False
with sio.app.app_context():
user = AccountService.load_logged_in_account(account_id=user_id)
if not user:
return False
sio.save_session(sid, {"user_id": user.id, "username": user.name, "avatar": user.avatar})
return True
except Exception:
return False
@sio.on("user_connect")
def handle_user_connect(sid, data):
"""
Handle user connect event. Each session (tab) is treated as an independent collaborator.
"""
workflow_id = data.get("workflow_id")
if not workflow_id:
return {"msg": "workflow_id is required"}, 400
session = sio.get_session(sid)
user_id = session.get("user_id")
if not user_id:
return {"msg": "unauthorized"}, 401
# Each session is stored independently with sid as key
session_info = {
"user_id": user_id,
"username": session.get("username", "Unknown"),
"avatar": session.get("avatar", None),
"sid": sid,
"connected_at": int(time.time()), # Add timestamp to differentiate tabs
}
workflow_key = _workflow_key(workflow_id)
# Store session info with sid as key
redis_client.hset(workflow_key, sid, json.dumps(session_info))
redis_client.set(
_sid_key(sid),
json.dumps({"workflow_id": workflow_id, "user_id": user_id}),
ex=SESSION_STATE_TTL_SECONDS,
)
_refresh_session_state(workflow_id, sid)
# Leader election: first session becomes the leader
leader_sid = get_or_set_leader(workflow_id, sid)
is_leader = leader_sid == sid
sio.enter_room(sid, workflow_id)
broadcast_online_users(workflow_id)
# Notify this session of their leader status
sio.emit("status", {"isLeader": is_leader}, room=sid)
return {"msg": "connected", "user_id": user_id, "sid": sid, "isLeader": is_leader}
@sio.on("disconnect")
def handle_disconnect(sid):
"""
Handle session disconnect event. Remove the specific session from online users.
"""
mapping = redis_client.get(_sid_key(sid))
if mapping:
data = json.loads(mapping)
workflow_id = data["workflow_id"]
# Remove this specific session
redis_client.hdel(_workflow_key(workflow_id), sid)
redis_client.delete(_sid_key(sid))
# Handle leader re-election if the leader session disconnected
handle_leader_disconnect(workflow_id, sid)
broadcast_online_users(workflow_id)
def _clear_session_state(workflow_id: str, sid: str) -> None:
redis_client.hdel(_workflow_key(workflow_id), sid)
redis_client.delete(_sid_key(sid))
def _is_session_active(workflow_id: str, sid: str) -> bool:
if not sid:
return False
try:
if not sio.manager.is_connected(sid, "/"):
return False
except AttributeError:
return False
if not redis_client.hexists(_workflow_key(workflow_id), sid):
return False
if not redis_client.exists(_sid_key(sid)):
return False
return True
def get_or_set_leader(workflow_id: str, sid: str) -> str:
"""
Get current leader session or set this session as leader if no valid leader exists.
Returns the leader session id (sid).
"""
raw_leader = redis_client.get(_leader_key(workflow_id))
current_leader = raw_leader.decode("utf-8") if isinstance(raw_leader, bytes) else raw_leader
leader_replaced = False
if current_leader and not _is_session_active(workflow_id, current_leader):
_clear_session_state(workflow_id, current_leader)
redis_client.delete(_leader_key(workflow_id))
current_leader = None
leader_replaced = True
if not current_leader:
redis_client.set(_leader_key(workflow_id), sid, ex=SESSION_STATE_TTL_SECONDS) # Expire in 1 hour
if leader_replaced:
broadcast_leader_change(workflow_id, sid)
return sid
return current_leader
def handle_leader_disconnect(workflow_id, disconnected_sid):
"""
Handle leader re-election when a session disconnects.
If the disconnected session was the leader, elect a new leader from remaining sessions.
"""
current_leader = redis_client.get(_leader_key(workflow_id))
if current_leader:
current_leader = current_leader.decode("utf-8") if isinstance(current_leader, bytes) else current_leader
if current_leader == disconnected_sid:
# Leader session disconnected, elect a new leader
sessions_json = redis_client.hgetall(_workflow_key(workflow_id))
if sessions_json:
# Get the first remaining session as new leader
new_leader_sid = list(sessions_json.keys())[0]
if isinstance(new_leader_sid, bytes):
new_leader_sid = new_leader_sid.decode("utf-8")
redis_client.set(_leader_key(workflow_id), new_leader_sid, ex=SESSION_STATE_TTL_SECONDS)
# Notify all sessions about the new leader
broadcast_leader_change(workflow_id, new_leader_sid)
else:
# No sessions left, remove leader
redis_client.delete(_leader_key(workflow_id))
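The leader bookkeeping above reads the workflow_leader: key and then writes it, which leaves a small window in which two connecting sessions can both observe "no leader". A common hardening, shown here only as an illustrative alternative and not as what this module does, is to let Redis arbitrate with an atomic SET NX EX:

# Illustrative alternative, not this module's implementation: atomic leader
# claim using redis-py's set(nx=True, ex=ttl) so only one session can win.
def try_claim_leader(redis_client, workflow_id: str, sid: str, ttl: int = 3600) -> str:
    key = f"workflow_leader:{workflow_id}"
    # SET key sid NX EX ttl succeeds only for the first claimant.
    if redis_client.set(key, sid, nx=True, ex=ttl):
        return sid
    current = redis_client.get(key)
    return current.decode("utf-8") if isinstance(current, bytes) else current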
def broadcast_leader_change(workflow_id, new_leader_sid):
"""
Broadcast leader change to all sessions in the workflow.
"""
sessions_json = redis_client.hgetall(_workflow_key(workflow_id))
for sid, session_info_json in sessions_json.items():
try:
sid_str = sid.decode("utf-8") if isinstance(sid, bytes) else sid
is_leader = sid_str == new_leader_sid
# Emit to each session whether they are the new leader
sio.emit("status", {"isLeader": is_leader}, room=sid_str)
except Exception:
continue
def get_current_leader(workflow_id):
"""
Get the current leader for a workflow.
"""
leader = redis_client.get(_leader_key(workflow_id))
return leader.decode("utf-8") if leader and isinstance(leader, bytes) else leader
def broadcast_online_users(workflow_id):
"""
Broadcast online users to the workflow room.
Each session is shown as a separate user (even if the same person has multiple tabs).
"""
sessions_json = redis_client.hgetall(_workflow_key(workflow_id))
users = []
for sid, session_info_json in sessions_json.items():
try:
session_info = json.loads(session_info_json)
# Each session appears as a separate "user" in the UI
users.append(
{
"user_id": session_info["user_id"],
"username": session_info["username"],
"avatar": session_info.get("avatar"),
"sid": session_info["sid"],
"connected_at": session_info.get("connected_at"),
}
)
except Exception:
continue
# Sort by connection time to maintain consistent order
users.sort(key=lambda x: x.get("connected_at") or 0)
# Get current leader session
leader_sid = get_current_leader(workflow_id)
sio.emit("online_users", {"workflow_id": workflow_id, "users": users, "leader": leader_sid}, room=workflow_id)
@sio.on("collaboration_event")
def handle_collaboration_event(sid, data):
"""
Handle general collaboration events, including:
1. mouse_move
2. vars_and_features_update
3. sync_request (ask leader to update graph)
4. app_state_update
5. mcp_server_update
6. workflow_update
7. comments_update
8. node_panel_presence
"""
mapping = redis_client.get(_sid_key(sid))
if not mapping:
return {"msg": "unauthorized"}, 401
mapping_data = json.loads(mapping)
workflow_id = mapping_data["workflow_id"]
user_id = mapping_data["user_id"]
_refresh_session_state(workflow_id, sid)
event_type = data.get("type")
event_data = data.get("data")
timestamp = data.get("timestamp", int(time.time()))
if not event_type:
return {"msg": "invalid event type"}, 400
sio.emit(
"collaboration_update",
{"type": event_type, "userId": user_id, "data": event_data, "timestamp": timestamp},
room=workflow_id,
skip_sid=sid,
)
return {"msg": "event_broadcasted"}
@sio.on("graph_event")
def handle_graph_event(sid, data):
"""
Handle graph events - simple broadcast relay.
"""
mapping = redis_client.get(_sid_key(sid))
if not mapping:
return {"msg": "unauthorized"}, 401
mapping_data = json.loads(mapping)
workflow_id = mapping_data["workflow_id"]
_refresh_session_state(workflow_id, sid)
sio.emit("graph_update", data, room=workflow_id, skip_sid=sid)
return {"msg": "graph_update_broadcasted"}

View File

@ -1,7 +1,7 @@
from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import BadRequest
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.error import TracingConfigCheckError, TracingConfigIsExist, TracingConfigNotExist
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
@ -14,18 +14,18 @@ class TraceAppConfigApi(Resource):
Manage trace app configurations
"""
@api.doc("get_trace_app_config")
@api.doc(description="Get tracing configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser().add_argument(
@console_ns.doc("get_trace_app_config")
@console_ns.doc(description="Get tracing configuration for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.parser().add_argument(
"tracing_provider", type=str, required=True, location="args", help="Tracing provider name"
)
)
@api.response(
@console_ns.response(
200, "Tracing configuration retrieved successfully", fields.Raw(description="Tracing configuration data")
)
@api.response(400, "Invalid request parameters")
@console_ns.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@ -41,11 +41,11 @@ class TraceAppConfigApi(Resource):
except Exception as e:
raise BadRequest(str(e))
@api.doc("create_trace_app_config")
@api.doc(description="Create a new tracing configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("create_trace_app_config")
@console_ns.doc(description="Create a new tracing configuration for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"TraceConfigCreateRequest",
{
"tracing_provider": fields.String(required=True, description="Tracing provider name"),
@ -53,10 +53,10 @@ class TraceAppConfigApi(Resource):
},
)
)
@api.response(
@console_ns.response(
201, "Tracing configuration created successfully", fields.Raw(description="Created configuration data")
)
@api.response(400, "Invalid request parameters or configuration already exists")
@console_ns.response(400, "Invalid request parameters or configuration already exists")
@setup_required
@login_required
@account_initialization_required
@ -81,11 +81,11 @@ class TraceAppConfigApi(Resource):
except Exception as e:
raise BadRequest(str(e))
@api.doc("update_trace_app_config")
@api.doc(description="Update an existing tracing configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_trace_app_config")
@console_ns.doc(description="Update an existing tracing configuration for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"TraceConfigUpdateRequest",
{
"tracing_provider": fields.String(required=True, description="Tracing provider name"),
@ -93,8 +93,8 @@ class TraceAppConfigApi(Resource):
},
)
)
@api.response(200, "Tracing configuration updated successfully", fields.Raw(description="Success response"))
@api.response(400, "Invalid request parameters or configuration not found")
@console_ns.response(200, "Tracing configuration updated successfully", fields.Raw(description="Success response"))
@console_ns.response(400, "Invalid request parameters or configuration not found")
@setup_required
@login_required
@account_initialization_required
@ -117,16 +117,16 @@ class TraceAppConfigApi(Resource):
except Exception as e:
raise BadRequest(str(e))
@api.doc("delete_trace_app_config")
@api.doc(description="Delete an existing tracing configuration for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser().add_argument(
@console_ns.doc("delete_trace_app_config")
@console_ns.doc(description="Delete an existing tracing configuration for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.parser().add_argument(
"tracing_provider", type=str, required=True, location="args", help="Tracing provider name"
)
)
@api.response(204, "Tracing configuration deleted successfully")
@api.response(400, "Invalid request parameters or configuration not found")
@console_ns.response(204, "Tracing configuration deleted successfully")
@console_ns.response(400, "Invalid request parameters or configuration not found")
@setup_required
@login_required
@account_initialization_required

View File

@ -1,16 +1,24 @@
from flask_restx import Resource, fields, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
from werkzeug.exceptions import NotFound
from constants.languages import supported_language
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.console.wraps import (
account_initialization_required,
edit_permission_required,
is_admin_or_owner_required,
setup_required,
)
from extensions.ext_database import db
from fields.app_fields import app_site_fields
from libs.datetime_utils import naive_utc_now
from libs.login import current_account_with_tenant, login_required
from models import Site
# Register model for flask_restx to avoid dict type issues in Swagger
app_site_model = console_ns.model("AppSite", app_site_fields)
def parse_app_site_args():
parser = (
@ -43,11 +51,11 @@ def parse_app_site_args():
@console_ns.route("/apps/<uuid:app_id>/site")
class AppSite(Resource):
@api.doc("update_app_site")
@api.doc(description="Update application site configuration")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.model(
@console_ns.doc("update_app_site")
@console_ns.doc(description="Update application site configuration")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.model(
"AppSiteRequest",
{
"title": fields.String(description="Site title"),
@ -71,22 +79,18 @@ class AppSite(Resource):
},
)
)
@api.response(200, "Site configuration updated successfully", app_site_fields)
@api.response(403, "Insufficient permissions")
@api.response(404, "App not found")
@console_ns.response(200, "Site configuration updated successfully", app_site_model)
@console_ns.response(403, "Insufficient permissions")
@console_ns.response(404, "App not found")
@setup_required
@login_required
@edit_permission_required
@account_initialization_required
@get_app_model
@marshal_with(app_site_fields)
@marshal_with(app_site_model)
def post(self, app_model):
args = parse_app_site_args()
current_user, _ = current_account_with_tenant()
# The role of the current user in the ta table must be editor, admin, or owner
if not current_user.has_edit_permission:
raise Forbidden()
site = db.session.query(Site).where(Site.app_id == app_model.id).first()
if not site:
raise NotFound
@ -122,24 +126,20 @@ class AppSite(Resource):
@console_ns.route("/apps/<uuid:app_id>/site/access-token-reset")
class AppSiteAccessTokenReset(Resource):
@api.doc("reset_app_site_access_token")
@api.doc(description="Reset access token for application site")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Access token reset successfully", app_site_fields)
@api.response(403, "Insufficient permissions (admin/owner required)")
@api.response(404, "App or site not found")
@console_ns.doc("reset_app_site_access_token")
@console_ns.doc(description="Reset access token for application site")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Access token reset successfully", app_site_model)
@console_ns.response(403, "Insufficient permissions (admin/owner required)")
@console_ns.response(404, "App or site not found")
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
@get_app_model
@marshal_with(app_site_fields)
@marshal_with(app_site_model)
def post(self, app_model):
# The role of the current user in the ta table must be admin or owner
current_user, _ = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
site = db.session.query(Site).where(Site.app_id == app_model.id).first()
if not site:

View File

@ -4,28 +4,28 @@ import sqlalchemy as sa
from flask import abort, jsonify
from flask_restx import Resource, fields, reqparse
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
from extensions.ext_database import db
from libs.datetime_utils import parse_time_range
from libs.helper import DatetimeString
from libs.helper import DatetimeString, convert_datetime_to_date
from libs.login import current_account_with_tenant, login_required
from models import AppMode, Message
from models import AppMode
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-messages")
class DailyMessageStatistic(Resource):
@api.doc("get_daily_message_statistics")
@api.doc(description="Get daily message statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
@console_ns.doc("get_daily_message_statistics")
@console_ns.doc(description="Get daily message statistics for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(
console_ns.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
@console_ns.response(
200,
"Daily message statistics retrieved successfully",
fields.List(fields.Raw(description="Daily message count data")),
@ -44,8 +44,9 @@ class DailyMessageStatistic(Resource):
)
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
COUNT(*) AS message_count
FROM
messages
@ -80,17 +81,20 @@ WHERE
return jsonify({"data": response_data})
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-conversations")
class DailyConversationStatistic(Resource):
@api.doc("get_daily_conversation_statistics")
@api.doc(description="Get daily conversation statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
@console_ns.doc("get_daily_conversation_statistics")
@console_ns.doc(description="Get daily conversation statistics for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(
200,
"Daily conversation statistics retrieved successfully",
fields.List(fields.Raw(description="Daily conversation count data")),
@ -102,12 +106,18 @@ class DailyConversationStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
COUNT(DISTINCT conversation_id) AS conversation_count
FROM
messages
WHERE
app_id = :app_id
AND invoke_from != :invoke_from"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id, "invoke_from": InvokeFrom.DEBUGGER}
assert account.timezone is not None
try:
@ -115,45 +125,32 @@ class DailyConversationStatistic(Resource):
except ValueError as e:
abort(400, description=str(e))
stmt = (
sa.select(
sa.func.date(
sa.func.date_trunc("day", sa.text("created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz"))
).label("date"),
sa.func.count(sa.distinct(Message.conversation_id)).label("conversation_count"),
)
.select_from(Message)
.where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER)
)
if start_datetime_utc:
stmt = stmt.where(Message.created_at >= start_datetime_utc)
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
if end_datetime_utc:
stmt = stmt.where(Message.created_at < end_datetime_utc)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
stmt = stmt.group_by("date").order_by("date")
sql_query += " GROUP BY date ORDER BY date"
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(stmt, {"tz": account.timezone})
for row in rs:
response_data.append({"date": str(row.date), "conversation_count": row.conversation_count})
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "conversation_count": i.conversation_count})
return jsonify({"data": response_data})
@console_ns.route("/apps/<uuid:app_id>/statistics/daily-end-users")
class DailyTerminalsStatistic(Resource):
@api.doc("get_daily_terminals_statistics")
@api.doc(description="Get daily terminal/end-user statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
@console_ns.doc("get_daily_terminals_statistics")
@console_ns.doc(description="Get daily terminal/end-user statistics for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(
200,
"Daily terminal statistics retrieved successfully",
fields.List(fields.Raw(description="Daily terminal count data")),
@ -165,15 +162,11 @@ class DailyTerminalsStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
COUNT(DISTINCT messages.from_end_user_id) AS terminal_count
FROM
messages
@ -210,15 +203,11 @@ WHERE
@console_ns.route("/apps/<uuid:app_id>/statistics/token-costs")
class DailyTokenCostStatistic(Resource):
@api.doc("get_daily_token_cost_statistics")
@api.doc(description="Get daily token cost statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
@console_ns.doc("get_daily_token_cost_statistics")
@console_ns.doc(description="Get daily token cost statistics for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(
200,
"Daily token cost statistics retrieved successfully",
fields.List(fields.Raw(description="Daily token cost data")),
@ -230,15 +219,11 @@ class DailyTokenCostStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
(SUM(messages.message_tokens) + SUM(messages.answer_tokens)) AS token_count,
SUM(total_price) AS total_price
FROM
@ -278,15 +263,11 @@ WHERE
@console_ns.route("/apps/<uuid:app_id>/statistics/average-session-interactions")
class AverageSessionInteractionStatistic(Resource):
@api.doc("get_average_session_interaction_statistics")
@api.doc(description="Get average session interaction statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
@console_ns.doc("get_average_session_interaction_statistics")
@console_ns.doc(description="Get average session interaction statistics for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(
200,
"Average session interaction statistics retrieved successfully",
fields.List(fields.Raw(description="Average session interaction data")),
@ -298,15 +279,11 @@ class AverageSessionInteractionStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', c.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("c.created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
AVG(subquery.message_count) AS interactions
FROM
(
@ -362,15 +339,11 @@ ORDER BY
@console_ns.route("/apps/<uuid:app_id>/statistics/user-satisfaction-rate")
class UserSatisfactionRateStatistic(Resource):
@api.doc("get_user_satisfaction_rate_statistics")
@api.doc(description="Get user satisfaction rate statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
@console_ns.doc("get_user_satisfaction_rate_statistics")
@console_ns.doc(description="Get user satisfaction rate statistics for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(
200,
"User satisfaction rate statistics retrieved successfully",
fields.List(fields.Raw(description="User satisfaction rate data")),
@ -382,15 +355,11 @@ class UserSatisfactionRateStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', m.created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("m.created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
COUNT(m.id) AS message_count,
COUNT(mf.id) AS feedback_count
FROM
@ -436,15 +405,11 @@ WHERE
@console_ns.route("/apps/<uuid:app_id>/statistics/average-response-time")
class AverageResponseTimeStatistic(Resource):
@api.doc("get_average_response_time_statistics")
@api.doc(description="Get average response time statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
@console_ns.doc("get_average_response_time_statistics")
@console_ns.doc(description="Get average response time statistics for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(
200,
"Average response time statistics retrieved successfully",
fields.List(fields.Raw(description="Average response time data")),
@ -456,15 +421,11 @@ class AverageResponseTimeStatistic(Resource):
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
AVG(provider_response_latency) AS latency
FROM
messages
@ -501,15 +462,11 @@ WHERE
@console_ns.route("/apps/<uuid:app_id>/statistics/tokens-per-second")
class TokensPerSecondStatistic(Resource):
@api.doc("get_tokens_per_second_statistics")
@api.doc(description="Get tokens per second statistics for an application")
@api.doc(params={"app_id": "Application ID"})
@api.expect(
api.parser()
.add_argument("start", type=str, location="args", help="Start date (YYYY-MM-DD HH:MM)")
.add_argument("end", type=str, location="args", help="End date (YYYY-MM-DD HH:MM)")
)
@api.response(
@console_ns.doc("get_tokens_per_second_statistics")
@console_ns.doc(description="Get tokens per second statistics for an application")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.expect(parser)
@console_ns.response(
200,
"Tokens per second statistics retrieved successfully",
fields.List(fields.Raw(description="Tokens per second data")),
@ -520,16 +477,11 @@ class TokensPerSecondStatistic(Resource):
@account_initialization_required
def get(self, app_model):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("start", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
)
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
converted_created_at = convert_datetime_to_date("created_at")
sql_query = f"""SELECT
{converted_created_at} AS date,
CASE
WHEN SUM(provider_response_latency) = 0 THEN 0
ELSE (SUM(answer_tokens) / SUM(provider_response_latency))
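Every statistics endpoint in this file swaps the hard-coded DATE(DATE_TRUNC('day', ... AT TIME ZONE 'UTC' AT TIME ZONE :tz)) expression for convert_datetime_to_date(column) from libs.helper. The helper's body is outside this diff; assuming it simply centralizes that same fragment, it would look roughly like the sketch below.

# Assumed shape of libs.helper.convert_datetime_to_date, inferred from the SQL
# it replaces in this diff; the real helper may differ (e.g. per-dialect logic).
def convert_datetime_to_date(column: str) -> str:
    # Convert a UTC timestamp column to a date in the caller's timezone,
    # relying on the :tz bind parameter the statistics queries already pass.
    return f"DATE(DATE_TRUNC('day', {column} AT TIME ZONE 'UTC' AT TIME ZONE :tz))"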

File diff suppressed because it is too large

View File

@ -3,24 +3,27 @@ from flask_restx import Resource, marshal_with, reqparse
from flask_restx.inputs import int_range
from sqlalchemy.orm import Session
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.workflow.enums import WorkflowExecutionStatus
from extensions.ext_database import db
from fields.workflow_app_log_fields import workflow_app_log_pagination_fields
from fields.workflow_app_log_fields import build_workflow_app_log_pagination_model
from libs.login import login_required
from models import App
from models.model import AppMode
from services.workflow_app_service import WorkflowAppService
# Register model for flask_restx to avoid dict type issues in Swagger
workflow_app_log_pagination_model = build_workflow_app_log_pagination_model(console_ns)
@console_ns.route("/apps/<uuid:app_id>/workflow-app-logs")
class WorkflowAppLogApi(Resource):
@api.doc("get_workflow_app_logs")
@api.doc(description="Get workflow application execution logs")
@api.doc(params={"app_id": "Application ID"})
@api.doc(
@console_ns.doc("get_workflow_app_logs")
@console_ns.doc(description="Get workflow application execution logs")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(
params={
"keyword": "Search keyword for filtering logs",
"status": "Filter by execution status (succeeded, failed, stopped, partial-succeeded)",
@ -28,16 +31,17 @@ class WorkflowAppLogApi(Resource):
"created_at__after": "Filter logs created after this timestamp",
"created_by_end_user_session_id": "Filter by end user session ID",
"created_by_account": "Filter by account",
"detail": "Whether to return detailed logs",
"page": "Page number (1-99999)",
"limit": "Number of items per page (1-100)",
}
)
@api.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_fields)
@console_ns.response(200, "Workflow app logs retrieved successfully", workflow_app_log_pagination_model)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.WORKFLOW])
@marshal_with(workflow_app_log_pagination_fields)
@marshal_with(workflow_app_log_pagination_model)
def get(self, app_model: App):
"""
Get workflow app logs
@ -68,6 +72,7 @@ class WorkflowAppLogApi(Resource):
required=False,
default=None,
)
.add_argument("detail", type=bool, location="args", required=False, default=False)
.add_argument("page", type=int_range(1, 99999), default=1, location="args")
.add_argument("limit", type=int_range(1, 100), default=20, location="args")
)
@ -92,6 +97,7 @@ class WorkflowAppLogApi(Resource):
created_at_after=args.created_at__after,
page=args.page,
limit=args.limit,
detail=args.detail,
created_by_end_user_session_id=args.created_by_end_user_session_id,
created_by_account=args.created_by_account,
)
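The new detail query argument is parsed as a boolean and forwarded to WorkflowAppService, so callers can opt into detailed log entries. A hedged example of exercising the endpoint with the requests library; the base URL, console API prefix, and bearer token are assumptions for illustration and do not come from this diff.

# Illustrative request against the workflow app logs endpoint; base URL,
# prefix, and token are assumed placeholders.
import requests

BASE = "http://localhost:5001/console/api"  # assumed console API prefix
headers = {"Authorization": "Bearer <console-access-token>"}

resp = requests.get(
    f"{BASE}/apps/<app-id>/workflow-app-logs",
    headers=headers,
    params={"status": "failed", "detail": "true", "page": 1, "limit": 20},
)
resp.raise_for_status()
print(resp.json())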

View File

@ -1,240 +0,0 @@
import logging
from flask_restx import Resource, fields, marshal_with, reqparse
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.member_fields import account_with_role_fields
from fields.workflow_comment_fields import (
workflow_comment_basic_fields,
workflow_comment_create_fields,
workflow_comment_detail_fields,
workflow_comment_reply_create_fields,
workflow_comment_reply_update_fields,
workflow_comment_resolve_fields,
workflow_comment_update_fields,
)
from libs.login import current_user, login_required
from models import App
from services.account_service import TenantService
from services.workflow_comment_service import WorkflowCommentService
logger = logging.getLogger(__name__)
class WorkflowCommentListApi(Resource):
"""API for listing and creating workflow comments."""
@login_required
@setup_required
@account_initialization_required
@get_app_model
@marshal_with(workflow_comment_basic_fields, envelope="data")
def get(self, app_model: App):
"""Get all comments for a workflow."""
comments = WorkflowCommentService.get_comments(tenant_id=current_user.current_tenant_id, app_id=app_model.id)
return comments
@login_required
@setup_required
@account_initialization_required
@get_app_model
@marshal_with(workflow_comment_create_fields)
def post(self, app_model: App):
"""Create a new workflow comment."""
parser = reqparse.RequestParser()
parser.add_argument("position_x", type=float, required=True, location="json")
parser.add_argument("position_y", type=float, required=True, location="json")
parser.add_argument("content", type=str, required=True, location="json")
parser.add_argument("mentioned_user_ids", type=list, location="json", default=[])
args = parser.parse_args()
result = WorkflowCommentService.create_comment(
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
created_by=current_user.id,
content=args.content,
position_x=args.position_x,
position_y=args.position_y,
mentioned_user_ids=args.mentioned_user_ids,
)
return result, 201
class WorkflowCommentDetailApi(Resource):
"""API for managing individual workflow comments."""
@login_required
@setup_required
@account_initialization_required
@get_app_model
@marshal_with(workflow_comment_detail_fields)
def get(self, app_model: App, comment_id: str):
"""Get a specific workflow comment."""
comment = WorkflowCommentService.get_comment(
tenant_id=current_user.current_tenant_id, app_id=app_model.id, comment_id=comment_id
)
return comment
@login_required
@setup_required
@account_initialization_required
@get_app_model
@marshal_with(workflow_comment_update_fields)
def put(self, app_model: App, comment_id: str):
"""Update a workflow comment."""
parser = reqparse.RequestParser()
parser.add_argument("content", type=str, required=True, location="json")
parser.add_argument("position_x", type=float, required=False, location="json")
parser.add_argument("position_y", type=float, required=False, location="json")
parser.add_argument("mentioned_user_ids", type=list, location="json", default=[])
args = parser.parse_args()
result = WorkflowCommentService.update_comment(
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
comment_id=comment_id,
user_id=current_user.id,
content=args.content,
position_x=args.position_x,
position_y=args.position_y,
mentioned_user_ids=args.mentioned_user_ids,
)
return result
@login_required
@setup_required
@account_initialization_required
@get_app_model
def delete(self, app_model: App, comment_id: str):
"""Delete a workflow comment."""
WorkflowCommentService.delete_comment(
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
comment_id=comment_id,
user_id=current_user.id,
)
return {"result": "success"}, 204
class WorkflowCommentResolveApi(Resource):
"""API for resolving and reopening workflow comments."""
@login_required
@setup_required
@account_initialization_required
@get_app_model
@marshal_with(workflow_comment_resolve_fields)
def post(self, app_model: App, comment_id: str):
"""Resolve a workflow comment."""
comment = WorkflowCommentService.resolve_comment(
tenant_id=current_user.current_tenant_id,
app_id=app_model.id,
comment_id=comment_id,
user_id=current_user.id,
)
return comment
class WorkflowCommentReplyApi(Resource):
"""API for managing comment replies."""
@login_required
@setup_required
@account_initialization_required
@get_app_model
@marshal_with(workflow_comment_reply_create_fields)
def post(self, app_model: App, comment_id: str):
"""Add a reply to a workflow comment."""
# Validate comment access first
WorkflowCommentService.validate_comment_access(
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
)
parser = reqparse.RequestParser()
parser.add_argument("content", type=str, required=True, location="json")
parser.add_argument("mentioned_user_ids", type=list, location="json", default=[])
args = parser.parse_args()
result = WorkflowCommentService.create_reply(
comment_id=comment_id,
content=args.content,
created_by=current_user.id,
mentioned_user_ids=args.mentioned_user_ids,
)
return result, 201
class WorkflowCommentReplyDetailApi(Resource):
"""API for managing individual comment replies."""
@login_required
@setup_required
@account_initialization_required
@get_app_model
@marshal_with(workflow_comment_reply_update_fields)
def put(self, app_model: App, comment_id: str, reply_id: str):
"""Update a comment reply."""
# Validate comment access first
WorkflowCommentService.validate_comment_access(
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
)
parser = reqparse.RequestParser()
parser.add_argument("content", type=str, required=True, location="json")
parser.add_argument("mentioned_user_ids", type=list, location="json", default=[])
args = parser.parse_args()
reply = WorkflowCommentService.update_reply(
reply_id=reply_id, user_id=current_user.id, content=args.content, mentioned_user_ids=args.mentioned_user_ids
)
return reply
@login_required
@setup_required
@account_initialization_required
@get_app_model
def delete(self, app_model: App, comment_id: str, reply_id: str):
"""Delete a comment reply."""
# Validate comment access first
WorkflowCommentService.validate_comment_access(
comment_id=comment_id, tenant_id=current_user.current_tenant_id, app_id=app_model.id
)
WorkflowCommentService.delete_reply(reply_id=reply_id, user_id=current_user.id)
return {"result": "success"}, 204
class WorkflowCommentMentionUsersApi(Resource):
"""API for getting mentionable users for workflow comments."""
@login_required
@setup_required
@account_initialization_required
@get_app_model
@marshal_with({"users": fields.List(fields.Nested(account_with_role_fields))})
def get(self, app_model: App):
"""Get all users in current tenant for mentions."""
members = TenantService.get_tenant_members(current_user.current_tenant)
return {"users": members}
# Register API routes
api.add_resource(WorkflowCommentListApi, "/apps/<uuid:app_id>/workflow/comments")
api.add_resource(WorkflowCommentDetailApi, "/apps/<uuid:app_id>/workflow/comments/<string:comment_id>")
api.add_resource(WorkflowCommentResolveApi, "/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/resolve")
api.add_resource(WorkflowCommentReplyApi, "/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies")
api.add_resource(
WorkflowCommentReplyDetailApi, "/apps/<uuid:app_id>/workflow/comments/<string:comment_id>/replies/<string:reply_id>"
)
api.add_resource(WorkflowCommentMentionUsersApi, "/apps/<uuid:app_id>/workflow/comments/mention-users")

View File

@ -1,17 +1,18 @@
import logging
from typing import NoReturn
from collections.abc import Callable
from functools import wraps
from typing import NoReturn, ParamSpec, TypeVar
from flask import Response
from flask_restx import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.error import (
DraftWorkflowNotExist,
)
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from controllers.web.error import InvalidArgumentError, NotFoundError
from core.file import helpers as file_helpers
from core.variables.segment_group import SegmentGroup
@ -19,10 +20,10 @@ from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from extensions.ext_database import db
from factories import variable_factory
from factories.file_factory import build_from_mapping, build_from_mappings
from libs.login import current_user, login_required
from models import Account, App, AppMode
from factories.variable_factory import build_segment_with_type
from libs.login import login_required
from models import App, AppMode
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
from services.workflow_service import WorkflowService
@ -140,8 +141,42 @@ _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS = {
"items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_FIELDS), attribute=_get_items),
}
# Register models for flask_restx to avoid dict type issues in Swagger
workflow_draft_variable_without_value_model = console_ns.model(
"WorkflowDraftVariableWithoutValue", _WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS
)
def _api_prerequisite(f):
workflow_draft_variable_model = console_ns.model("WorkflowDraftVariable", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
workflow_draft_env_variable_model = console_ns.model("WorkflowDraftEnvVariable", _WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS)
workflow_draft_env_variable_list_fields_copy = _WORKFLOW_DRAFT_ENV_VARIABLE_LIST_FIELDS.copy()
workflow_draft_env_variable_list_fields_copy["items"] = fields.List(fields.Nested(workflow_draft_env_variable_model))
workflow_draft_env_variable_list_model = console_ns.model(
"WorkflowDraftEnvVariableList", workflow_draft_env_variable_list_fields_copy
)
workflow_draft_variable_list_without_value_fields_copy = _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS.copy()
workflow_draft_variable_list_without_value_fields_copy["items"] = fields.List(
fields.Nested(workflow_draft_variable_without_value_model), attribute=_get_items
)
workflow_draft_variable_list_without_value_model = console_ns.model(
"WorkflowDraftVariableListWithoutValue", workflow_draft_variable_list_without_value_fields_copy
)
workflow_draft_variable_list_fields_copy = _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS.copy()
workflow_draft_variable_list_fields_copy["items"] = fields.List(
fields.Nested(workflow_draft_variable_model), attribute=_get_items
)
workflow_draft_variable_list_model = console_ns.model(
"WorkflowDraftVariableList", workflow_draft_variable_list_fields_copy
)
P = ParamSpec("P")
R = TypeVar("R")
def _api_prerequisite(f: Callable[P, R]):
"""Common prerequisites for all draft workflow variable APIs.
It ensures the following conditions are satisfied:
@ -155,11 +190,10 @@ def _api_prerequisite(f):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
def wrapper(*args, **kwargs):
assert isinstance(current_user, Account)
if not current_user.has_edit_permission:
raise Forbidden()
@wraps(f)
def wrapper(*args: P.args, **kwargs: P.kwargs):
return f(*args, **kwargs)
return wrapper
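The rewritten _api_prerequisite is now typed with ParamSpec/TypeVar and wraps the view with functools.wraps, which preserves both the runtime metadata and the static signature of decorated view methods. A self-contained illustration of that typing pattern, independent of this controller; log_call is an invented example decorator, not repository code.

# Standalone illustration of the ParamSpec/TypeVar decorator pattern used by
# _api_prerequisite (requires Python 3.10+).
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


def log_call(f: Callable[P, R]) -> Callable[P, R]:
    @wraps(f)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        print(f"calling {f.__name__}")
        return f(*args, **kwargs)

    return wrapper


@log_call
def add(x: int, y: int) -> int:
    return x + y


assert add(1, 2) == 3  # type checkers still see (int, int) -> int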
@ -167,13 +201,16 @@ def _api_prerequisite(f):
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables")
class WorkflowVariableCollectionApi(Resource):
@api.doc("get_workflow_variables")
@api.doc(description="Get draft workflow variables")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"})
@api.response(200, "Workflow variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
@console_ns.expect(_create_pagination_parser())
@console_ns.doc("get_workflow_variables")
@console_ns.doc(description="Get draft workflow variables")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(params={"page": "Page number (1-100000)", "limit": "Number of items per page (1-100)"})
@console_ns.response(
200, "Workflow variables retrieved successfully", workflow_draft_variable_list_without_value_model
)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
@marshal_with(workflow_draft_variable_list_without_value_model)
def get(self, app_model: App):
"""
Get draft workflow
@ -200,9 +237,9 @@ class WorkflowVariableCollectionApi(Resource):
return workflow_vars
@api.doc("delete_workflow_variables")
@api.doc(description="Delete all draft workflow variables")
@api.response(204, "Workflow variables deleted successfully")
@console_ns.doc("delete_workflow_variables")
@console_ns.doc(description="Delete all draft workflow variables")
@console_ns.response(204, "Workflow variables deleted successfully")
@_api_prerequisite
def delete(self, app_model: App):
draft_var_srv = WorkflowDraftVariableService(
@ -233,12 +270,12 @@ def validate_node_id(node_id: str) -> NoReturn | None:
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
class NodeVariableCollectionApi(Resource):
@api.doc("get_node_variables")
@api.doc(description="Get variables for a specific node")
@api.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@api.response(200, "Node variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@console_ns.doc("get_node_variables")
@console_ns.doc(description="Get variables for a specific node")
@console_ns.doc(params={"app_id": "Application ID", "node_id": "Node ID"})
@console_ns.response(200, "Node variables retrieved successfully", workflow_draft_variable_list_model)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@marshal_with(workflow_draft_variable_list_model)
def get(self, app_model: App, node_id: str):
validate_node_id(node_id)
with Session(bind=db.engine, expire_on_commit=False) as session:
@ -249,9 +286,9 @@ class NodeVariableCollectionApi(Resource):
return node_vars
@api.doc("delete_node_variables")
@api.doc(description="Delete all variables for a specific node")
@api.response(204, "Node variables deleted successfully")
@console_ns.doc("delete_node_variables")
@console_ns.doc(description="Delete all variables for a specific node")
@console_ns.response(204, "Node variables deleted successfully")
@_api_prerequisite
def delete(self, app_model: App, node_id: str):
validate_node_id(node_id)
@ -266,13 +303,13 @@ class VariableApi(Resource):
_PATCH_NAME_FIELD = "name"
_PATCH_VALUE_FIELD = "value"
@api.doc("get_variable")
@api.doc(description="Get a specific workflow variable")
@api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@api.response(200, "Variable retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(404, "Variable not found")
@console_ns.doc("get_variable")
@console_ns.doc(description="Get a specific workflow variable")
@console_ns.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@console_ns.response(200, "Variable retrieved successfully", workflow_draft_variable_model)
@console_ns.response(404, "Variable not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
@marshal_with(workflow_draft_variable_model)
def get(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
@ -284,10 +321,10 @@ class VariableApi(Resource):
raise NotFoundError(description=f"variable not found, id={variable_id}")
return variable
@api.doc("update_variable")
@api.doc(description="Update a workflow variable")
@api.expect(
api.model(
@console_ns.doc("update_variable")
@console_ns.doc(description="Update a workflow variable")
@console_ns.expect(
console_ns.model(
"UpdateVariableRequest",
{
"name": fields.String(description="Variable name"),
@ -295,10 +332,10 @@ class VariableApi(Resource):
},
)
)
@api.response(200, "Variable updated successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(404, "Variable not found")
@console_ns.response(200, "Variable updated successfully", workflow_draft_variable_model)
@console_ns.response(404, "Variable not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
@marshal_with(workflow_draft_variable_model)
def patch(self, app_model: App, variable_id: str):
# Request payload for file types:
#
@ -355,15 +392,15 @@ class VariableApi(Resource):
if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
raw_value = build_from_mappings(mappings=raw_value, tenant_id=app_model.tenant_id)
new_value = variable_factory.build_segment_with_type(variable.value_type, raw_value)
new_value = build_segment_with_type(variable.value_type, raw_value)
draft_var_srv.update_variable(variable, name=new_name, value=new_value)
db.session.commit()
return variable
@api.doc("delete_variable")
@api.doc(description="Delete a workflow variable")
@api.response(204, "Variable deleted successfully")
@api.response(404, "Variable not found")
@console_ns.doc("delete_variable")
@console_ns.doc(description="Delete a workflow variable")
@console_ns.response(204, "Variable deleted successfully")
@console_ns.response(404, "Variable not found")
@_api_prerequisite
def delete(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
@ -381,12 +418,12 @@ class VariableApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
class VariableResetApi(Resource):
@api.doc("reset_variable")
@api.doc(description="Reset a workflow variable to its default value")
@api.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@api.response(200, "Variable reset successfully", _WORKFLOW_DRAFT_VARIABLE_FIELDS)
@api.response(204, "Variable reset (no content)")
@api.response(404, "Variable not found")
@console_ns.doc("reset_variable")
@console_ns.doc(description="Reset a workflow variable to its default value")
@console_ns.doc(params={"app_id": "Application ID", "variable_id": "Variable ID"})
@console_ns.response(200, "Variable reset successfully", workflow_draft_variable_model)
@console_ns.response(204, "Variable reset (no content)")
@console_ns.response(404, "Variable not found")
@_api_prerequisite
def put(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
@ -410,7 +447,7 @@ class VariableResetApi(Resource):
if resetted is None:
return Response("", 204)
else:
return marshal(resetted, _WORKFLOW_DRAFT_VARIABLE_FIELDS)
return marshal(resetted, workflow_draft_variable_model)
def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
@ -429,13 +466,13 @@ def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/conversation-variables")
class ConversationVariableCollectionApi(Resource):
@api.doc("get_conversation_variables")
@api.doc(description="Get conversation variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Conversation variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@api.response(404, "Draft workflow not found")
@console_ns.doc("get_conversation_variables")
@console_ns.doc(description="Get conversation variables for workflow")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Conversation variables retrieved successfully", workflow_draft_variable_list_model)
@console_ns.response(404, "Draft workflow not found")
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@marshal_with(workflow_draft_variable_list_model)
def get(self, app_model: App):
# NOTE(QuantumGhost): Prefill conversation variables into the draft variables table
# so their IDs can be returned to the caller.
@ -448,53 +485,26 @@ class ConversationVariableCollectionApi(Resource):
db.session.commit()
return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.ADVANCED_CHAT)
def post(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("conversation_variables", type=list, required=True, location="json")
args = parser.parse_args()
workflow_service = WorkflowService()
conversation_variables_list = args.get("conversation_variables") or []
conversation_variables = [
variable_factory.build_conversation_variable_from_mapping(obj) for obj in conversation_variables_list
]
workflow_service.update_draft_workflow_conversation_variables(
app_model=app_model,
account=current_user,
conversation_variables=conversation_variables,
)
return {"result": "success"}
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/system-variables")
class SystemVariableCollectionApi(Resource):
@api.doc("get_system_variables")
@api.doc(description="Get system variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "System variables retrieved successfully", _WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@console_ns.doc("get_system_variables")
@console_ns.doc(description="Get system variables for workflow")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "System variables retrieved successfully", workflow_draft_variable_list_model)
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
@marshal_with(workflow_draft_variable_list_model)
def get(self, app_model: App):
return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID)
@console_ns.route("/apps/<uuid:app_id>/workflows/draft/environment-variables")
class EnvironmentVariableCollectionApi(Resource):
@api.doc("get_environment_variables")
@api.doc(description="Get environment variables for workflow")
@api.doc(params={"app_id": "Application ID"})
@api.response(200, "Environment variables retrieved successfully")
@api.response(404, "Draft workflow not found")
@console_ns.doc("get_environment_variables")
@console_ns.doc(description="Get environment variables for workflow")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.response(200, "Environment variables retrieved successfully")
@console_ns.response(404, "Draft workflow not found")
@_api_prerequisite
def get(self, app_model: App):
"""
@ -526,44 +536,3 @@ class EnvironmentVariableCollectionApi(Resource):
)
return {"items": env_vars_list}
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
def post(self, app_model: App):
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.is_editor:
raise Forbidden()
parser = reqparse.RequestParser()
parser.add_argument("environment_variables", type=list, required=True, location="json")
args = parser.parse_args()
workflow_service = WorkflowService()
environment_variables_list = args.get("environment_variables") or []
environment_variables = [
variable_factory.build_environment_variable_from_mapping(obj) for obj in environment_variables_list
]
workflow_service.update_draft_workflow_environment_variables(
app_model=app_model,
account=current_user,
environment_variables=environment_variables,
)
return {"result": "success"}
api.add_resource(
WorkflowVariableCollectionApi,
"/apps/<uuid:app_id>/workflows/draft/variables",
)
api.add_resource(NodeVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
api.add_resource(VariableApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
api.add_resource(VariableResetApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
api.add_resource(ConversationVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/conversation-variables")
api.add_resource(SystemVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/system-variables")
api.add_resource(EnvironmentVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/environment-variables")
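The registrations above are the removed api.add_resource calls; routing now happens through the @console_ns.route decorator on each Resource class. A minimal, self-contained sketch of that pattern, using illustrative names rather than the real dify modules:

from flask import Flask
from flask_restx import Api, Namespace, Resource

app = Flask(__name__)
api = Api(app)

# A namespace groups related endpoints under one URL prefix and one Swagger section.
console_ns = Namespace("console", path="/console/api")
api.add_namespace(console_ns)

@console_ns.route("/apps/<uuid:app_id>/workflows/draft/variables")
class DraftVariablesApi(Resource):  # illustrative resource, not the real one
    def get(self, app_id):
        # Decorating the class replaces a separate api.add_resource(...) call.
        return {"app_id": str(app_id), "items": []}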

View File

@ -1,15 +1,20 @@
from typing import cast
from flask_restx import Resource, marshal_with, reqparse
from flask_restx import Resource, fields, marshal_with, reqparse
from flask_restx.inputs import int_range
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from fields.end_user_fields import simple_end_user_fields
from fields.member_fields import simple_account_fields
from fields.workflow_run_fields import (
advanced_chat_workflow_run_for_list_fields,
advanced_chat_workflow_run_pagination_fields,
workflow_run_count_fields,
workflow_run_detail_fields,
workflow_run_for_list_fields,
workflow_run_node_execution_fields,
workflow_run_node_execution_list_fields,
workflow_run_pagination_fields,
)
@ -22,6 +27,71 @@ from services.workflow_run_service import WorkflowRunService
# Workflow run status choices for filtering
WORKFLOW_RUN_STATUS_CHOICES = ["running", "succeeded", "failed", "stopped", "partial-succeeded"]
# Register models for flask_restx to avoid dict type issues in Swagger
# Register in dependency order: base models first, then dependent models
# Base models
simple_account_model = console_ns.model("SimpleAccount", simple_account_fields)
simple_end_user_model = console_ns.model("SimpleEndUser", simple_end_user_fields)
# Models that depend on simple_account_fields
workflow_run_for_list_fields_copy = workflow_run_for_list_fields.copy()
workflow_run_for_list_fields_copy["created_by_account"] = fields.Nested(
simple_account_model, attribute="created_by_account", allow_null=True
)
workflow_run_for_list_model = console_ns.model("WorkflowRunForList", workflow_run_for_list_fields_copy)
advanced_chat_workflow_run_for_list_fields_copy = advanced_chat_workflow_run_for_list_fields.copy()
advanced_chat_workflow_run_for_list_fields_copy["created_by_account"] = fields.Nested(
simple_account_model, attribute="created_by_account", allow_null=True
)
advanced_chat_workflow_run_for_list_model = console_ns.model(
"AdvancedChatWorkflowRunForList", advanced_chat_workflow_run_for_list_fields_copy
)
workflow_run_detail_fields_copy = workflow_run_detail_fields.copy()
workflow_run_detail_fields_copy["created_by_account"] = fields.Nested(
simple_account_model, attribute="created_by_account", allow_null=True
)
workflow_run_detail_fields_copy["created_by_end_user"] = fields.Nested(
simple_end_user_model, attribute="created_by_end_user", allow_null=True
)
workflow_run_detail_model = console_ns.model("WorkflowRunDetail", workflow_run_detail_fields_copy)
workflow_run_node_execution_fields_copy = workflow_run_node_execution_fields.copy()
workflow_run_node_execution_fields_copy["created_by_account"] = fields.Nested(
simple_account_model, attribute="created_by_account", allow_null=True
)
workflow_run_node_execution_fields_copy["created_by_end_user"] = fields.Nested(
simple_end_user_model, attribute="created_by_end_user", allow_null=True
)
workflow_run_node_execution_model = console_ns.model(
"WorkflowRunNodeExecution", workflow_run_node_execution_fields_copy
)
# Simple models without nested dependencies
workflow_run_count_model = console_ns.model("WorkflowRunCount", workflow_run_count_fields)
# Pagination models that depend on list models
advanced_chat_workflow_run_pagination_fields_copy = advanced_chat_workflow_run_pagination_fields.copy()
advanced_chat_workflow_run_pagination_fields_copy["data"] = fields.List(
fields.Nested(advanced_chat_workflow_run_for_list_model), attribute="data"
)
advanced_chat_workflow_run_pagination_model = console_ns.model(
"AdvancedChatWorkflowRunPagination", advanced_chat_workflow_run_pagination_fields_copy
)
workflow_run_pagination_fields_copy = workflow_run_pagination_fields.copy()
workflow_run_pagination_fields_copy["data"] = fields.List(fields.Nested(workflow_run_for_list_model), attribute="data")
workflow_run_pagination_model = console_ns.model("WorkflowRunPagination", workflow_run_pagination_fields_copy)
workflow_run_node_execution_list_fields_copy = workflow_run_node_execution_list_fields.copy()
workflow_run_node_execution_list_fields_copy["data"] = fields.List(fields.Nested(workflow_run_node_execution_model))
workflow_run_node_execution_list_model = console_ns.model(
"WorkflowRunNodeExecutionList", workflow_run_node_execution_list_fields_copy
)
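The block above wraps the shared field dicts in registered namespace models so flask_restx can emit proper Swagger definitions instead of choking on plain dict types, registering base models before the models that nest them. A minimal sketch of the same pattern, with throwaway field definitions standing in for the imported ones:

from flask_restx import Namespace, fields

ns = Namespace("example")

# Raw field definitions, as they would come from a shared fields module.
simple_account_fields = {"id": fields.String(), "name": fields.String()}
run_fields = {"id": fields.String(), "status": fields.String()}

# Register the base model first, then build dependent models on a copy so the
# shared dict is not mutated in place.
simple_account_model = ns.model("SimpleAccountExample", simple_account_fields)

run_fields_copy = run_fields.copy()
run_fields_copy["created_by_account"] = fields.Nested(simple_account_model, allow_null=True)
run_model = ns.model("WorkflowRunExample", run_fields_copy)

# A pagination model whose "data" list nests the registered run model.
pagination_model = ns.model(
    "WorkflowRunPaginationExample",
    {
        "data": fields.List(fields.Nested(run_model)),
        "has_more": fields.Boolean(),
        "limit": fields.Integer(),
    },
)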
def _parse_workflow_run_list_args():
"""
@ -30,23 +100,25 @@ def _parse_workflow_run_list_args():
Returns:
Parsed arguments containing last_id, limit, status, and triggered_from filters
"""
parser = reqparse.RequestParser()
parser.add_argument("last_id", type=uuid_value, location="args")
parser.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
parser.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
parser.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
parser = (
reqparse.RequestParser()
.add_argument("last_id", type=uuid_value, location="args")
.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
)
)
return parser.parse_args()
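The rewrite above relies on RequestParser.add_argument returning the parser itself, so the whole definition collapses into one chained expression. A short standalone sketch of defining and consuming such a parser (plain str/int types stand in for the project's uuid_value and int_range validators):

from flask import Flask
from flask_restx import reqparse

app = Flask(__name__)

parser = (
    reqparse.RequestParser()
    .add_argument("last_id", type=str, location="args")
    .add_argument("limit", type=int, default=20, location="args")
    .add_argument("status", type=str, choices=("running", "succeeded"), location="args")
)

with app.test_request_context("/?limit=5&status=running"):
    args = parser.parse_args()
    print(args["limit"], args["status"])  # 5 running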
@ -58,46 +130,52 @@ def _parse_workflow_run_count_args():
Returns:
Parsed arguments containing status, time_range, and triggered_from filters
"""
parser = reqparse.RequestParser()
parser.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
parser.add_argument(
"time_range",
type=time_duration,
location="args",
required=False,
help="Time range filter (e.g., 7d, 4h, 30m, 30s)",
)
parser.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
parser = (
reqparse.RequestParser()
.add_argument(
"status",
type=str,
choices=WORKFLOW_RUN_STATUS_CHOICES,
location="args",
required=False,
)
.add_argument(
"time_range",
type=time_duration,
location="args",
required=False,
help="Time range filter (e.g., 7d, 4h, 30m, 30s)",
)
.add_argument(
"triggered_from",
type=str,
choices=["debugging", "app-run"],
location="args",
required=False,
help="Filter by trigger source: debugging or app-run",
)
)
return parser.parse_args()
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs")
class AdvancedChatAppWorkflowRunListApi(Resource):
@api.doc("get_advanced_chat_workflow_runs")
@api.doc(description="Get advanced chat workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"})
@api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"})
@api.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_fields)
@console_ns.doc("get_advanced_chat_workflow_runs")
@console_ns.doc(description="Get advanced chat workflow run list")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@console_ns.doc(
params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}
)
@console_ns.doc(
params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}
)
@console_ns.response(200, "Workflow runs retrieved successfully", advanced_chat_workflow_run_pagination_model)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
@marshal_with(advanced_chat_workflow_run_pagination_fields)
@marshal_with(advanced_chat_workflow_run_pagination_model)
def get(self, app_model: App):
"""
Get advanced chat app workflow run list
@ -121,11 +199,13 @@ class AdvancedChatAppWorkflowRunListApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/advanced-chat/workflow-runs/count")
class AdvancedChatAppWorkflowRunCountApi(Resource):
@api.doc("get_advanced_chat_workflow_runs_count")
@api.doc(description="Get advanced chat workflow runs count statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"})
@api.doc(
@console_ns.doc("get_advanced_chat_workflow_runs_count")
@console_ns.doc(description="Get advanced chat workflow runs count statistics")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(
params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}
)
@console_ns.doc(
params={
"time_range": (
"Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), "
@ -133,13 +213,15 @@ class AdvancedChatAppWorkflowRunCountApi(Resource):
)
}
)
@api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"})
@api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields)
@console_ns.doc(
params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}
)
@console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_model)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT])
@marshal_with(workflow_run_count_fields)
@marshal_with(workflow_run_count_model)
def get(self, app_model: App):
"""
Get advanced chat workflow runs count statistics
@ -166,18 +248,22 @@ class AdvancedChatAppWorkflowRunCountApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/workflow-runs")
class WorkflowRunListApi(Resource):
@api.doc("get_workflow_runs")
@api.doc(description="Get workflow run list")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"})
@api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"})
@api.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_fields)
@console_ns.doc("get_workflow_runs")
@console_ns.doc(description="Get workflow run list")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(params={"last_id": "Last run ID for pagination", "limit": "Number of items per page (1-100)"})
@console_ns.doc(
params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}
)
@console_ns.doc(
params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}
)
@console_ns.response(200, "Workflow runs retrieved successfully", workflow_run_pagination_model)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_run_pagination_fields)
@marshal_with(workflow_run_pagination_model)
def get(self, app_model: App):
"""
Get workflow run list
@ -201,11 +287,13 @@ class WorkflowRunListApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/count")
class WorkflowRunCountApi(Resource):
@api.doc("get_workflow_runs_count")
@api.doc(description="Get workflow runs count statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"})
@api.doc(
@console_ns.doc("get_workflow_runs_count")
@console_ns.doc(description="Get workflow runs count statistics")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(
params={"status": "Filter by status (optional): running, succeeded, failed, stopped, partial-succeeded"}
)
@console_ns.doc(
params={
"time_range": (
"Filter by time range (optional): e.g., 7d (7 days), 4h (4 hours), "
@ -213,13 +301,15 @@ class WorkflowRunCountApi(Resource):
)
}
)
@api.doc(params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"})
@api.response(200, "Workflow runs count retrieved successfully", workflow_run_count_fields)
@console_ns.doc(
params={"triggered_from": "Filter by trigger source (optional): debugging or app-run. Default: debugging"}
)
@console_ns.response(200, "Workflow runs count retrieved successfully", workflow_run_count_model)
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_run_count_fields)
@marshal_with(workflow_run_count_model)
def get(self, app_model: App):
"""
Get workflow runs count statistics
@ -246,16 +336,16 @@ class WorkflowRunCountApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>")
class WorkflowRunDetailApi(Resource):
@api.doc("get_workflow_run_detail")
@api.doc(description="Get workflow run detail")
@api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@api.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_fields)
@api.response(404, "Workflow run not found")
@console_ns.doc("get_workflow_run_detail")
@console_ns.doc(description="Get workflow run detail")
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@console_ns.response(200, "Workflow run detail retrieved successfully", workflow_run_detail_model)
@console_ns.response(404, "Workflow run not found")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_run_detail_fields)
@marshal_with(workflow_run_detail_model)
def get(self, app_model: App, run_id):
"""
Get workflow run detail
@ -270,16 +360,16 @@ class WorkflowRunDetailApi(Resource):
@console_ns.route("/apps/<uuid:app_id>/workflow-runs/<uuid:run_id>/node-executions")
class WorkflowRunNodeExecutionListApi(Resource):
@api.doc("get_workflow_run_node_executions")
@api.doc(description="Get workflow run node execution list")
@api.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@api.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_fields)
@api.response(404, "Workflow run not found")
@console_ns.doc("get_workflow_run_node_executions")
@console_ns.doc(description="Get workflow run node execution list")
@console_ns.doc(params={"app_id": "Application ID", "run_id": "Workflow run ID"})
@console_ns.response(200, "Node executions retrieved successfully", workflow_run_node_execution_list_model)
@console_ns.response(404, "Workflow run not found")
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_run_node_execution_list_fields)
@marshal_with(workflow_run_node_execution_list_model)
def get(self, app_model: App, run_id):
"""
Get workflow run node execution list

View File

@ -2,7 +2,7 @@ from flask import abort, jsonify
from flask_restx import Resource, reqparse
from sqlalchemy.orm import sessionmaker
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
@ -21,11 +21,13 @@ class WorkflowDailyRunsStatistic(Resource):
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
@api.doc("get_workflow_daily_runs_statistic")
@api.doc(description="Get workflow daily runs statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily runs statistics retrieved successfully")
@console_ns.doc("get_workflow_daily_runs_statistic")
@console_ns.doc(description="Get workflow daily runs statistics")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(
params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"}
)
@console_ns.response(200, "Daily runs statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@ -66,11 +68,13 @@ class WorkflowDailyTerminalsStatistic(Resource):
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
@api.doc("get_workflow_daily_terminals_statistic")
@api.doc(description="Get workflow daily terminals statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily terminals statistics retrieved successfully")
@console_ns.doc("get_workflow_daily_terminals_statistic")
@console_ns.doc(description="Get workflow daily terminals statistics")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(
params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"}
)
@console_ns.response(200, "Daily terminals statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@ -111,11 +115,13 @@ class WorkflowDailyTokenCostStatistic(Resource):
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
@api.doc("get_workflow_daily_token_cost_statistic")
@api.doc(description="Get workflow daily token cost statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Daily token cost statistics retrieved successfully")
@console_ns.doc("get_workflow_daily_token_cost_statistic")
@console_ns.doc(description="Get workflow daily token cost statistics")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(
params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"}
)
@console_ns.response(200, "Daily token cost statistics retrieved successfully")
@get_app_model
@setup_required
@login_required
@ -156,11 +162,13 @@ class WorkflowAverageAppInteractionStatistic(Resource):
session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
self._workflow_run_repo = DifyAPIRepositoryFactory.create_api_workflow_run_repository(session_maker)
@api.doc("get_workflow_average_app_interaction_statistic")
@api.doc(description="Get workflow average app interaction statistics")
@api.doc(params={"app_id": "Application ID"})
@api.doc(params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"})
@api.response(200, "Average app interaction statistics retrieved successfully")
@console_ns.doc("get_workflow_average_app_interaction_statistic")
@console_ns.doc(description="Get workflow average app interaction statistics")
@console_ns.doc(params={"app_id": "Application ID"})
@console_ns.doc(
params={"start": "Start date and time (YYYY-MM-DD HH:MM)", "end": "End date and time (YYYY-MM-DD HH:MM)"}
)
@console_ns.response(200, "Average app interaction statistics retrieved successfully")
@setup_required
@login_required
@account_initialization_required

View File

@ -0,0 +1,157 @@
import logging
from flask import request
from flask_restx import Resource, marshal_with
from pydantic import BaseModel
from sqlalchemy import select
from sqlalchemy.orm import Session
from werkzeug.exceptions import NotFound
from configs import dify_config
from extensions.ext_database import db
from fields.workflow_trigger_fields import trigger_fields, triggers_list_fields, webhook_trigger_fields
from libs.login import current_user, login_required
from models.enums import AppTriggerStatus
from models.model import Account, App, AppMode
from models.trigger import AppTrigger, WorkflowWebhookTrigger
from .. import console_ns
from ..app.wraps import get_app_model
from ..wraps import account_initialization_required, edit_permission_required, setup_required
logger = logging.getLogger(__name__)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
class Parser(BaseModel):
node_id: str
class ParserEnable(BaseModel):
trigger_id: str
enable_trigger: bool
console_ns.schema_model(Parser.__name__, Parser.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
console_ns.schema_model(
ParserEnable.__name__, ParserEnable.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)
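The two schema_model calls above publish the Pydantic models' JSON schemas to the namespace so they can back @console_ns.expect and appear in Swagger; the Swagger 2.0 ref template is needed because Pydantic v2 defaults to "#/$defs/{model}". A self-contained sketch of the same registration with an illustrative payload model:

from flask_restx import Namespace
from pydantic import BaseModel

ns = Namespace("example")
SWAGGER_2_0_REF_TEMPLATE = "#/definitions/{model}"

class EnableTriggerPayload(BaseModel):
    trigger_id: str
    enable_trigger: bool

# Register the JSON schema under the namespace so @ns.expect(ns.models[...])
# can reference it; request bodies are still validated with model_validate().
ns.schema_model(
    EnableTriggerPayload.__name__,
    EnableTriggerPayload.model_json_schema(ref_template=SWAGGER_2_0_REF_TEMPLATE),
)

payload = EnableTriggerPayload.model_validate({"trigger_id": "t-1", "enable_trigger": True})
print(payload.enable_trigger)  # True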
@console_ns.route("/apps/<uuid:app_id>/workflows/triggers/webhook")
class WebhookTriggerApi(Resource):
"""Webhook Trigger API"""
@console_ns.expect(console_ns.models[Parser.__name__])
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.WORKFLOW)
@marshal_with(webhook_trigger_fields)
def get(self, app_model: App):
"""Get webhook trigger for a node"""
args = Parser.model_validate(request.args.to_dict(flat=True)) # type: ignore
node_id = args.node_id
with Session(db.engine) as session:
# Get webhook trigger for this app and node
webhook_trigger = (
session.query(WorkflowWebhookTrigger)
.where(
WorkflowWebhookTrigger.app_id == app_model.id,
WorkflowWebhookTrigger.node_id == node_id,
)
.first()
)
if not webhook_trigger:
raise NotFound("Webhook trigger not found for this node")
return webhook_trigger
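The GET handler above validates query-string parameters by handing request.args to the Pydantic model rather than going through a RequestParser. A tiny standalone sketch of that step:

from flask import Flask, request
from pydantic import BaseModel

app = Flask(__name__)

class NodeQuery(BaseModel):
    node_id: str

with app.test_request_context("/?node_id=n1"):
    # to_dict(flat=True) collapses the MultiDict into a plain dict before
    # Pydantic type-checks it.
    query = NodeQuery.model_validate(request.args.to_dict(flat=True))
    print(query.node_id)  # n1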
@console_ns.route("/apps/<uuid:app_id>/triggers")
class AppTriggersApi(Resource):
"""App Triggers list API"""
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=AppMode.WORKFLOW)
@marshal_with(triggers_list_fields)
def get(self, app_model: App):
"""Get app triggers list"""
assert isinstance(current_user, Account)
assert current_user.current_tenant_id is not None
with Session(db.engine) as session:
# Get all triggers for this app using select API
triggers = (
session.execute(
select(AppTrigger)
.where(
AppTrigger.tenant_id == current_user.current_tenant_id,
AppTrigger.app_id == app_model.id,
)
.order_by(AppTrigger.created_at.desc(), AppTrigger.id.desc())
)
.scalars()
.all()
)
# Add computed icon field for each trigger
url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/"
for trigger in triggers:
if trigger.trigger_type == "trigger-plugin":
trigger.icon = url_prefix + trigger.provider_name + "/icon" # type: ignore
else:
trigger.icon = "" # type: ignore
return {"data": triggers}
@console_ns.route("/apps/<uuid:app_id>/trigger-enable")
class AppTriggerEnableApi(Resource):
@console_ns.expect(console_ns.models[ParserEnable.__name__], validate=True)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_app_model(mode=AppMode.WORKFLOW)
@marshal_with(trigger_fields)
def post(self, app_model: App):
"""Update app trigger (enable/disable)"""
args = ParserEnable.model_validate(console_ns.payload)
assert current_user.current_tenant_id is not None
trigger_id = args.trigger_id
with Session(db.engine) as session:
# Find the trigger using select
trigger = session.execute(
select(AppTrigger).where(
AppTrigger.id == trigger_id,
AppTrigger.tenant_id == current_user.current_tenant_id,
AppTrigger.app_id == app_model.id,
)
).scalar_one_or_none()
if not trigger:
raise NotFound("Trigger not found")
# Update status based on enable_trigger boolean
trigger.status = AppTriggerStatus.ENABLED if args.enable_trigger else AppTriggerStatus.DISABLED
session.commit()
session.refresh(trigger)
# Add computed icon field
url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/"
if trigger.trigger_type == "trigger-plugin":
trigger.icon = url_prefix + trigger.provider_name + "/icon" # type: ignore
else:
trigger.icon = "" # type: ignore
return trigger
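Both trigger endpoints above use the SQLAlchemy 2.0-style select() API: .scalars().all() for list queries and .scalar_one_or_none() for a single optional row. A self-contained sketch of that query pattern with a throwaway in-memory model instead of the real AppTrigger:

from sqlalchemy import String, create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class TriggerExample(Base):
    __tablename__ = "trigger_example"
    id: Mapped[str] = mapped_column(String, primary_key=True)
    app_id: Mapped[str] = mapped_column(String)
    status: Mapped[str] = mapped_column(String, default="disabled")

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(TriggerExample(id="t1", app_id="a1"))
    session.commit()

    # List query: execute(select(...)).scalars().all()
    triggers = session.execute(
        select(TriggerExample).where(TriggerExample.app_id == "a1")
    ).scalars().all()

    # Single row: scalar_one_or_none() returns the object or None.
    trigger = session.execute(
        select(TriggerExample).where(TriggerExample.id == "t1")
    ).scalar_one_or_none()
    if trigger is not None:
        trigger.status = "enabled"
        session.commit()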

View File

@ -23,15 +23,6 @@ def _load_app_model(app_id: str) -> App | None:
return app_model
def _load_app_model_with_trial(app_id: str) -> App | None:
app_model = (
db.session.query(App)
.where(App.id == app_id, App.status == "normal")
.first()
)
return app_model
def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def decorator(view_func: Callable[P1, R1]):
@wraps(view_func)
@ -71,44 +62,3 @@ def get_app_model(view: Callable[P, R] | None = None, *, mode: Union[AppMode, li
return decorator
else:
return decorator(view)
def get_app_model_with_trial(view: Callable[P, R] | None = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def decorator(view_func: Callable[P, R]):
@wraps(view_func)
def decorated_view(*args: P.args, **kwargs: P.kwargs):
if not kwargs.get("app_id"):
raise ValueError("missing app_id in path parameters")
app_id = kwargs.get("app_id")
app_id = str(app_id)
del kwargs["app_id"]
app_model = _load_app_model_with_trial(app_id)
if not app_model:
raise AppNotFoundError()
app_mode = AppMode.value_of(app_model.mode)
if mode is not None:
if isinstance(mode, list):
modes = mode
else:
modes = [mode]
if app_mode not in modes:
mode_values = {m.value for m in modes}
raise AppNotFoundError(f"App mode is not in the supported list: {mode_values}")
kwargs["app_model"] = app_model
return view_func(*args, **kwargs)
return decorated_view
if view is None:
return decorator
else:
return decorator(view)

View File

@ -2,7 +2,7 @@ from flask import request
from flask_restx import Resource, fields, reqparse
from constants.languages import supported_language
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.error import AlreadyActivateError
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
@ -20,13 +20,13 @@ active_check_parser = (
@console_ns.route("/activate/check")
class ActivateCheckApi(Resource):
@api.doc("check_activation_token")
@api.doc(description="Check if activation token is valid")
@api.expect(active_check_parser)
@api.response(
@console_ns.doc("check_activation_token")
@console_ns.doc(description="Check if activation token is valid")
@console_ns.expect(active_check_parser)
@console_ns.response(
200,
"Success",
api.model(
console_ns.model(
"ActivationCheckResponse",
{
"is_valid": fields.Boolean(description="Whether token is valid"),
@ -69,13 +69,13 @@ active_parser = (
@console_ns.route("/activate")
class ActivateApi(Resource):
@api.doc("activate_account")
@api.doc(description="Activate account with invitation token")
@api.expect(active_parser)
@api.response(
@console_ns.doc("activate_account")
@console_ns.doc(description="Activate account with invitation token")
@console_ns.expect(active_parser)
@console_ns.response(
200,
"Account activated successfully",
api.model(
console_ns.model(
"ActivationResponse",
{
"result": fields.String(description="Operation result"),
@ -83,7 +83,7 @@ class ActivateApi(Resource):
},
),
)
@api.response(400, "Already activated or invalid token")
@console_ns.response(400, "Already activated or invalid token")
def post(self):
args = active_parser.parse_args()

View File

@ -1,8 +1,8 @@
from flask_restx import Resource, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import console_ns
from controllers.console.auth.error import ApiKeyAuthFailedError
from controllers.console.wraps import is_admin_or_owner_required
from libs.login import current_account_with_tenant, login_required
from services.auth.api_key_auth_service import ApiKeyAuthService
@ -39,12 +39,10 @@ class ApiKeyAuthDataSourceBinding(Resource):
@setup_required
@login_required
@account_initialization_required
@is_admin_or_owner_required
def post(self):
# The role of the current user in the table must be admin or owner
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("category", type=str, required=True, nullable=False, location="json")
@ -65,12 +63,10 @@ class ApiKeyAuthDataSourceBindingDelete(Resource):
@setup_required
@login_required
@account_initialization_required
@is_admin_or_owner_required
def delete(self, binding_id):
# The role of the current user in the table must be admin or owner
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
_, current_tenant_id = current_account_with_tenant()
ApiKeyAuthService.delete_provider_auth(current_tenant_id, binding_id)
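The inline is_admin_or_owner checks are replaced by the @is_admin_or_owner_required decorator imported from controllers.console.wraps. Its real implementation is not part of this diff; a hypothetical minimal version of such a guard, with a stub object standing in for the project's current_user proxy, might look like:

from functools import wraps
from werkzeug.exceptions import Forbidden

class _StubUser:
    # Stand-in for the real current_user proxy; only the property the guard
    # reads is modelled here.
    is_admin_or_owner = False

current_user = _StubUser()

def is_admin_or_owner_required(view_func):
    # Hypothetical sketch: reject the request unless the caller is an admin
    # or owner. The actual decorator in the codebase may differ.
    @wraps(view_func)
    def decorated(*args, **kwargs):
        if not current_user.is_admin_or_owner:
            raise Forbidden()
        return view_func(*args, **kwargs)
    return decorated

@is_admin_or_owner_required
def delete_binding(binding_id: str):
    return {"deleted": binding_id}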

View File

@ -3,11 +3,11 @@ import logging
import httpx
from flask import current_app, redirect, request
from flask_restx import Resource, fields
from werkzeug.exceptions import Forbidden
from configs import dify_config
from controllers.console import api, console_ns
from libs.login import current_account_with_tenant, login_required
from controllers.console import console_ns
from controllers.console.wraps import is_admin_or_owner_required
from libs.login import login_required
from libs.oauth_data_source import NotionOAuth
from ..wraps import account_initialization_required, setup_required
@ -29,24 +29,22 @@ def get_oauth_providers():
@console_ns.route("/oauth/data-source/<string:provider>")
class OAuthDataSource(Resource):
@api.doc("oauth_data_source")
@api.doc(description="Get OAuth authorization URL for data source provider")
@api.doc(params={"provider": "Data source provider name (notion)"})
@api.response(
@console_ns.doc("oauth_data_source")
@console_ns.doc(description="Get OAuth authorization URL for data source provider")
@console_ns.doc(params={"provider": "Data source provider name (notion)"})
@console_ns.response(
200,
"Authorization URL or internal setup success",
api.model(
console_ns.model(
"OAuthDataSourceResponse",
{"data": fields.Raw(description="Authorization URL or 'internal' for internal setup")},
),
)
@api.response(400, "Invalid provider")
@api.response(403, "Admin privileges required")
@console_ns.response(400, "Invalid provider")
@console_ns.response(403, "Admin privileges required")
@is_admin_or_owner_required
def get(self, provider: str):
# The role of the current user in the table must be admin or owner
current_user, _ = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
with current_app.app_context():
oauth_provider = OAUTH_DATASOURCE_PROVIDERS.get(provider)
@ -65,17 +63,17 @@ class OAuthDataSource(Resource):
@console_ns.route("/oauth/data-source/callback/<string:provider>")
class OAuthDataSourceCallback(Resource):
@api.doc("oauth_data_source_callback")
@api.doc(description="Handle OAuth callback from data source provider")
@api.doc(
@console_ns.doc("oauth_data_source_callback")
@console_ns.doc(description="Handle OAuth callback from data source provider")
@console_ns.doc(
params={
"provider": "Data source provider name (notion)",
"code": "Authorization code from OAuth provider",
"error": "Error message from OAuth provider",
}
)
@api.response(302, "Redirect to console with result")
@api.response(400, "Invalid provider")
@console_ns.response(302, "Redirect to console with result")
@console_ns.response(400, "Invalid provider")
def get(self, provider: str):
OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
with current_app.app_context():
@ -96,17 +94,17 @@ class OAuthDataSourceCallback(Resource):
@console_ns.route("/oauth/data-source/binding/<string:provider>")
class OAuthDataSourceBinding(Resource):
@api.doc("oauth_data_source_binding")
@api.doc(description="Bind OAuth data source with authorization code")
@api.doc(
@console_ns.doc("oauth_data_source_binding")
@console_ns.doc(description="Bind OAuth data source with authorization code")
@console_ns.doc(
params={"provider": "Data source provider name (notion)", "code": "Authorization code from OAuth provider"}
)
@api.response(
@console_ns.response(
200,
"Data source binding success",
api.model("OAuthDataSourceBindingResponse", {"result": fields.String(description="Operation result")}),
console_ns.model("OAuthDataSourceBindingResponse", {"result": fields.String(description="Operation result")}),
)
@api.response(400, "Invalid provider or code")
@console_ns.response(400, "Invalid provider or code")
def get(self, provider: str):
OAUTH_DATASOURCE_PROVIDERS = get_oauth_providers()
with current_app.app_context():
@ -130,15 +128,15 @@ class OAuthDataSourceBinding(Resource):
@console_ns.route("/oauth/data-source/<string:provider>/<uuid:binding_id>/sync")
class OAuthDataSourceSync(Resource):
@api.doc("oauth_data_source_sync")
@api.doc(description="Sync data from OAuth data source")
@api.doc(params={"provider": "Data source provider name (notion)", "binding_id": "Data source binding ID"})
@api.response(
@console_ns.doc("oauth_data_source_sync")
@console_ns.doc(description="Sync data from OAuth data source")
@console_ns.doc(params={"provider": "Data source provider name (notion)", "binding_id": "Data source binding ID"})
@console_ns.response(
200,
"Data source sync success",
api.model("OAuthDataSourceSyncResponse", {"result": fields.String(description="Operation result")}),
console_ns.model("OAuthDataSourceSyncResponse", {"result": fields.String(description="Operation result")}),
)
@api.response(400, "Invalid provider or sync failed")
@console_ns.response(400, "Invalid provider or sync failed")
@setup_required
@login_required
@account_initialization_required

View File

@ -6,7 +6,7 @@ from flask_restx import Resource, fields, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.auth.error import (
EmailCodeError,
EmailPasswordResetLimitError,
@ -27,10 +27,10 @@ from services.feature_service import FeatureService
@console_ns.route("/forgot-password")
class ForgotPasswordSendEmailApi(Resource):
@api.doc("send_forgot_password_email")
@api.doc(description="Send password reset email")
@api.expect(
api.model(
@console_ns.doc("send_forgot_password_email")
@console_ns.doc(description="Send password reset email")
@console_ns.expect(
console_ns.model(
"ForgotPasswordEmailRequest",
{
"email": fields.String(required=True, description="Email address"),
@ -38,10 +38,10 @@ class ForgotPasswordSendEmailApi(Resource):
},
)
)
@api.response(
@console_ns.response(
200,
"Email sent successfully",
api.model(
console_ns.model(
"ForgotPasswordEmailResponse",
{
"result": fields.String(description="Operation result"),
@ -50,7 +50,7 @@ class ForgotPasswordSendEmailApi(Resource):
},
),
)
@api.response(400, "Invalid email or rate limit exceeded")
@console_ns.response(400, "Invalid email or rate limit exceeded")
@setup_required
@email_password_login_enabled
def post(self):
@ -85,10 +85,10 @@ class ForgotPasswordSendEmailApi(Resource):
@console_ns.route("/forgot-password/validity")
class ForgotPasswordCheckApi(Resource):
@api.doc("check_forgot_password_code")
@api.doc(description="Verify password reset code")
@api.expect(
api.model(
@console_ns.doc("check_forgot_password_code")
@console_ns.doc(description="Verify password reset code")
@console_ns.expect(
console_ns.model(
"ForgotPasswordCheckRequest",
{
"email": fields.String(required=True, description="Email address"),
@ -97,10 +97,10 @@ class ForgotPasswordCheckApi(Resource):
},
)
)
@api.response(
@console_ns.response(
200,
"Code verified successfully",
api.model(
console_ns.model(
"ForgotPasswordCheckResponse",
{
"is_valid": fields.Boolean(description="Whether code is valid"),
@ -109,7 +109,7 @@ class ForgotPasswordCheckApi(Resource):
},
),
)
@api.response(400, "Invalid code or token")
@console_ns.response(400, "Invalid code or token")
@setup_required
@email_password_login_enabled
def post(self):
@ -152,10 +152,10 @@ class ForgotPasswordCheckApi(Resource):
@console_ns.route("/forgot-password/resets")
class ForgotPasswordResetApi(Resource):
@api.doc("reset_password")
@api.doc(description="Reset password with verification token")
@api.expect(
api.model(
@console_ns.doc("reset_password")
@console_ns.doc(description="Reset password with verification token")
@console_ns.expect(
console_ns.model(
"ForgotPasswordResetRequest",
{
"token": fields.String(required=True, description="Verification token"),
@ -164,12 +164,12 @@ class ForgotPasswordResetApi(Resource):
},
)
)
@api.response(
@console_ns.response(
200,
"Password reset successfully",
api.model("ForgotPasswordResetResponse", {"result": fields.String(description="Operation result")}),
console_ns.model("ForgotPasswordResetResponse", {"result": fields.String(description="Operation result")}),
)
@api.response(400, "Invalid token or password mismatch")
@console_ns.response(400, "Invalid token or password mismatch")
@setup_required
@email_password_login_enabled
def post(self):

View File

@ -25,13 +25,10 @@ from controllers.console.wraps import email_password_login_enabled, setup_requir
from events.tenant_event import tenant_was_created
from libs.helper import email, extract_remote_ip
from libs.login import current_account_with_tenant
from libs.passport import PassportService
from libs.token import (
check_csrf_token,
clear_access_token_from_cookie,
clear_csrf_token_from_cookie,
clear_refresh_token_from_cookie,
extract_access_token,
extract_refresh_token,
set_access_token_to_cookie,
set_csrf_token_to_cookie,
@ -292,18 +289,3 @@ class RefreshTokenApi(Resource):
return response
except Exception as e:
return {"result": "fail", "message": str(e)}, 401
# this api helps frontend to check whether user is authenticated
# TODO: remove in the future. frontend should redirect to login page by catching 401 status
@console_ns.route("/login/status")
class LoginStatus(Resource):
def get(self):
token = extract_access_token(request)
res = True
try:
validated = PassportService().verify(token=token)
check_csrf_token(request=request, user_id=validated.get("user_id"))
except:
res = False
return {"logged_in": res}

View File

@ -26,7 +26,7 @@ from services.errors.account import AccountNotFoundError, AccountRegisterError
from services.errors.workspace import WorkSpaceNotAllowedCreateError, WorkSpaceNotFoundError
from services.feature_service import FeatureService
from .. import api, console_ns
from .. import console_ns
logger = logging.getLogger(__name__)
@ -56,11 +56,13 @@ def get_oauth_providers():
@console_ns.route("/oauth/login/<provider>")
class OAuthLogin(Resource):
@api.doc("oauth_login")
@api.doc(description="Initiate OAuth login process")
@api.doc(params={"provider": "OAuth provider name (github/google)", "invite_token": "Optional invitation token"})
@api.response(302, "Redirect to OAuth authorization URL")
@api.response(400, "Invalid provider")
@console_ns.doc("oauth_login")
@console_ns.doc(description="Initiate OAuth login process")
@console_ns.doc(
params={"provider": "OAuth provider name (github/google)", "invite_token": "Optional invitation token"}
)
@console_ns.response(302, "Redirect to OAuth authorization URL")
@console_ns.response(400, "Invalid provider")
def get(self, provider: str):
invite_token = request.args.get("invite_token") or None
OAUTH_PROVIDERS = get_oauth_providers()
@ -75,17 +77,17 @@ class OAuthLogin(Resource):
@console_ns.route("/oauth/authorize/<provider>")
class OAuthCallback(Resource):
@api.doc("oauth_callback")
@api.doc(description="Handle OAuth callback and complete login process")
@api.doc(
@console_ns.doc("oauth_callback")
@console_ns.doc(description="Handle OAuth callback and complete login process")
@console_ns.doc(
params={
"provider": "OAuth provider name (github/google)",
"code": "Authorization code from OAuth provider",
"state": "Optional state parameter (used for invite token)",
}
)
@api.response(302, "Redirect to console with access token")
@api.response(400, "OAuth process failed")
@console_ns.response(302, "Redirect to console with access token")
@console_ns.response(400, "OAuth process failed")
def get(self, provider: str):
OAUTH_PROVIDERS = get_oauth_providers()
with current_app.app_context():

View File

@ -1,4 +1,7 @@
from flask_restx import Resource, reqparse
import base64
from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import BadRequest
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, only_edition_cloud, setup_required
@ -41,3 +44,37 @@ class Invoices(Resource):
current_user, current_tenant_id = current_account_with_tenant()
BillingService.is_tenant_owner_or_admin(current_user)
return BillingService.get_invoices(current_user.email, current_tenant_id)
@console_ns.route("/billing/partners/<string:partner_key>/tenants")
class PartnerTenants(Resource):
@console_ns.doc("sync_partner_tenants_bindings")
@console_ns.doc(description="Sync partner tenants bindings")
@console_ns.doc(params={"partner_key": "Partner key"})
@console_ns.expect(
console_ns.model(
"SyncPartnerTenantsBindingsRequest",
{"click_id": fields.String(required=True, description="Click Id from partner referral link")},
)
)
@console_ns.response(200, "Tenants synced to partner successfully")
@console_ns.response(400, "Invalid partner information")
@setup_required
@login_required
@account_initialization_required
@only_edition_cloud
def put(self, partner_key: str):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("click_id", required=True, type=str, location="json")
args = parser.parse_args()
try:
click_id = args["click_id"]
decoded_partner_key = base64.b64decode(partner_key).decode("utf-8")
except Exception:
raise BadRequest("Invalid partner_key")
if not click_id or not decoded_partner_key or not current_user.id:
raise BadRequest("Invalid partner information")
return BillingService.sync_partner_tenants_bindings(current_user.id, decoded_partner_key, click_id)
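The endpoint above treats the partner_key path segment as base64 and converts any decode failure into a 400. A short standalone sketch of that handling:

import base64

def decode_partner_key(partner_key: str) -> str:
    # Mirrors the BadRequest path above: any malformed input becomes one error.
    try:
        return base64.b64decode(partner_key).decode("utf-8")
    except Exception as exc:
        raise ValueError("Invalid partner_key") from exc

encoded = base64.b64encode(b"acme-partner").decode()
print(decode_partner_key(encoded))  # acme-partner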

View File

@ -7,14 +7,18 @@ from werkzeug.exceptions import Forbidden, NotFound
import services
from configs import dify_config
from controllers.console import api, console_ns
from controllers.console.apikey import api_key_fields, api_key_list
from controllers.console import console_ns
from controllers.console.apikey import (
api_key_item_model,
api_key_list_model,
)
from controllers.console.app.error import ProviderNotInitializeError
from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_rate_limit_check,
enterprise_license_required,
is_admin_or_owner_required,
setup_required,
)
from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError
@ -26,8 +30,22 @@ from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
from core.rag.retrieval.retrieval_methods import RetrievalMethod
from extensions.ext_database import db
from fields.app_fields import related_app_list
from fields.dataset_fields import dataset_detail_fields, dataset_query_detail_fields
from fields.app_fields import app_detail_kernel_fields, related_app_list
from fields.dataset_fields import (
dataset_detail_fields,
dataset_fields,
dataset_query_detail_fields,
dataset_retrieval_model_fields,
doc_metadata_fields,
external_knowledge_info_fields,
external_retrieval_model_fields,
icon_info_fields,
keyword_setting_fields,
reranking_model_fields,
tag_fields,
vector_setting_fields,
weighted_score_fields,
)
from fields.document_fields import document_status_fields
from libs.login import current_account_with_tenant, login_required
from libs.validators import validate_description_length
@ -37,6 +55,58 @@ from models.provider_ids import ModelProviderID
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
def _get_or_create_model(model_name: str, field_def):
existing = console_ns.models.get(model_name)
if existing is None:
existing = console_ns.model(model_name, field_def)
return existing
# Register models for flask_restx to avoid dict type issues in Swagger
dataset_base_model = _get_or_create_model("DatasetBase", dataset_fields)
tag_model = _get_or_create_model("Tag", tag_fields)
keyword_setting_model = _get_or_create_model("DatasetKeywordSetting", keyword_setting_fields)
vector_setting_model = _get_or_create_model("DatasetVectorSetting", vector_setting_fields)
weighted_score_fields_copy = weighted_score_fields.copy()
weighted_score_fields_copy["keyword_setting"] = fields.Nested(keyword_setting_model)
weighted_score_fields_copy["vector_setting"] = fields.Nested(vector_setting_model)
weighted_score_model = _get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy)
reranking_model = _get_or_create_model("DatasetRerankingModel", reranking_model_fields)
dataset_retrieval_model_fields_copy = dataset_retrieval_model_fields.copy()
dataset_retrieval_model_fields_copy["reranking_model"] = fields.Nested(reranking_model)
dataset_retrieval_model_fields_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True)
dataset_retrieval_model = _get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy)
external_knowledge_info_model = _get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields)
external_retrieval_model = _get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields)
doc_metadata_model = _get_or_create_model("DatasetDocMetadata", doc_metadata_fields)
icon_info_model = _get_or_create_model("DatasetIconInfo", icon_info_fields)
dataset_detail_fields_copy = dataset_detail_fields.copy()
dataset_detail_fields_copy["retrieval_model_dict"] = fields.Nested(dataset_retrieval_model)
dataset_detail_fields_copy["tags"] = fields.List(fields.Nested(tag_model))
dataset_detail_fields_copy["external_knowledge_info"] = fields.Nested(external_knowledge_info_model)
dataset_detail_fields_copy["external_retrieval_model"] = fields.Nested(external_retrieval_model, allow_null=True)
dataset_detail_fields_copy["doc_metadata"] = fields.List(fields.Nested(doc_metadata_model))
dataset_detail_fields_copy["icon_info"] = fields.Nested(icon_info_model)
dataset_detail_model = _get_or_create_model("DatasetDetail", dataset_detail_fields_copy)
dataset_query_detail_model = _get_or_create_model("DatasetQueryDetail", dataset_query_detail_fields)
app_detail_kernel_model = _get_or_create_model("AppDetailKernel", app_detail_kernel_fields)
related_app_list_copy = related_app_list.copy()
related_app_list_copy["data"] = fields.List(fields.Nested(app_detail_kernel_model))
related_app_list_model = _get_or_create_model("RelatedAppList", related_app_list_copy)
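_get_or_create_model makes the registrations above idempotent: if a model name is already present in console_ns.models (for example because another module registered it first), the existing model is reused instead of being redefined. A small sketch of that behaviour:

from flask_restx import Namespace, fields

ns = Namespace("example")

def _get_or_create_model(model_name, field_def):
    # Reuse an already-registered model rather than redefining it, so repeated
    # registrations under the same name do not clash.
    existing = ns.models.get(model_name)
    if existing is None:
        existing = ns.model(model_name, field_def)
    return existing

tag_model = _get_or_create_model("Tag", {"id": fields.String(), "name": fields.String()})
same_model = _get_or_create_model("Tag", {"id": fields.String(), "name": fields.String()})
assert tag_model is same_model  # second call returns the cached registration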
def _validate_name(name: str) -> str:
if not name or len(name) < 1 or len(name) > 40:
raise ValueError("Name must be between 1 to 40 characters.")
@ -118,9 +188,9 @@ def _get_retrieval_methods_by_vector_type(vector_type: str | None, is_mock: bool
@console_ns.route("/datasets")
class DatasetListApi(Resource):
@api.doc("get_datasets")
@api.doc(description="Get list of datasets")
@api.doc(
@console_ns.doc("get_datasets")
@console_ns.doc(description="Get list of datasets")
@console_ns.doc(
params={
"page": "Page number (default: 1)",
"limit": "Number of items per page (default: 20)",
@ -130,7 +200,7 @@ class DatasetListApi(Resource):
"include_all": "Include all datasets (default: false)",
}
)
@api.response(200, "Datasets retrieved successfully")
@console_ns.response(200, "Datasets retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -183,10 +253,10 @@ class DatasetListApi(Resource):
response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
return response, 200
@api.doc("create_dataset")
@api.doc(description="Create a new dataset")
@api.expect(
api.model(
@console_ns.doc("create_dataset")
@console_ns.doc(description="Create a new dataset")
@console_ns.expect(
console_ns.model(
"CreateDatasetRequest",
{
"name": fields.String(required=True, description="Dataset name (1-40 characters)"),
@ -199,8 +269,8 @@ class DatasetListApi(Resource):
},
)
)
@api.response(201, "Dataset created successfully")
@api.response(400, "Invalid request parameters")
@console_ns.response(201, "Dataset created successfully")
@console_ns.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@ -278,12 +348,12 @@ class DatasetListApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>")
class DatasetApi(Resource):
@api.doc("get_dataset")
@api.doc(description="Get dataset details")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Dataset retrieved successfully", dataset_detail_fields)
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@console_ns.doc("get_dataset")
@console_ns.doc(description="Get dataset details")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.response(200, "Dataset retrieved successfully", dataset_detail_model)
@console_ns.response(404, "Dataset not found")
@console_ns.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -327,10 +397,10 @@ class DatasetApi(Resource):
return data, 200
@api.doc("update_dataset")
@api.doc(description="Update dataset details")
@api.expect(
api.model(
@console_ns.doc("update_dataset")
@console_ns.doc(description="Update dataset details")
@console_ns.expect(
console_ns.model(
"UpdateDatasetRequest",
{
"name": fields.String(description="Dataset name"),
@ -341,9 +411,9 @@ class DatasetApi(Resource):
},
)
)
@api.response(200, "Dataset updated successfully", dataset_detail_fields)
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@console_ns.response(200, "Dataset updated successfully", dataset_detail_model)
@console_ns.response(404, "Dataset not found")
@console_ns.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -487,10 +557,10 @@ class DatasetApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/use-check")
class DatasetUseCheckApi(Resource):
@api.doc("check_dataset_use")
@api.doc(description="Check if dataset is in use")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Dataset use status retrieved successfully")
@console_ns.doc("check_dataset_use")
@console_ns.doc(description="Check if dataset is in use")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.response(200, "Dataset use status retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -503,10 +573,10 @@ class DatasetUseCheckApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/queries")
class DatasetQueryApi(Resource):
@api.doc("get_dataset_queries")
@api.doc(description="Get dataset query history")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Query history retrieved successfully", dataset_query_detail_fields)
@console_ns.doc("get_dataset_queries")
@console_ns.doc(description="Get dataset query history")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.response(200, "Query history retrieved successfully", dataset_query_detail_model)
@setup_required
@login_required
@account_initialization_required
@ -528,7 +598,7 @@ class DatasetQueryApi(Resource):
dataset_queries, total = DatasetService.get_dataset_queries(dataset_id=dataset.id, page=page, per_page=limit)
response = {
"data": marshal(dataset_queries, dataset_query_detail_fields),
"data": marshal(dataset_queries, dataset_query_detail_model),
"has_more": len(dataset_queries) == limit,
"limit": limit,
"total": total,
@ -539,9 +609,9 @@ class DatasetQueryApi(Resource):
@console_ns.route("/datasets/indexing-estimate")
class DatasetIndexingEstimateApi(Resource):
@api.doc("estimate_dataset_indexing")
@api.doc(description="Estimate dataset indexing cost")
@api.response(200, "Indexing estimate calculated successfully")
@console_ns.doc("estimate_dataset_indexing")
@console_ns.doc(description="Estimate dataset indexing cost")
@console_ns.response(200, "Indexing estimate calculated successfully")
@setup_required
@login_required
@account_initialization_required
@ -649,14 +719,14 @@ class DatasetIndexingEstimateApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/related-apps")
class DatasetRelatedAppListApi(Resource):
@api.doc("get_dataset_related_apps")
@api.doc(description="Get applications related to dataset")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Related apps retrieved successfully", related_app_list)
@console_ns.doc("get_dataset_related_apps")
@console_ns.doc(description="Get applications related to dataset")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.response(200, "Related apps retrieved successfully", related_app_list_model)
@setup_required
@login_required
@account_initialization_required
@marshal_with(related_app_list)
@marshal_with(related_app_list_model)
def get(self, dataset_id):
current_user, _ = current_account_with_tenant()
dataset_id_str = str(dataset_id)
@ -682,10 +752,10 @@ class DatasetRelatedAppListApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/indexing-status")
class DatasetIndexingStatusApi(Resource):
@api.doc("get_dataset_indexing_status")
@api.doc(description="Get dataset indexing status")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Indexing status retrieved successfully")
@console_ns.doc("get_dataset_indexing_status")
@console_ns.doc(description="Get dataset indexing status")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.response(200, "Indexing status retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -737,13 +807,13 @@ class DatasetApiKeyApi(Resource):
token_prefix = "dataset-"
resource_type = "dataset"
@api.doc("get_dataset_api_keys")
@api.doc(description="Get dataset API keys")
@api.response(200, "API keys retrieved successfully", api_key_list)
@console_ns.doc("get_dataset_api_keys")
@console_ns.doc(description="Get dataset API keys")
@console_ns.response(200, "API keys retrieved successfully", api_key_list_model)
@setup_required
@login_required
@account_initialization_required
@marshal_with(api_key_list)
@marshal_with(api_key_list_model)
def get(self):
_, current_tenant_id = current_account_with_tenant()
keys = db.session.scalars(
@ -753,13 +823,11 @@ class DatasetApiKeyApi(Resource):
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
@marshal_with(api_key_fields)
@marshal_with(api_key_item_model)
def post(self):
# The role of the current user in the ta table must be admin or owner
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.is_admin_or_owner:
raise Forbidden()
_, current_tenant_id = current_account_with_tenant()
current_key_count = (
db.session.query(ApiToken)
@ -768,7 +836,7 @@ class DatasetApiKeyApi(Resource):
)
if current_key_count >= self.max_keys:
api.abort(
console_ns.abort(
400,
message=f"Cannot create more than {self.max_keys} API keys for this resource type.",
code="max_keys_exceeded",
@ -788,21 +856,17 @@ class DatasetApiKeyApi(Resource):
class DatasetApiDeleteApi(Resource):
resource_type = "dataset"
@api.doc("delete_dataset_api_key")
@api.doc(description="Delete dataset API key")
@api.doc(params={"api_key_id": "API key ID"})
@api.response(204, "API key deleted successfully")
@console_ns.doc("delete_dataset_api_key")
@console_ns.doc(description="Delete dataset API key")
@console_ns.doc(params={"api_key_id": "API key ID"})
@console_ns.response(204, "API key deleted successfully")
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
def delete(self, api_key_id):
current_user, current_tenant_id = current_account_with_tenant()
_, current_tenant_id = current_account_with_tenant()
api_key_id = str(api_key_id)
# The role of the current user in the ta table must be admin or owner
if not current_user.is_admin_or_owner:
raise Forbidden()
key = (
db.session.query(ApiToken)
.where(
@ -814,7 +878,7 @@ class DatasetApiDeleteApi(Resource):
)
if key is None:
api.abort(404, message="API key not found")
console_ns.abort(404, message="API key not found")
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()
@ -837,9 +901,9 @@ class DatasetEnableApiApi(Resource):
@console_ns.route("/datasets/api-base-info")
class DatasetApiBaseUrlApi(Resource):
@api.doc("get_dataset_api_base_info")
@api.doc(description="Get dataset API base information")
@api.response(200, "API base info retrieved successfully")
@console_ns.doc("get_dataset_api_base_info")
@console_ns.doc(description="Get dataset API base information")
@console_ns.response(200, "API base info retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -849,9 +913,9 @@ class DatasetApiBaseUrlApi(Resource):
@console_ns.route("/datasets/retrieval-setting")
class DatasetRetrievalSettingApi(Resource):
@api.doc("get_dataset_retrieval_setting")
@api.doc(description="Get dataset retrieval settings")
@api.response(200, "Retrieval settings retrieved successfully")
@console_ns.doc("get_dataset_retrieval_setting")
@console_ns.doc(description="Get dataset retrieval settings")
@console_ns.response(200, "Retrieval settings retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -862,10 +926,10 @@ class DatasetRetrievalSettingApi(Resource):
@console_ns.route("/datasets/retrieval-setting/<string:vector_type>")
class DatasetRetrievalSettingMockApi(Resource):
@api.doc("get_dataset_retrieval_setting_mock")
@api.doc(description="Get mock dataset retrieval settings by vector type")
@api.doc(params={"vector_type": "Vector store type"})
@api.response(200, "Mock retrieval settings retrieved successfully")
@console_ns.doc("get_dataset_retrieval_setting_mock")
@console_ns.doc(description="Get mock dataset retrieval settings by vector type")
@console_ns.doc(params={"vector_type": "Vector store type"})
@console_ns.response(200, "Mock retrieval settings retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -875,11 +939,11 @@ class DatasetRetrievalSettingMockApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/error-docs")
class DatasetErrorDocs(Resource):
@api.doc("get_dataset_error_docs")
@api.doc(description="Get dataset error documents")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Error documents retrieved successfully")
@api.response(404, "Dataset not found")
@console_ns.doc("get_dataset_error_docs")
@console_ns.doc(description="Get dataset error documents")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.response(200, "Error documents retrieved successfully")
@console_ns.response(404, "Dataset not found")
@setup_required
@login_required
@account_initialization_required
@ -895,12 +959,12 @@ class DatasetErrorDocs(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/permission-part-users")
class DatasetPermissionUserListApi(Resource):
@api.doc("get_dataset_permission_users")
@api.doc(description="Get dataset permission user list")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Permission users retrieved successfully")
@api.response(404, "Dataset not found")
@api.response(403, "Permission denied")
@console_ns.doc("get_dataset_permission_users")
@console_ns.doc(description="Get dataset permission user list")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.response(200, "Permission users retrieved successfully")
@console_ns.response(404, "Dataset not found")
@console_ns.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@ -924,11 +988,11 @@ class DatasetPermissionUserListApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/auto-disable-logs")
class DatasetAutoDisableLogApi(Resource):
@api.doc("get_dataset_auto_disable_logs")
@api.doc(description="Get dataset auto disable logs")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.response(200, "Auto disable logs retrieved successfully")
@api.response(404, "Dataset not found")
@console_ns.doc("get_dataset_auto_disable_logs")
@console_ns.doc(description="Get dataset auto disable logs")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.response(200, "Auto disable logs retrieved successfully")
@console_ns.response(404, "Dataset not found")
@setup_required
@login_required
@account_initialization_required

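The hunks above move every piece of Swagger metadata from the shared api object onto the console_ns namespace and swap raw field dicts for registered response models (related_app_list_model, api_key_list_model, ...). A minimal, runnable sketch of that namespace-scoped style follows; the namespace path, route, and model fields are illustrative stand-ins, not the repository's actual definitions.

from flask import Flask
from flask_restx import Api, Namespace, Resource, fields

app = Flask(__name__)
api = Api(app)

# Illustrative namespace; the real console_ns is defined in controllers.console.
console_ns = Namespace("console", path="/console/api")
api.add_namespace(console_ns)

# Response model registered on the namespace so Swagger can reference it by name.
example_dataset_model = console_ns.model(
    "ExampleDataset",
    {
        "id": fields.String(description="Dataset ID"),
        "name": fields.String(description="Dataset name"),
    },
)

@console_ns.route("/datasets/<uuid:dataset_id>")
class ExampleDatasetApi(Resource):
    # Docs, responses, and marshalling all hang off console_ns, not the global api.
    @console_ns.doc("get_example_dataset")
    @console_ns.doc(description="Get a dataset by ID")
    @console_ns.doc(params={"dataset_id": "Dataset ID"})
    @console_ns.response(200, "Dataset retrieved successfully", example_dataset_model)
    @console_ns.marshal_with(example_dataset_model)
    def get(self, dataset_id):
        return {"id": str(dataset_id), "name": "demo"}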
View File

@ -11,7 +11,7 @@ from sqlalchemy import asc, desc, select
from werkzeug.exceptions import Forbidden, NotFound
import services
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.app.error import (
ProviderModelCurrentlyNotSupportError,
ProviderNotInitializeError,
@ -45,9 +45,11 @@ from core.plugin.impl.exc import PluginDaemonClientSideError
from core.rag.extractor.entity.datasource_type import DatasourceType
from core.rag.extractor.entity.extract_setting import ExtractSetting, NotionInfo, WebsiteInfo
from extensions.ext_database import db
from fields.dataset_fields import dataset_fields
from fields.document_fields import (
dataset_and_document_fields,
document_fields,
document_metadata_fields,
document_status_fields,
document_with_segments_fields,
)
@ -61,6 +63,36 @@ from services.entities.knowledge_entities.knowledge_entities import KnowledgeCon
logger = logging.getLogger(__name__)
def _get_or_create_model(model_name: str, field_def):
existing = console_ns.models.get(model_name)
if existing is None:
existing = console_ns.model(model_name, field_def)
return existing
# Register models for flask_restx to avoid dict type issues in Swagger
dataset_model = _get_or_create_model("Dataset", dataset_fields)
document_metadata_model = _get_or_create_model("DocumentMetadata", document_metadata_fields)
document_fields_copy = document_fields.copy()
document_fields_copy["doc_metadata"] = fields.List(
fields.Nested(document_metadata_model), attribute="doc_metadata_details"
)
document_model = _get_or_create_model("Document", document_fields_copy)
document_with_segments_fields_copy = document_with_segments_fields.copy()
document_with_segments_fields_copy["doc_metadata"] = fields.List(
fields.Nested(document_metadata_model), attribute="doc_metadata_details"
)
document_with_segments_model = _get_or_create_model("DocumentWithSegments", document_with_segments_fields_copy)
dataset_and_document_fields_copy = dataset_and_document_fields.copy()
dataset_and_document_fields_copy["dataset"] = fields.Nested(dataset_model)
dataset_and_document_fields_copy["documents"] = fields.List(fields.Nested(document_model))
dataset_and_document_model = _get_or_create_model("DatasetAndDocument", dataset_and_document_fields_copy)
class DocumentResource(Resource):
def get_document(self, dataset_id: str, document_id: str) -> Document:
current_user, current_tenant_id = current_account_with_tenant()
@ -104,10 +136,10 @@ class DocumentResource(Resource):
@console_ns.route("/datasets/process-rule")
class GetProcessRuleApi(Resource):
@api.doc("get_process_rule")
@api.doc(description="Get dataset document processing rules")
@api.doc(params={"document_id": "Document ID (optional)"})
@api.response(200, "Process rules retrieved successfully")
@console_ns.doc("get_process_rule")
@console_ns.doc(description="Get dataset document processing rules")
@console_ns.doc(params={"document_id": "Document ID (optional)"})
@console_ns.response(200, "Process rules retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -152,9 +184,9 @@ class GetProcessRuleApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/documents")
class DatasetDocumentListApi(Resource):
@api.doc("get_dataset_documents")
@api.doc(description="Get documents in a dataset")
@api.doc(
@console_ns.doc("get_dataset_documents")
@console_ns.doc(description="Get documents in a dataset")
@console_ns.doc(
params={
"dataset_id": "Dataset ID",
"page": "Page number (default: 1)",
@ -162,19 +194,20 @@ class DatasetDocumentListApi(Resource):
"keyword": "Search keyword",
"sort": "Sort order (default: -created_at)",
"fetch": "Fetch full details (default: false)",
"status": "Filter documents by display status",
}
)
@api.response(200, "Documents retrieved successfully")
@console_ns.response(200, "Documents retrieved successfully")
@setup_required
@login_required
@account_initialization_required
def get(self, dataset_id):
def get(self, dataset_id: str):
current_user, current_tenant_id = current_account_with_tenant()
dataset_id = str(dataset_id)
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
search = request.args.get("keyword", default=None, type=str)
sort = request.args.get("sort", default="-created_at", type=str)
status = request.args.get("status", default=None, type=str)
# "yes", "true", "t", "y", "1" convert to True, while others convert to False.
try:
fetch_val = request.args.get("fetch", default="false")
@ -203,6 +236,9 @@ class DatasetDocumentListApi(Resource):
query = select(Document).filter_by(dataset_id=str(dataset_id), tenant_id=current_tenant_id)
if status:
query = DocumentService.apply_display_status_filter(query, status)
if search:
search = f"%{search}%"
query = query.where(Document.name.like(search))
@ -271,7 +307,7 @@ class DatasetDocumentListApi(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(dataset_and_document_fields)
@marshal_with(dataset_and_document_model)
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self, dataset_id):
@ -352,10 +388,10 @@ class DatasetDocumentListApi(Resource):
@console_ns.route("/datasets/init")
class DatasetInitApi(Resource):
@api.doc("init_dataset")
@api.doc(description="Initialize dataset with documents")
@api.expect(
api.model(
@console_ns.doc("init_dataset")
@console_ns.doc(description="Initialize dataset with documents")
@console_ns.expect(
console_ns.model(
"DatasetInitRequest",
{
"upload_file_id": fields.String(required=True, description="Upload file ID"),
@ -365,12 +401,12 @@ class DatasetInitApi(Resource):
},
)
)
@api.response(201, "Dataset initialized successfully", dataset_and_document_fields)
@api.response(400, "Invalid request parameters")
@console_ns.response(201, "Dataset initialized successfully", dataset_and_document_model)
@console_ns.response(400, "Invalid request parameters")
@setup_required
@login_required
@account_initialization_required
@marshal_with(dataset_and_document_fields)
@marshal_with(dataset_and_document_model)
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self):
@ -441,12 +477,12 @@ class DatasetInitApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-estimate")
class DocumentIndexingEstimateApi(DocumentResource):
@api.doc("estimate_document_indexing")
@api.doc(description="Estimate document indexing cost")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.response(200, "Indexing estimate calculated successfully")
@api.response(404, "Document not found")
@api.response(400, "Document already finished")
@console_ns.doc("estimate_document_indexing")
@console_ns.doc(description="Estimate document indexing cost")
@console_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@console_ns.response(200, "Indexing estimate calculated successfully")
@console_ns.response(404, "Document not found")
@console_ns.response(400, "Document already finished")
@setup_required
@login_required
@account_initialization_required
@ -656,11 +692,11 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
class DocumentIndexingStatusApi(DocumentResource):
@api.doc("get_document_indexing_status")
@api.doc(description="Get document indexing status")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.response(200, "Indexing status retrieved successfully")
@api.response(404, "Document not found")
@console_ns.doc("get_document_indexing_status")
@console_ns.doc(description="Get document indexing status")
@console_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@console_ns.response(200, "Indexing status retrieved successfully")
@console_ns.response(404, "Document not found")
@setup_required
@login_required
@account_initialization_required
@ -706,17 +742,17 @@ class DocumentIndexingStatusApi(DocumentResource):
class DocumentApi(DocumentResource):
METADATA_CHOICES = {"all", "only", "without"}
@api.doc("get_document")
@api.doc(description="Get document details")
@api.doc(
@console_ns.doc("get_document")
@console_ns.doc(description="Get document details")
@console_ns.doc(
params={
"dataset_id": "Dataset ID",
"document_id": "Document ID",
"metadata": "Metadata inclusion (all/only/without)",
}
)
@api.response(200, "Document retrieved successfully")
@api.response(404, "Document not found")
@console_ns.response(200, "Document retrieved successfully")
@console_ns.response(404, "Document not found")
@setup_required
@login_required
@account_initialization_required
@ -827,14 +863,14 @@ class DocumentApi(DocumentResource):
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>")
class DocumentProcessingApi(DocumentResource):
@api.doc("update_document_processing")
@api.doc(description="Update document processing status (pause/resume)")
@api.doc(
@console_ns.doc("update_document_processing")
@console_ns.doc(description="Update document processing status (pause/resume)")
@console_ns.doc(
params={"dataset_id": "Dataset ID", "document_id": "Document ID", "action": "Action to perform (pause/resume)"}
)
@api.response(200, "Processing status updated successfully")
@api.response(404, "Document not found")
@api.response(400, "Invalid action")
@console_ns.response(200, "Processing status updated successfully")
@console_ns.response(404, "Document not found")
@console_ns.response(400, "Invalid action")
@setup_required
@login_required
@account_initialization_required
@ -872,11 +908,11 @@ class DocumentProcessingApi(DocumentResource):
@console_ns.route("/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
class DocumentMetadataApi(DocumentResource):
@api.doc("update_document_metadata")
@api.doc(description="Update document metadata")
@api.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@api.expect(
api.model(
@console_ns.doc("update_document_metadata")
@console_ns.doc(description="Update document metadata")
@console_ns.doc(params={"dataset_id": "Dataset ID", "document_id": "Document ID"})
@console_ns.expect(
console_ns.model(
"UpdateDocumentMetadataRequest",
{
"doc_type": fields.String(description="Document type"),
@ -884,9 +920,9 @@ class DocumentMetadataApi(DocumentResource):
},
)
)
@api.response(200, "Document metadata updated successfully")
@api.response(404, "Document not found")
@api.response(403, "Permission denied")
@console_ns.response(200, "Document metadata updated successfully")
@console_ns.response(404, "Document not found")
@console_ns.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required

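The new _get_or_create_model helper above looks a model up in console_ns.models before registering it, so importing the module through more than one path cannot register the same Swagger model name twice. A rough standalone sketch of the idea, with an illustrative namespace and a toy field dict:

from flask_restx import Namespace, fields

ns = Namespace("example")  # stand-in for console_ns

def get_or_create_model(model_name: str, field_def):
    # Namespace.models maps registered model names to model objects;
    # reuse an existing entry instead of registering a duplicate.
    existing = ns.models.get(model_name)
    if existing is None:
        existing = ns.model(model_name, field_def)
    return existing

document_metadata_fields = {
    "id": fields.String,
    "name": fields.String,
    "value": fields.String,
}

# Calling the helper twice hands back the same registered model object.
first = get_or_create_model("DocumentMetadata", document_metadata_fields)
second = get_or_create_model("DocumentMetadata", document_metadata_fields)
assert first is second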
View File

@ -3,10 +3,22 @@ from flask_restx import Resource, fields, marshal, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.datasets.error import DatasetNameDuplicateError
from controllers.console.wraps import account_initialization_required, setup_required
from fields.dataset_fields import dataset_detail_fields
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from fields.dataset_fields import (
dataset_detail_fields,
dataset_retrieval_model_fields,
doc_metadata_fields,
external_knowledge_info_fields,
external_retrieval_model_fields,
icon_info_fields,
keyword_setting_fields,
reranking_model_fields,
tag_fields,
vector_setting_fields,
weighted_score_fields,
)
from libs.login import current_account_with_tenant, login_required
from services.dataset_service import DatasetService
from services.external_knowledge_service import ExternalDatasetService
@ -14,6 +26,51 @@ from services.hit_testing_service import HitTestingService
from services.knowledge_service import ExternalDatasetTestService
def _get_or_create_model(model_name: str, field_def):
existing = console_ns.models.get(model_name)
if existing is None:
existing = console_ns.model(model_name, field_def)
return existing
def _build_dataset_detail_model():
keyword_setting_model = _get_or_create_model("DatasetKeywordSetting", keyword_setting_fields)
vector_setting_model = _get_or_create_model("DatasetVectorSetting", vector_setting_fields)
weighted_score_fields_copy = weighted_score_fields.copy()
weighted_score_fields_copy["keyword_setting"] = fields.Nested(keyword_setting_model)
weighted_score_fields_copy["vector_setting"] = fields.Nested(vector_setting_model)
weighted_score_model = _get_or_create_model("DatasetWeightedScore", weighted_score_fields_copy)
reranking_model = _get_or_create_model("DatasetRerankingModel", reranking_model_fields)
dataset_retrieval_model_fields_copy = dataset_retrieval_model_fields.copy()
dataset_retrieval_model_fields_copy["reranking_model"] = fields.Nested(reranking_model)
dataset_retrieval_model_fields_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True)
dataset_retrieval_model = _get_or_create_model("DatasetRetrievalModel", dataset_retrieval_model_fields_copy)
tag_model = _get_or_create_model("Tag", tag_fields)
doc_metadata_model = _get_or_create_model("DatasetDocMetadata", doc_metadata_fields)
external_knowledge_info_model = _get_or_create_model("ExternalKnowledgeInfo", external_knowledge_info_fields)
external_retrieval_model = _get_or_create_model("ExternalRetrievalModel", external_retrieval_model_fields)
icon_info_model = _get_or_create_model("DatasetIconInfo", icon_info_fields)
dataset_detail_fields_copy = dataset_detail_fields.copy()
dataset_detail_fields_copy["retrieval_model_dict"] = fields.Nested(dataset_retrieval_model)
dataset_detail_fields_copy["tags"] = fields.List(fields.Nested(tag_model))
dataset_detail_fields_copy["external_knowledge_info"] = fields.Nested(external_knowledge_info_model)
dataset_detail_fields_copy["external_retrieval_model"] = fields.Nested(external_retrieval_model, allow_null=True)
dataset_detail_fields_copy["doc_metadata"] = fields.List(fields.Nested(doc_metadata_model))
dataset_detail_fields_copy["icon_info"] = fields.Nested(icon_info_model)
return _get_or_create_model("DatasetDetail", dataset_detail_fields_copy)
try:
dataset_detail_model = console_ns.models["DatasetDetail"]
except KeyError:
dataset_detail_model = _build_dataset_detail_model()
def _validate_name(name: str) -> str:
if not name or len(name) < 1 or len(name) > 100:
raise ValueError("Name must be between 1 to 100 characters.")
@ -22,16 +79,16 @@ def _validate_name(name: str) -> str:
@console_ns.route("/datasets/external-knowledge-api")
class ExternalApiTemplateListApi(Resource):
@api.doc("get_external_api_templates")
@api.doc(description="Get external knowledge API templates")
@api.doc(
@console_ns.doc("get_external_api_templates")
@console_ns.doc(description="Get external knowledge API templates")
@console_ns.doc(
params={
"page": "Page number (default: 1)",
"limit": "Number of items per page (default: 20)",
"keyword": "Search keyword",
}
)
@api.response(200, "External API templates retrieved successfully")
@console_ns.response(200, "External API templates retrieved successfully")
@setup_required
@login_required
@account_initialization_required
@ -95,11 +152,11 @@ class ExternalApiTemplateListApi(Resource):
@console_ns.route("/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
class ExternalApiTemplateApi(Resource):
@api.doc("get_external_api_template")
@api.doc(description="Get external knowledge API template details")
@api.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
@api.response(200, "External API template retrieved successfully")
@api.response(404, "Template not found")
@console_ns.doc("get_external_api_template")
@console_ns.doc(description="Get external knowledge API template details")
@console_ns.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
@console_ns.response(200, "External API template retrieved successfully")
@console_ns.response(404, "Template not found")
@setup_required
@login_required
@account_initialization_required
@ -163,10 +220,10 @@ class ExternalApiTemplateApi(Resource):
@console_ns.route("/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
class ExternalApiUseCheckApi(Resource):
@api.doc("check_external_api_usage")
@api.doc(description="Check if external knowledge API is being used")
@api.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
@api.response(200, "Usage check completed successfully")
@console_ns.doc("check_external_api_usage")
@console_ns.doc(description="Check if external knowledge API is being used")
@console_ns.doc(params={"external_knowledge_api_id": "External knowledge API ID"})
@console_ns.response(200, "Usage check completed successfully")
@setup_required
@login_required
@account_initialization_required
@ -181,10 +238,10 @@ class ExternalApiUseCheckApi(Resource):
@console_ns.route("/datasets/external")
class ExternalDatasetCreateApi(Resource):
@api.doc("create_external_dataset")
@api.doc(description="Create external knowledge dataset")
@api.expect(
api.model(
@console_ns.doc("create_external_dataset")
@console_ns.doc(description="Create external knowledge dataset")
@console_ns.expect(
console_ns.model(
"CreateExternalDatasetRequest",
{
"external_knowledge_api_id": fields.String(required=True, description="External knowledge API ID"),
@ -194,18 +251,16 @@ class ExternalDatasetCreateApi(Resource):
},
)
)
@api.response(201, "External dataset created successfully", dataset_detail_fields)
@api.response(400, "Invalid parameters")
@api.response(403, "Permission denied")
@console_ns.response(201, "External dataset created successfully", dataset_detail_model)
@console_ns.response(400, "Invalid parameters")
@console_ns.response(403, "Permission denied")
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def post(self):
# The role of the current user in the ta table must be admin, owner, or editor
current_user, current_tenant_id = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("external_knowledge_api_id", type=str, required=True, nullable=False, location="json")
@ -241,11 +296,11 @@ class ExternalDatasetCreateApi(Resource):
@console_ns.route("/datasets/<uuid:dataset_id>/external-hit-testing")
class ExternalKnowledgeHitTestingApi(Resource):
@api.doc("test_external_knowledge_retrieval")
@api.doc(description="Test external knowledge retrieval for dataset")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.expect(
api.model(
@console_ns.doc("test_external_knowledge_retrieval")
@console_ns.doc(description="Test external knowledge retrieval for dataset")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.expect(
console_ns.model(
"ExternalHitTestingRequest",
{
"query": fields.String(required=True, description="Query text for testing"),
@ -254,9 +309,9 @@ class ExternalKnowledgeHitTestingApi(Resource):
},
)
)
@api.response(200, "External hit testing completed successfully")
@api.response(404, "Dataset not found")
@api.response(400, "Invalid parameters")
@console_ns.response(200, "External hit testing completed successfully")
@console_ns.response(404, "Dataset not found")
@console_ns.response(400, "Invalid parameters")
@setup_required
@login_required
@account_initialization_required
@ -299,10 +354,10 @@ class ExternalKnowledgeHitTestingApi(Resource):
@console_ns.route("/test/retrieval")
class BedrockRetrievalApi(Resource):
# this api is only for internal testing
@api.doc("bedrock_retrieval_test")
@api.doc(description="Bedrock retrieval test (internal use only)")
@api.expect(
api.model(
@console_ns.doc("bedrock_retrieval_test")
@console_ns.doc(description="Bedrock retrieval test (internal use only)")
@console_ns.expect(
console_ns.model(
"BedrockRetrievalTestRequest",
{
"retrieval_setting": fields.Raw(required=True, description="Retrieval settings"),
@ -311,7 +366,7 @@ class BedrockRetrievalApi(Resource):
},
)
)
@api.response(200, "Bedrock retrieval test completed")
@console_ns.response(200, "Bedrock retrieval test completed")
def post(self):
parser = (
reqparse.RequestParser()

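_build_dataset_detail_model above follows a composition pattern worth noting: the plain field dicts exported by fields.dataset_fields are copied, their dict-valued entries are replaced with fields.Nested(...) references to registered sub-models, and only then is the combined model registered. A shortened sketch under the same assumptions, with invented field names:

from flask_restx import Namespace, fields

ns = Namespace("example")  # stand-in for console_ns

# Plain field dicts, as a fields module might export them.
reranking_model_fields = {
    "reranking_provider_name": fields.String,
    "reranking_model_name": fields.String,
}
weighted_score_fields = {
    "vector_weight": fields.Float,
    "keyword_weight": fields.Float,
}
retrieval_model_fields = {
    "search_method": fields.String,
    "top_k": fields.Integer,
}

reranking_model = ns.model("RerankingModel", reranking_model_fields)
weighted_score_model = ns.model("WeightedScore", weighted_score_fields)

# Copy before mutating so the shared field dict stays untouched, then swap in
# Nested references; allow_null keeps a None value valid in the response.
retrieval_copy = retrieval_model_fields.copy()
retrieval_copy["reranking_model"] = fields.Nested(reranking_model)
retrieval_copy["weights"] = fields.Nested(weighted_score_model, allow_null=True)

retrieval_model = ns.model("RetrievalModel", retrieval_copy)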
View File

@ -1,6 +1,6 @@
from flask_restx import Resource, fields
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.datasets.hit_testing_base import DatasetsHitTestingBase
from controllers.console.wraps import (
account_initialization_required,
@ -12,11 +12,11 @@ from libs.login import login_required
@console_ns.route("/datasets/<uuid:dataset_id>/hit-testing")
class HitTestingApi(Resource, DatasetsHitTestingBase):
@api.doc("test_dataset_retrieval")
@api.doc(description="Test dataset knowledge retrieval")
@api.doc(params={"dataset_id": "Dataset ID"})
@api.expect(
api.model(
@console_ns.doc("test_dataset_retrieval")
@console_ns.doc(description="Test dataset knowledge retrieval")
@console_ns.doc(params={"dataset_id": "Dataset ID"})
@console_ns.expect(
console_ns.model(
"HitTestingRequest",
{
"query": fields.String(required=True, description="Query text for testing"),
@ -26,9 +26,9 @@ class HitTestingApi(Resource, DatasetsHitTestingBase):
},
)
)
@api.response(200, "Hit testing completed successfully")
@api.response(404, "Dataset not found")
@api.response(400, "Invalid parameters")
@console_ns.response(200, "Hit testing completed successfully")
@console_ns.response(404, "Dataset not found")
@console_ns.response(400, "Invalid parameters")
@setup_required
@login_required
@account_initialization_required

View File

@ -121,8 +121,16 @@ class DatasourceOAuthCallback(Resource):
return redirect(f"{dify_config.CONSOLE_WEB_URL}/oauth-callback")
parser_datasource = (
reqparse.RequestParser()
.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None)
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>")
class DatasourceAuth(Resource):
@console_ns.expect(parser_datasource)
@setup_required
@login_required
@account_initialization_required
@ -130,14 +138,7 @@ class DatasourceAuth(Resource):
def post(self, provider_id: str):
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument(
"name", type=StrLen(max_length=100), required=False, nullable=True, location="json", default=None
)
.add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_datasource.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
@ -168,8 +169,14 @@ class DatasourceAuth(Resource):
return {"result": datasources}, 200
parser_datasource_delete = reqparse.RequestParser().add_argument(
"credential_id", type=str, required=True, nullable=False, location="json"
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/delete")
class DatasourceAuthDeleteApi(Resource):
@console_ns.expect(parser_datasource_delete)
@setup_required
@login_required
@account_initialization_required
@ -181,10 +188,7 @@ class DatasourceAuthDeleteApi(Resource):
plugin_id = datasource_provider_id.plugin_id
provider_name = datasource_provider_id.provider_name
parser = reqparse.RequestParser().add_argument(
"credential_id", type=str, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_datasource_delete.parse_args()
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.remove_datasource_credentials(
tenant_id=current_tenant_id,
@ -195,8 +199,17 @@ class DatasourceAuthDeleteApi(Resource):
return {"result": "success"}, 200
parser_datasource_update = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json")
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/update")
class DatasourceAuthUpdateApi(Resource):
@console_ns.expect(parser_datasource_update)
@setup_required
@login_required
@account_initialization_required
@ -205,13 +218,7 @@ class DatasourceAuthUpdateApi(Resource):
_, current_tenant_id = current_account_with_tenant()
datasource_provider_id = DatasourceProviderID(provider_id)
parser = (
reqparse.RequestParser()
.add_argument("credentials", type=dict, required=False, nullable=True, location="json")
.add_argument("name", type=StrLen(max_length=100), required=False, nullable=True, location="json")
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_datasource_update.parse_args()
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.update_datasource_credentials(
@ -251,8 +258,16 @@ class DatasourceHardCodeAuthListApi(Resource):
return {"result": jsonable_encoder(datasources)}, 200
parser_datasource_custom = (
reqparse.RequestParser()
.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/custom-client")
class DatasourceAuthOauthCustomClient(Resource):
@console_ns.expect(parser_datasource_custom)
@setup_required
@login_required
@account_initialization_required
@ -260,12 +275,7 @@ class DatasourceAuthOauthCustomClient(Resource):
def post(self, provider_id: str):
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("client_params", type=dict, required=False, nullable=True, location="json")
.add_argument("enable_oauth_custom_client", type=bool, required=False, nullable=True, location="json")
)
args = parser.parse_args()
args = parser_datasource_custom.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.setup_oauth_custom_client_params(
@ -291,8 +301,12 @@ class DatasourceAuthOauthCustomClient(Resource):
return {"result": "success"}, 200
parser_default = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json")
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/default")
class DatasourceAuthDefaultApi(Resource):
@console_ns.expect(parser_default)
@setup_required
@login_required
@account_initialization_required
@ -300,8 +314,7 @@ class DatasourceAuthDefaultApi(Resource):
def post(self, provider_id: str):
_, current_tenant_id = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
args = parser_default.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.set_default_datasource_provider(
@ -312,8 +325,16 @@ class DatasourceAuthDefaultApi(Resource):
return {"result": "success"}, 200
parser_update_name = (
reqparse.RequestParser()
.add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json")
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
@console_ns.route("/auth/plugin/datasource/<path:provider_id>/update-name")
class DatasourceUpdateProviderNameApi(Resource):
@console_ns.expect(parser_update_name)
@setup_required
@login_required
@account_initialization_required
@ -321,12 +342,7 @@ class DatasourceUpdateProviderNameApi(Resource):
def post(self, provider_id: str):
_, current_tenant_id = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("name", type=StrLen(max_length=100), required=True, nullable=False, location="json")
.add_argument("credential_id", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
args = parser_update_name.parse_args()
datasource_provider_id = DatasourceProviderID(provider_id)
datasource_provider_service = DatasourceProviderService()
datasource_provider_service.update_datasource_provider_name(

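Each endpoint in this file now declares its reqparse.RequestParser once at module scope and attaches it with @console_ns.expect(...), so the same object both documents the request body in Swagger and parses it in the handler. A small sketch of the pattern; the route and argument names are illustrative, and Dify's StrLen validator is replaced with a plain str here.

from flask_restx import Namespace, Resource, reqparse

ns = Namespace("example")  # stand-in for console_ns

# Defined once at import time; reused by @ns.expect and by parse_args().
parser_credentials = (
    reqparse.RequestParser()
    .add_argument("name", type=str, required=False, nullable=True, location="json", default=None)
    .add_argument("credentials", type=dict, required=True, nullable=False, location="json")
)

@ns.route("/auth/example/<path:provider_id>")
class ExampleAuthApi(Resource):
    @ns.expect(parser_credentials)
    def post(self, provider_id: str):
        args = parser_credentials.parse_args()
        return {"provider": provider_id, "name": args["name"]}, 200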
View File

@ -1,10 +1,10 @@
from flask_restx import ( # type: ignore
Resource, # type: ignore
reqparse,
)
from pydantic import BaseModel
from werkzeug.exceptions import Forbidden
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import current_user, login_required
@ -12,17 +12,21 @@ from models import Account
from models.dataset import Pipeline
from services.rag_pipeline.rag_pipeline import RagPipelineService
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("credential_id", type=str, required=False, location="json")
)
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
class Parser(BaseModel):
inputs: dict
datasource_type: str
credential_id: str | None = None
console_ns.schema_model(Parser.__name__, Parser.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0))
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/preview")
class DataSourceContentPreviewApi(Resource):
@api.expect(parser)
@console_ns.expect(console_ns.models[Parser.__name__], validate=True)
@setup_required
@login_required
@account_initialization_required
@ -34,15 +38,10 @@ class DataSourceContentPreviewApi(Resource):
if not isinstance(current_user, Account):
raise Forbidden()
args = parser.parse_args()
inputs = args.get("inputs")
if inputs is None:
raise ValueError("missing inputs")
datasource_type = args.get("datasource_type")
if datasource_type is None:
raise ValueError("missing datasource_type")
args = Parser.model_validate(console_ns.payload)
inputs = args.inputs
datasource_type = args.datasource_type
rag_pipeline_service = RagPipelineService()
preview_content = rag_pipeline_service.run_datasource_node_preview(
pipeline=pipeline,
@ -51,6 +50,6 @@ class DataSourceContentPreviewApi(Resource):
account=current_user,
datasource_type=datasource_type,
is_published=True,
credential_id=args.get("credential_id"),
credential_id=args.credential_id,
)
return preview_content, 200

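This file goes a step further than the parser hoisting: the request body is described by a Pydantic model, its JSON Schema is registered on the namespace with schema_model, and the handler validates console_ns.payload directly instead of calling parse_args(). A stripped-down sketch of that flow, keeping the Swagger 2.0 ref template from the diff but using invented route and field names:

from flask import Flask
from flask_restx import Api, Namespace, Resource
from pydantic import BaseModel

app = Flask(__name__)
api = Api(app)
ns = Namespace("example")  # stand-in for console_ns
api.add_namespace(ns)

DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"

class PreviewRequest(BaseModel):
    inputs: dict
    datasource_type: str
    credential_id: str | None = None

# Register the Pydantic JSON Schema so @ns.expect can reference it by name.
ns.schema_model(
    PreviewRequest.__name__,
    PreviewRequest.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)

@ns.route("/preview")
class PreviewApi(Resource):
    @ns.expect(ns.models[PreviewRequest.__name__], validate=True)
    def post(self):
        # ns.payload is the parsed JSON body; Pydantic re-validates and types it.
        body = PreviewRequest.model_validate(ns.payload)
        return {"datasource_type": body.datasource_type}, 200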
View File

@ -1,11 +1,11 @@
from flask_restx import Resource, marshal_with, reqparse # type: ignore
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import console_ns
from controllers.console.datasets.wraps import get_rag_pipeline
from controllers.console.wraps import (
account_initialization_required,
edit_permission_required,
setup_required,
)
from extensions.ext_database import db
@ -21,12 +21,11 @@ class RagPipelineImportApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@marshal_with(pipeline_import_fields)
def post(self):
# Check user role first
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
@ -71,12 +70,10 @@ class RagPipelineImportConfirmApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@marshal_with(pipeline_import_fields)
def post(self, import_id):
current_user, _ = current_account_with_tenant()
# Check user role first
if not current_user.has_edit_permission:
raise Forbidden()
# Create service with session
with Session(db.engine) as session:
@ -98,12 +95,9 @@ class RagPipelineImportCheckDependenciesApi(Resource):
@login_required
@get_rag_pipeline
@account_initialization_required
@edit_permission_required
@marshal_with(pipeline_import_check_dependencies_fields)
def get(self, pipeline: Pipeline):
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
with Session(db.engine) as session:
import_service = RagPipelineDslService(session)
result = import_service.check_dependencies(pipeline=pipeline)
@ -117,12 +111,9 @@ class RagPipelineExportApi(Resource):
@login_required
@get_rag_pipeline
@account_initialization_required
@edit_permission_required
def get(self, pipeline: Pipeline):
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
# Add include_secret params
# Add include_secret params
parser = reqparse.RequestParser().add_argument("include_secret", type=str, default="false", location="args")
args = parser.parse_args()

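Across these handlers the inline has_edit_permission check followed by raise Forbidden() is replaced by the @edit_permission_required decorator imported from controllers.console.wraps. Its implementation is not part of this diff, so the sketch below is only a guess at what such a wrapper typically looks like; the use of current_account_with_tenant() mirrors the handlers above but is an assumption.

from functools import wraps

from werkzeug.exceptions import Forbidden

from libs.login import current_account_with_tenant  # repo helper used by the handlers above

def edit_permission_required(view):
    """Sketch only: reject the request unless the current account can edit.
    The real decorator in controllers.console.wraps may differ."""
    @wraps(view)
    def decorated(*args, **kwargs):
        current_user, _ = current_account_with_tenant()
        if not current_user.has_edit_permission:
            raise Forbidden()
        return view(*args, **kwargs)
    return decorated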
View File

@ -148,8 +148,12 @@ class DraftRagPipelineApi(Resource):
}
parser_run = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/iteration/nodes/<string:node_id>/run")
class RagPipelineDraftRunIterationNodeApi(Resource):
@console_ns.expect(parser_run)
@setup_required
@login_required
@account_initialization_required
@ -162,8 +166,7 @@ class RagPipelineDraftRunIterationNodeApi(Resource):
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
args = parser_run.parse_args()
try:
response = PipelineGenerateService.generate_single_iteration(
@ -184,9 +187,11 @@ class RagPipelineDraftRunIterationNodeApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/loop/nodes/<string:node_id>/run")
class RagPipelineDraftRunLoopNodeApi(Resource):
@console_ns.expect(parser_run)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
def post(self, pipeline: Pipeline, node_id: str):
"""
@ -194,11 +199,8 @@ class RagPipelineDraftRunLoopNodeApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser().add_argument("inputs", type=dict, location="json")
args = parser.parse_args()
args = parser_run.parse_args()
try:
response = PipelineGenerateService.generate_single_loop(
@ -217,11 +219,22 @@ class RagPipelineDraftRunLoopNodeApi(Resource):
raise InternalServerError()
parser_draft_run = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/run")
class DraftRagPipelineRunApi(Resource):
@console_ns.expect(parser_draft_run)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
def post(self, pipeline: Pipeline):
"""
@ -229,17 +242,8 @@ class DraftRagPipelineRunApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_draft_run.parse_args()
try:
response = PipelineGenerateService.generate(
@ -255,11 +259,25 @@ class DraftRagPipelineRunApi(Resource):
raise InvokeRateLimitHttpError(ex.description)
parser_published_run = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("is_preview", type=bool, required=True, location="json", default=False)
.add_argument("response_mode", type=str, required=True, location="json", default="streaming")
.add_argument("original_document_id", type=str, required=False, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/run")
class PublishedRagPipelineRunApi(Resource):
@console_ns.expect(parser_published_run)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
def post(self, pipeline: Pipeline):
"""
@ -267,20 +285,8 @@ class PublishedRagPipelineRunApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info_list", type=list, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("is_preview", type=bool, required=True, location="json", default=False)
.add_argument("response_mode", type=str, required=True, location="json", default="streaming")
.add_argument("original_document_id", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_published_run.parse_args()
streaming = args["response_mode"] == "streaming"
@ -381,11 +387,21 @@ class PublishedRagPipelineRunApi(Resource):
#
# return result
#
parser_rag_run = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("credential_id", type=str, required=False, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/datasource/nodes/<string:node_id>/run")
class RagPipelinePublishedDatasourceNodeRunApi(Resource):
@console_ns.expect(parser_rag_run)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
def post(self, pipeline: Pipeline, node_id: str):
"""
@ -393,16 +409,8 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("credential_id", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_rag_run.parse_args()
inputs = args.get("inputs")
if inputs is None:
@ -429,8 +437,10 @@ class RagPipelinePublishedDatasourceNodeRunApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/nodes/<string:node_id>/run")
class RagPipelineDraftDatasourceNodeRunApi(Resource):
@console_ns.expect(parser_rag_run)
@setup_required
@login_required
@edit_permission_required
@account_initialization_required
@get_rag_pipeline
def post(self, pipeline: Pipeline, node_id: str):
@ -439,16 +449,8 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("credential_id", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_rag_run.parse_args()
inputs = args.get("inputs")
if inputs is None:
@ -473,10 +475,17 @@ class RagPipelineDraftDatasourceNodeRunApi(Resource):
)
parser_run_api = reqparse.RequestParser().add_argument(
"inputs", type=dict, required=True, nullable=False, location="json"
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/nodes/<string:node_id>/run")
class RagPipelineDraftNodeRunApi(Resource):
@console_ns.expect(parser_run_api)
@setup_required
@login_required
@edit_permission_required
@account_initialization_required
@get_rag_pipeline
@marshal_with(workflow_run_node_execution_fields)
@ -486,13 +495,8 @@ class RagPipelineDraftNodeRunApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser().add_argument(
"inputs", type=dict, required=True, nullable=False, location="json"
)
args = parser.parse_args()
args = parser_run_api.parse_args()
inputs = args.get("inputs")
if inputs == None:
@ -513,6 +517,7 @@ class RagPipelineDraftNodeRunApi(Resource):
class RagPipelineTaskStopApi(Resource):
@setup_required
@login_required
@edit_permission_required
@account_initialization_required
@get_rag_pipeline
def post(self, pipeline: Pipeline, task_id: str):
@ -521,8 +526,6 @@ class RagPipelineTaskStopApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
AppQueueManager.set_stop_flag(task_id, InvokeFrom.DEBUGGER, current_user.id)
@ -534,6 +537,7 @@ class PublishedRagPipelineApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
@marshal_with(workflow_fields)
def get(self, pipeline: Pipeline):
@ -541,9 +545,6 @@ class PublishedRagPipelineApi(Resource):
Get published pipeline
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
if not pipeline.is_published:
return None
# fetch published workflow by pipeline
@ -556,6 +557,7 @@ class PublishedRagPipelineApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
def post(self, pipeline: Pipeline):
"""
@ -563,9 +565,6 @@ class PublishedRagPipelineApi(Resource):
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
rag_pipeline_service = RagPipelineService()
with Session(db.engine) as session:
pipeline = session.merge(pipeline)
@ -592,38 +591,33 @@ class DefaultRagPipelineBlockConfigsApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
def get(self, pipeline: Pipeline):
"""
Get default block config
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
# Get default block configs
rag_pipeline_service = RagPipelineService()
return rag_pipeline_service.get_default_block_configs()
parser_default = reqparse.RequestParser().add_argument("q", type=str, location="args")
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/default-workflow-block-configs/<string:block_type>")
class DefaultRagPipelineBlockConfigApi(Resource):
@console_ns.expect(parser_default)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
def get(self, pipeline: Pipeline, block_type: str):
"""
Get default block config
"""
# The role of the current user in the ta table must be admin, owner, or editor
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = reqparse.RequestParser().add_argument("q", type=str, location="args")
args = parser.parse_args()
args = parser_default.parse_args()
q = args.get("q")
@ -639,11 +633,22 @@ class DefaultRagPipelineBlockConfigApi(Resource):
return rag_pipeline_service.get_default_block_config(node_type=block_type, filters=filters)
parser_wf = (
reqparse.RequestParser()
.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=10, location="args")
.add_argument("user_id", type=str, required=False, location="args")
.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows")
class PublishedAllRagPipelineApi(Resource):
@console_ns.expect(parser_wf)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
@marshal_with(workflow_pagination_fields)
def get(self, pipeline: Pipeline):
@ -651,19 +656,10 @@ class PublishedAllRagPipelineApi(Resource):
Get published workflows
"""
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("page", type=inputs.int_range(1, 99999), required=False, default=1, location="args")
.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
.add_argument("user_id", type=str, required=False, location="args")
.add_argument("named_only", type=inputs.boolean, required=False, default=False, location="args")
)
args = parser.parse_args()
page = int(args.get("page", 1))
limit = int(args.get("limit", 10))
args = parser_wf.parse_args()
page = args["page"]
limit = args["limit"]
user_id = args.get("user_id")
named_only = args.get("named_only", False)
@ -691,11 +687,20 @@ class PublishedAllRagPipelineApi(Resource):
}
parser_wf_id = (
reqparse.RequestParser()
.add_argument("marked_name", type=str, required=False, location="json")
.add_argument("marked_comment", type=str, required=False, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/<string:workflow_id>")
class RagPipelineByIdApi(Resource):
@console_ns.expect(parser_wf_id)
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
@get_rag_pipeline
@marshal_with(workflow_fields)
def patch(self, pipeline: Pipeline, workflow_id: str):
@ -704,22 +709,14 @@ class RagPipelineByIdApi(Resource):
"""
# Check permission
current_user, _ = current_account_with_tenant()
if not current_user.has_edit_permission:
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("marked_name", type=str, required=False, location="json")
.add_argument("marked_comment", type=str, required=False, location="json")
)
args = parser.parse_args()
args = parser_wf_id.parse_args()
# Validate name and comment length
if args.marked_name and len(args.marked_name) > 20:
raise ValueError("Marked name cannot exceed 20 characters")
if args.marked_comment and len(args.marked_comment) > 100:
raise ValueError("Marked comment cannot exceed 100 characters")
args = parser.parse_args()
# Prepare update data
update_data = {}
@ -752,8 +749,12 @@ class RagPipelineByIdApi(Resource):
return workflow
parser_parameters = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/processing/parameters")
class PublishedRagPipelineSecondStepApi(Resource):
@console_ns.expect(parser_parameters)
@setup_required
@login_required
@account_initialization_required
@ -763,8 +764,7 @@ class PublishedRagPipelineSecondStepApi(Resource):
"""
Get second step parameters of rag pipeline
"""
parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
args = parser.parse_args()
args = parser_parameters.parse_args()
node_id = args.get("node_id")
if not node_id:
raise ValueError("Node ID is required")
@ -777,6 +777,7 @@ class PublishedRagPipelineSecondStepApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/published/pre-processing/parameters")
class PublishedRagPipelineFirstStepApi(Resource):
@console_ns.expect(parser_parameters)
@setup_required
@login_required
@account_initialization_required
@ -786,8 +787,7 @@ class PublishedRagPipelineFirstStepApi(Resource):
"""
Get first step parameters of rag pipeline
"""
parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
args = parser.parse_args()
args = parser_parameters.parse_args()
node_id = args.get("node_id")
if not node_id:
raise ValueError("Node ID is required")
@ -800,6 +800,7 @@ class PublishedRagPipelineFirstStepApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/pre-processing/parameters")
class DraftRagPipelineFirstStepApi(Resource):
@console_ns.expect(parser_parameters)
@setup_required
@login_required
@account_initialization_required
@ -809,8 +810,7 @@ class DraftRagPipelineFirstStepApi(Resource):
"""
Get first step parameters of rag pipeline
"""
parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
args = parser.parse_args()
args = parser_parameters.parse_args()
node_id = args.get("node_id")
if not node_id:
raise ValueError("Node ID is required")
@ -823,6 +823,7 @@ class DraftRagPipelineFirstStepApi(Resource):
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/processing/parameters")
class DraftRagPipelineSecondStepApi(Resource):
@console_ns.expect(parser_parameters)
@setup_required
@login_required
@account_initialization_required
@ -832,8 +833,7 @@ class DraftRagPipelineSecondStepApi(Resource):
"""
Get second step parameters of rag pipeline
"""
parser = reqparse.RequestParser().add_argument("node_id", type=str, required=True, location="args")
args = parser.parse_args()
args = parser_parameters.parse_args()
node_id = args.get("node_id")
if not node_id:
raise ValueError("Node ID is required")
@ -845,8 +845,16 @@ class DraftRagPipelineSecondStepApi(Resource):
}
parser_wf_run = (
reqparse.RequestParser()
.add_argument("last_id", type=uuid_value, location="args")
.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflow-runs")
class RagPipelineWorkflowRunListApi(Resource):
@console_ns.expect(parser_wf_run)
@setup_required
@login_required
@account_initialization_required
@ -856,12 +864,7 @@ class RagPipelineWorkflowRunListApi(Resource):
"""
Get workflow run list
"""
parser = (
reqparse.RequestParser()
.add_argument("last_id", type=uuid_value, location="args")
.add_argument("limit", type=int_range(1, 100), required=False, default=20, location="args")
)
args = parser.parse_args()
args = parser_wf_run.parse_args()
rag_pipeline_service = RagPipelineService()
result = rag_pipeline_service.get_rag_pipeline_paginate_workflow_runs(pipeline=pipeline, args=args)
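Across this file the change is the same: per-request reqparse.RequestParser() construction moves out of the view methods into module-level parsers (parser_parameters, parser_wf_run, parser_var) that are handed to @console_ns.expect() for the generated Swagger docs and reused via parse_args() inside the handler. A minimal, self-contained sketch of that pattern, with a hypothetical namespace, route, and argument names standing in for the real ones:

from flask import Flask
from flask_restx import Api, Namespace, Resource, reqparse

demo_ns = Namespace("demo")

# Built once at import time; @demo_ns.expect() documents it and the handler
# reuses the same object for parsing.
parser_runs = (
    reqparse.RequestParser()
    .add_argument("last_id", type=str, location="args")
    .add_argument("limit", type=int, required=False, default=20, location="args")
)


@demo_ns.route("/runs")
class RunListApi(Resource):
    @demo_ns.expect(parser_runs)
    def get(self):
        args = parser_runs.parse_args()
        # The real controllers hand args to a service layer; this stub echoes them.
        return {"last_id": args["last_id"], "limit": args["limit"]}


app = Flask(__name__)
api = Api(app)
api.add_namespace(demo_ns)

Hoisting the parser means a single definition drives both request validation and the documented request schema, which appears to be the motivation for the change.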
@ -961,8 +964,18 @@ class RagPipelineTransformApi(Resource):
return result
parser_var = (
reqparse.RequestParser()
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info", type=dict, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("start_node_title", type=str, required=True, location="json")
)
@console_ns.route("/rag/pipelines/<uuid:pipeline_id>/workflows/draft/datasource/variables-inspect")
class RagPipelineDatasourceVariableApi(Resource):
@console_ns.expect(parser_var)
@setup_required
@login_required
@account_initialization_required
@ -974,14 +987,7 @@ class RagPipelineDatasourceVariableApi(Resource):
Set datasource variables
"""
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("datasource_type", type=str, required=True, location="json")
.add_argument("datasource_info", type=dict, required=True, location="json")
.add_argument("start_node_id", type=str, required=True, location="json")
.add_argument("start_node_title", type=str, required=True, location="json")
)
args = parser.parse_args()
args = parser_var.parse_args()
rag_pipeline_service = RagPipelineService()
workflow_node_execution = rag_pipeline_service.set_datasource_variables(

View File

@ -1,6 +1,6 @@
from flask_restx import Resource, fields, reqparse
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.datasets.error import WebsiteCrawlError
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
@ -9,10 +9,10 @@ from services.website_service import WebsiteCrawlApiRequest, WebsiteCrawlStatusA
@console_ns.route("/website/crawl")
class WebsiteCrawlApi(Resource):
@api.doc("crawl_website")
@api.doc(description="Crawl website content")
@api.expect(
api.model(
@console_ns.doc("crawl_website")
@console_ns.doc(description="Crawl website content")
@console_ns.expect(
console_ns.model(
"WebsiteCrawlRequest",
{
"provider": fields.String(
@ -25,8 +25,8 @@ class WebsiteCrawlApi(Resource):
},
)
)
@api.response(200, "Website crawl initiated successfully")
@api.response(400, "Invalid crawl parameters")
@console_ns.response(200, "Website crawl initiated successfully")
@console_ns.response(400, "Invalid crawl parameters")
@setup_required
@login_required
@account_initialization_required
@ -62,12 +62,12 @@ class WebsiteCrawlApi(Resource):
@console_ns.route("/website/crawl/status/<string:job_id>")
class WebsiteCrawlStatusApi(Resource):
@api.doc("get_crawl_status")
@api.doc(description="Get website crawl status")
@api.doc(params={"job_id": "Crawl job ID", "provider": "Crawl provider (firecrawl/watercrawl/jinareader)"})
@api.response(200, "Crawl status retrieved successfully")
@api.response(404, "Crawl job not found")
@api.response(400, "Invalid provider")
@console_ns.doc("get_crawl_status")
@console_ns.doc(description="Get website crawl status")
@console_ns.doc(params={"job_id": "Crawl job ID", "provider": "Crawl provider (firecrawl/watercrawl/jinareader)"})
@console_ns.response(200, "Crawl status retrieved successfully")
@console_ns.response(404, "Crawl job not found")
@console_ns.response(400, "Invalid provider")
@setup_required
@login_required
@account_initialization_required

View File

@ -1,44 +1,40 @@
from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar
from controllers.console.datasets.error import PipelineNotFoundError
from extensions.ext_database import db
from libs.login import current_account_with_tenant
from models.dataset import Pipeline
P = ParamSpec("P")
R = TypeVar("R")
def get_rag_pipeline(
view: Callable | None = None,
):
def decorator(view_func):
@wraps(view_func)
def decorated_view(*args, **kwargs):
if not kwargs.get("pipeline_id"):
raise ValueError("missing pipeline_id in path parameters")
_, current_tenant_id = current_account_with_tenant()
def get_rag_pipeline(view_func: Callable[P, R]):
@wraps(view_func)
def decorated_view(*args: P.args, **kwargs: P.kwargs):
if not kwargs.get("pipeline_id"):
raise ValueError("missing pipeline_id in path parameters")
pipeline_id = kwargs.get("pipeline_id")
pipeline_id = str(pipeline_id)
_, current_tenant_id = current_account_with_tenant()
del kwargs["pipeline_id"]
pipeline_id = kwargs.get("pipeline_id")
pipeline_id = str(pipeline_id)
pipeline = (
db.session.query(Pipeline)
.where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_tenant_id)
.first()
)
del kwargs["pipeline_id"]
if not pipeline:
raise PipelineNotFoundError()
pipeline = (
db.session.query(Pipeline)
.where(Pipeline.id == pipeline_id, Pipeline.tenant_id == current_tenant_id)
.first()
)
kwargs["pipeline"] = pipeline
if not pipeline:
raise PipelineNotFoundError()
return view_func(*args, **kwargs)
kwargs["pipeline"] = pipeline
return decorated_view
return view_func(*args, **kwargs)
if view is None:
return decorator
else:
return decorator(view)
return decorated_view
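The hunk above interleaves the removed optional-argument form of get_rag_pipeline with its replacement: a plain decorator typed with ParamSpec so the wrapped view keeps its parameter signature. A sketch of the simplified shape, with a hypothetical in-memory lookup standing in for the real Pipeline query and error class:

from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")

# Hypothetical stand-ins for the database query and error class used above.
PIPELINES = {"p1": {"id": "p1", "name": "demo pipeline"}}


class PipelineNotFoundError(Exception):
    pass


def get_pipeline(view_func: Callable[P, R]) -> Callable[P, R]:
    @wraps(view_func)
    def decorated_view(*args: P.args, **kwargs: P.kwargs):
        if not kwargs.get("pipeline_id"):
            raise ValueError("missing pipeline_id in path parameters")
        pipeline_id = str(kwargs.pop("pipeline_id"))
        pipeline = PIPELINES.get(pipeline_id)
        if not pipeline:
            raise PipelineNotFoundError()
        kwargs["pipeline"] = pipeline  # hand the loaded object to the view
        return view_func(*args, **kwargs)

    return decorated_view


@get_pipeline
def show(pipeline):
    return pipeline["name"]


print(show(pipeline_id="p1"))  # -> demo pipeline

Dropping the view=None indirection removes the trailing "if view is None" branching of the old version while leaving call sites such as @get_rag_pipeline unchanged.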

View File

@ -1,43 +0,0 @@
from flask import request
from flask_restx import Resource
from controllers.console import api
from controllers.console.explore.wraps import explore_banner_enabled
from extensions.ext_database import db
from models.model import ExporleBanner
class BannerApi(Resource):
"""Resource for banner list."""
@explore_banner_enabled
def get(self):
"""Get banner list."""
language = request.args.get("language", "en-US")
# Build base query for enabled banners
base_query = db.session.query(ExporleBanner).where(ExporleBanner.status == "enabled")
# Try to get banners in the requested language
banners = base_query.where(ExporleBanner.language == language).order_by(ExporleBanner.sort).all()
# Fallback to en-US if no banners found and language is not en-US
if not banners and language != "en-US":
banners = base_query.where(ExporleBanner.language == "en-US").order_by(ExporleBanner.sort).all()
# Convert banners to serializable format
result = []
for banner in banners:
banner_data = {
"id": banner.id,
"content": banner.content, # Already parsed as JSON by SQLAlchemy
"link": banner.link,
"sort": banner.sort,
"status": banner.status,
"created_at": banner.created_at.isoformat() if banner.created_at else None,
}
result.append(banner_data)
return result
api.add_resource(BannerApi, "/explore/banners")

View File

@ -15,7 +15,6 @@ from controllers.console.app.error import (
from controllers.console.explore.error import NotChatAppError, NotCompletionAppError
from controllers.console.explore.wraps import InstalledAppResource
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import (
ModelCurrentlyNotSupportError,
@ -31,6 +30,7 @@ from libs.login import current_user
from models import Account
from models.model import AppMode
from services.app_generate_service import AppGenerateService
from services.app_task_service import AppTaskService
from services.errors.llm import InvokeRateLimitError
from .. import console_ns
@ -46,7 +46,7 @@ logger = logging.getLogger(__name__)
class CompletionApi(InstalledAppResource):
def post(self, installed_app):
app_model = installed_app.app
if app_model.mode != "completion":
if app_model.mode != AppMode.COMPLETION:
raise NotCompletionAppError()
parser = (
@ -102,12 +102,18 @@ class CompletionApi(InstalledAppResource):
class CompletionStopApi(InstalledAppResource):
def post(self, installed_app, task_id):
app_model = installed_app.app
if app_model.mode != "completion":
if app_model.mode != AppMode.COMPLETION:
raise NotCompletionAppError()
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
AppQueueManager.set_stop_flag(task_id, InvokeFrom.EXPLORE, current_user.id)
AppTaskService.stop_task(
task_id=task_id,
invoke_from=InvokeFrom.EXPLORE,
user_id=current_user.id,
app_mode=AppMode.value_of(app_model.mode),
)
return {"result": "success"}, 200
@ -184,6 +190,12 @@ class ChatStopApi(InstalledAppResource):
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
AppQueueManager.set_stop_flag(task_id, InvokeFrom.EXPLORE, current_user.id)
AppTaskService.stop_task(
task_id=task_id,
invoke_from=InvokeFrom.EXPLORE,
user_id=current_user.id,
app_mode=app_mode,
)
return {"result": "success"}, 200
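Two hunks above swap the literal comparison app_model.mode != "completion" for app_model.mode != AppMode.COMPLETION. For that to stay behavior-preserving against a plain string column, AppMode presumably is a str-backed Enum, whose members compare equal to their raw values. A tiny illustration under that assumption:

from enum import Enum


class AppMode(str, Enum):  # assumed shape; the real enum lives in models.model
    COMPLETION = "completion"
    CHAT = "chat"


assert AppMode.COMPLETION == "completion"  # a member equals its raw value
assert "chat" != AppMode.COMPLETION        # and differs from every other value
print(AppMode.COMPLETION.value)            # -> completion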

View File

@ -29,25 +29,3 @@ class AppAccessDeniedError(BaseHTTPException):
error_code = "access_denied"
description = "App access denied."
code = 403
class TrialAppNotAllowed(BaseHTTPException):
"""*403* `Trial App Not Allowed`
Raise if the user has reached the trial app limit.
"""
error_code = "trial_app_not_allowed"
code = 403
description = "the app is not allowed to be trial."
class TrialAppLimitExceeded(BaseHTTPException):
"""*403* `Trial App Limit Exceeded`
Raise if the user has exceeded the trial app limit.
"""
error_code = "trial_app_limit_exceeded"
code = 403
description = "The user has exceeded the trial app limit."

View File

@ -27,7 +27,6 @@ recommended_app_fields = {
"category": fields.String,
"position": fields.Integer,
"is_listed": fields.Boolean,
"can_trial": fields.Boolean,
}
recommended_app_list_fields = {
@ -36,15 +35,18 @@ recommended_app_list_fields = {
}
parser_apps = reqparse.RequestParser().add_argument("language", type=str, location="args")
@console_ns.route("/explore/apps")
class RecommendedAppListApi(Resource):
@console_ns.expect(parser_apps)
@login_required
@account_initialization_required
@marshal_with(recommended_app_list_fields)
def get(self):
# language args
parser = reqparse.RequestParser().add_argument("language", type=str, location="args")
args = parser.parse_args()
args = parser_apps.parse_args()
language = args.get("language")
if language and language in languages:

View File

@ -1,514 +0,0 @@
import logging
from typing import Any, cast
from flask import request
from flask_restx import Resource, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, InternalServerError, NotFound
import services
from controllers.common import fields
from controllers.common.fields import build_site_model
from controllers.console import api
from controllers.console.app.error import (
AppUnavailableError,
AudioTooLargeError,
CompletionRequestError,
ConversationCompletedError,
NeedAddIdsError,
NoAudioUploadedError,
ProviderModelCurrentlyNotSupportError,
ProviderNotInitializeError,
ProviderNotSupportSpeechToTextError,
ProviderQuotaExceededError,
UnsupportedAudioTypeError,
)
from controllers.console.app.wraps import get_app_model_with_trial
from controllers.console.explore.error import (
AppSuggestedQuestionsAfterAnswerDisabledError,
NotChatAppError,
NotCompletionAppError,
NotWorkflowAppError,
)
from controllers.console.explore.wraps import TrialAppResource, trial_feature_enable
from controllers.service_api import service_api_ns
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.app_config.common.parameters_mapping import get_parameters_from_feature_dict
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.errors.error import (
ModelCurrentlyNotSupportError,
ProviderTokenNotInitError,
QuotaExceededError,
)
from core.model_runtime.errors.invoke import InvokeError
from core.workflow.graph_engine.manager import GraphEngineManager
from extensions.ext_database import db
from fields.app_fields import app_detail_fields_with_site
from fields.dataset_fields import dataset_fields
from fields.workflow_fields import workflow_fields
from libs import helper
from libs.helper import uuid_value
from libs.login import current_user
from models import Account
from models.account import TenantStatus
from models.model import AppMode, Site
from models.workflow import Workflow
from services.app_generate_service import AppGenerateService
from services.app_service import AppService
from services.audio_service import AudioService
from services.dataset_service import DatasetService
from services.errors.audio import (
AudioTooLargeServiceError,
NoAudioUploadedServiceError,
ProviderNotSupportSpeechToTextServiceError,
UnsupportedAudioTypeServiceError,
)
from services.errors.conversation import ConversationNotExistsError
from services.errors.llm import InvokeRateLimitError
from services.errors.message import (
MessageNotExistsError,
SuggestedQuestionsAfterAnswerDisabledError,
)
from services.message_service import MessageService
from services.recommended_app_service import RecommendedAppService
logger = logging.getLogger(__name__)
class TrialAppWorkflowRunApi(TrialAppResource):
def post(self, trial_app):
"""
Run workflow
"""
app_model = trial_app
if not app_model:
raise NotWorkflowAppError()
app_mode = AppMode.value_of(app_model.mode)
if app_mode != AppMode.WORKFLOW:
raise NotWorkflowAppError()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
parser.add_argument("files", type=list, required=False, location="json")
args = parser.parse_args()
assert current_user is not None
try:
app_id = app_model.id
user_id = current_user.id
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=True
)
RecommendedAppService.add_trial_app_record(app_id, user_id)
return helper.compact_generate_response(response)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except InvokeRateLimitError as ex:
raise InvokeRateLimitHttpError(ex.description)
except ValueError as e:
raise e
except Exception:
logger.exception("internal server error.")
raise InternalServerError()
class TrialAppWorkflowTaskStopApi(TrialAppResource):
def post(self, trial_app, task_id: str):
"""
Stop workflow task
"""
app_model = trial_app
if not app_model:
raise NotWorkflowAppError()
app_mode = AppMode.value_of(app_model.mode)
if app_mode != AppMode.WORKFLOW:
raise NotWorkflowAppError()
assert current_user is not None
# Stop using both mechanisms for backward compatibility
# Legacy stop flag mechanism (without user check)
AppQueueManager.set_stop_flag_no_user_check(task_id)
# New graph engine command channel mechanism
GraphEngineManager.send_stop_command(task_id)
return {"result": "success"}
class TrialChatApi(TrialAppResource):
@trial_feature_enable
def post(self, trial_app):
app_model = trial_app
app_mode = AppMode.value_of(app_model.mode)
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
raise NotChatAppError()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, location="json")
parser.add_argument("query", type=str, required=True, location="json")
parser.add_argument("files", type=list, required=False, location="json")
parser.add_argument("conversation_id", type=uuid_value, location="json")
parser.add_argument("parent_message_id", type=uuid_value, required=False, location="json")
parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
args = parser.parse_args()
args["auto_generate_name"] = False
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
# Get IDs before they might be detached from session
app_id = app_model.id
user_id = current_user.id
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=True
)
RecommendedAppService.add_trial_app_record(app_id, user_id)
return helper.compact_generate_response(response)
except services.errors.conversation.ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
except services.errors.conversation.ConversationCompletedError:
raise ConversationCompletedError()
except services.errors.app_model_config.AppModelConfigBrokenError:
logger.exception("App model config broken.")
raise AppUnavailableError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except InvokeRateLimitError as ex:
raise InvokeRateLimitHttpError(ex.description)
except ValueError as e:
raise e
except Exception:
logger.exception("internal server error.")
raise InternalServerError()
class TrialMessageSuggestedQuestionApi(TrialAppResource):
@trial_feature_enable
def get(self, trial_app, message_id):
app_model = trial_app
app_mode = AppMode.value_of(app_model.mode)
if app_mode not in {AppMode.CHAT, AppMode.AGENT_CHAT, AppMode.ADVANCED_CHAT}:
raise NotChatAppError()
message_id = str(message_id)
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
questions = MessageService.get_suggested_questions_after_answer(
app_model=app_model, user=current_user, message_id=message_id, invoke_from=InvokeFrom.EXPLORE
)
except MessageNotExistsError:
raise NotFound("Message not found")
except ConversationNotExistsError:
raise NotFound("Conversation not found")
except SuggestedQuestionsAfterAnswerDisabledError:
raise AppSuggestedQuestionsAfterAnswerDisabledError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except Exception:
logger.exception("internal server error.")
raise InternalServerError()
return {"data": questions}
class TrialChatAudioApi(TrialAppResource):
@trial_feature_enable
def post(self, trial_app):
app_model = trial_app
file = request.files["file"]
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
# Get IDs before they might be detached from session
app_id = app_model.id
user_id = current_user.id
response = AudioService.transcript_asr(app_model=app_model, file=file, end_user=None)
RecommendedAppService.add_trial_app_record(app_id, user_id)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logger.exception("App model config broken.")
raise AppUnavailableError()
except NoAudioUploadedServiceError:
raise NoAudioUploadedError()
except AudioTooLargeServiceError as e:
raise AudioTooLargeError(str(e))
except UnsupportedAudioTypeServiceError:
raise UnsupportedAudioTypeError()
except ProviderNotSupportSpeechToTextServiceError:
raise ProviderNotSupportSpeechToTextError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except ValueError as e:
raise e
except Exception as e:
logger.exception("internal server error.")
raise InternalServerError()
class TrialChatTextApi(TrialAppResource):
@trial_feature_enable
def post(self, trial_app):
app_model = trial_app
try:
parser = reqparse.RequestParser()
parser.add_argument("message_id", type=str, required=False, location="json")
parser.add_argument("voice", type=str, location="json")
parser.add_argument("text", type=str, location="json")
parser.add_argument("streaming", type=bool, location="json")
args = parser.parse_args()
message_id = args.get("message_id", None)
text = args.get("text", None)
voice = args.get("voice", None)
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
# Get IDs before they might be detached from session
app_id = app_model.id
user_id = current_user.id
response = AudioService.transcript_tts(app_model=app_model, text=text, voice=voice, message_id=message_id)
RecommendedAppService.add_trial_app_record(app_id, user_id)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logger.exception("App model config broken.")
raise AppUnavailableError()
except NoAudioUploadedServiceError:
raise NoAudioUploadedError()
except AudioTooLargeServiceError as e:
raise AudioTooLargeError(str(e))
except UnsupportedAudioTypeServiceError:
raise UnsupportedAudioTypeError()
except ProviderNotSupportSpeechToTextServiceError:
raise ProviderNotSupportSpeechToTextError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except ValueError as e:
raise e
except Exception as e:
logger.exception("internal server error.")
raise InternalServerError()
class TrialCompletionApi(TrialAppResource):
@trial_feature_enable
def post(self, trial_app):
app_model = trial_app
if app_model.mode != "completion":
raise NotCompletionAppError()
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, location="json")
parser.add_argument("query", type=str, location="json", default="")
parser.add_argument("files", type=list, required=False, location="json")
parser.add_argument("response_mode", type=str, choices=["blocking", "streaming"], location="json")
parser.add_argument("retriever_from", type=str, required=False, default="explore_app", location="json")
args = parser.parse_args()
streaming = args["response_mode"] == "streaming"
args["auto_generate_name"] = False
try:
if not isinstance(current_user, Account):
raise ValueError("current_user must be an Account instance")
# Get IDs before they might be detached from session
app_id = app_model.id
user_id = current_user.id
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.EXPLORE, streaming=streaming
)
RecommendedAppService.add_trial_app_record(app_id, user_id)
return helper.compact_generate_response(response)
except services.errors.conversation.ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
except services.errors.conversation.ConversationCompletedError:
raise ConversationCompletedError()
except services.errors.app_model_config.AppModelConfigBrokenError:
logger.exception("App model config broken.")
raise AppUnavailableError()
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
except ValueError as e:
raise e
except Exception:
logger.exception("internal server error.")
raise InternalServerError()
class TrialSitApi(Resource):
"""Resource for trial app sites."""
@trial_feature_enable
@get_app_model_with_trial
@service_api_ns.marshal_with(build_site_model(service_api_ns))
def get(self, app_model):
"""Retrieve app site info.
Returns the site configuration for the application including theme, icons, and text.
"""
site = db.session.query(Site).where(Site.app_id == app_model.id).first()
if not site:
raise Forbidden()
assert app_model.tenant
if app_model.tenant.status == TenantStatus.ARCHIVE:
raise Forbidden()
return site
class TrialAppParameterApi(Resource):
"""Resource for app variables."""
@trial_feature_enable
@get_app_model_with_trial
@marshal_with(fields.parameters_fields)
def get(self, app_model):
"""Retrieve app parameters."""
if app_model is None:
raise AppUnavailableError()
if app_model.mode in {AppMode.ADVANCED_CHAT, AppMode.WORKFLOW}:
workflow = app_model.workflow
if workflow is None:
raise AppUnavailableError()
features_dict = workflow.features_dict
user_input_form = workflow.user_input_form(to_old_structure=True)
else:
app_model_config = app_model.app_model_config
if app_model_config is None:
raise AppUnavailableError()
features_dict = app_model_config.to_dict()
user_input_form = features_dict.get("user_input_form", [])
return get_parameters_from_feature_dict(features_dict=features_dict, user_input_form=user_input_form)
class AppApi(Resource):
@trial_feature_enable
@get_app_model_with_trial
@marshal_with(app_detail_fields_with_site)
def get(self, app_model):
"""Get app detail"""
app_service = AppService()
app_model = app_service.get_app(app_model)
return app_model
class AppWorkflowApi(Resource):
@trial_feature_enable
@get_app_model_with_trial
@marshal_with(workflow_fields)
def get(self, app_model):
"""Get workflow detail"""
if not app_model.workflow_id:
raise AppUnavailableError()
workflow = (
db.session.query(Workflow)
.where(
Workflow.id == app_model.workflow_id,
)
.first()
)
return workflow
class DatasetListApi(Resource):
@trial_feature_enable
@get_app_model_with_trial
def get(self, app_model):
page = request.args.get("page", default=1, type=int)
limit = request.args.get("limit", default=20, type=int)
ids = request.args.getlist("ids")
tenant_id = app_model.tenant_id
if ids:
datasets, total = DatasetService.get_datasets_by_ids(ids, tenant_id)
else:
raise NeedAddIdsError()
data = cast(list[dict[str, Any]], marshal(datasets, dataset_fields))
response = {"data": data, "has_more": len(datasets) == limit, "limit": limit, "total": total, "page": page}
return response
api.add_resource(TrialChatApi, "/trial-apps/<uuid:app_id>/chat-messages", endpoint="trial_app_chat_completion")
api.add_resource(
TrialMessageSuggestedQuestionApi,
"/trial-apps/<uuid:app_id>/messages/<uuid:message_id>/suggested-questions",
endpoint="trial_app_suggested_question",
)
api.add_resource(TrialChatAudioApi, "/trial-apps/<uuid:app_id>/audio-to-text", endpoint="trial_app_audio")
api.add_resource(TrialChatTextApi, "/trial-apps/<uuid:app_id>/text-to-audio", endpoint="trial_app_text")
api.add_resource(TrialCompletionApi, "/trial-apps/<uuid:app_id>/completion-messages", endpoint="trial_app_completion")
api.add_resource(TrialSitApi, "/trial-apps/<uuid:app_id>/site")
api.add_resource(TrialAppParameterApi, "/trial-apps/<uuid:app_id>/parameters", endpoint="trial_app_parameters")
api.add_resource(AppApi, "/trial-apps/<uuid:app_id>", endpoint="trial_app")
api.add_resource(TrialAppWorkflowRunApi, "/trial-apps/<uuid:app_id>/workflows/run", endpoint="trial_app_workflow_run")
api.add_resource(TrialAppWorkflowTaskStopApi, "/trial-apps/<uuid:app_id>/workflows/tasks/<string:task_id>/stop")
api.add_resource(AppWorkflowApi, "/trial-apps/<uuid:app_id>/workflows", endpoint="trial_app_workflow")
api.add_resource(DatasetListApi, "/trial-apps/<uuid:app_id>/datasets", endpoint="trial_app_datasets")
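Every handler in the removed trial-app controller above repeats the same except-chain that maps service-layer failures onto HTTP errors. Purely as a hedged aside, one common way to centralize such mapping is a decorator like the sketch below; the exception classes are local stand-ins rather than the project's real ones, and nothing of this sort appears in the diff itself:

from collections.abc import Callable
from functools import wraps
from typing import ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


# Local stand-ins for the service-layer and HTTP-level errors in the file above.
class ProviderTokenNotInitError(Exception): ...
class QuotaExceededError(Exception): ...
class ProviderNotInitializeError(Exception): ...
class ProviderQuotaExceededError(Exception): ...
class InternalServerError(Exception): ...


def map_service_errors(view: Callable[P, R]) -> Callable[P, R]:
    @wraps(view)
    def wrapped(*args: P.args, **kwargs: P.kwargs) -> R:
        try:
            return view(*args, **kwargs)
        except ProviderTokenNotInitError as ex:
            raise ProviderNotInitializeError(str(ex))
        except QuotaExceededError:
            raise ProviderQuotaExceededError()
        except ValueError:
            raise  # the original handlers re-raise ValueError unchanged
        except Exception:
            raise InternalServerError()

    return wrapped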

View File

@ -2,16 +2,14 @@ from collections.abc import Callable
from functools import wraps
from typing import Concatenate, ParamSpec, TypeVar
from flask import abort
from flask_login import current_user
from flask_restx import Resource
from werkzeug.exceptions import NotFound
from controllers.console.explore.error import AppAccessDeniedError, TrialAppLimitExceeded, TrialAppNotAllowed
from controllers.console.explore.error import AppAccessDeniedError
from controllers.console.wraps import account_initialization_required
from extensions.ext_database import db
from libs.login import current_account_with_tenant, login_required
from models import AccountTrialAppRecord, App, InstalledApp, TrialApp
from models import InstalledApp
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import FeatureService
@ -73,59 +71,6 @@ def user_allowed_to_access_app(view: Callable[Concatenate[InstalledApp, P], R] |
return decorator
def trial_app_required(view: Callable[Concatenate[App, P], R] | None = None):
def decorator(view: Callable[Concatenate[App, P], R]):
@wraps(view)
def decorated(app_id: str, *args: P.args, **kwargs: P.kwargs):
trial_app = db.session.query(TrialApp).where(TrialApp.app_id == str(app_id)).first()
if trial_app is None:
raise TrialAppNotAllowed()
app = trial_app.app
if app is None:
raise TrialAppNotAllowed()
account_trial_app_record = (
db.session.query(AccountTrialAppRecord)
.where(AccountTrialAppRecord.account_id == current_user.id, AccountTrialAppRecord.app_id == app_id)
.first()
)
if account_trial_app_record:
if account_trial_app_record.count >= trial_app.trial_limit:
raise TrialAppLimitExceeded()
return view(app, *args, **kwargs)
return decorated
if view:
return decorator(view)
return decorator
def trial_feature_enable(view: Callable[..., R]) -> Callable[..., R]:
@wraps(view)
def decorated(*args, **kwargs):
features = FeatureService.get_system_features()
if not features.enable_trial_app:
abort(403, "Trial app feature is not enabled.")
return view(*args, **kwargs)
return decorated
def explore_banner_enabled(view: Callable[..., R]) -> Callable[..., R]:
@wraps(view)
def decorated(*args, **kwargs):
features = FeatureService.get_system_features()
if not features.enable_explore_banner:
abort(403, "Explore banner feature is not enabled.")
return view(*args, **kwargs)
return decorated
class InstalledAppResource(Resource):
# must be reversed if there are multiple decorators
@ -135,13 +80,3 @@ class InstalledAppResource(Resource):
account_initialization_required,
login_required,
]
class TrialAppResource(Resource):
# must be reversed if there are multiple decorators
method_decorators = [
trial_app_required,
account_initialization_required,
login_required,
]
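The surviving InstalledAppResource (and the removed TrialAppResource) rely on flask-restx method_decorators, whose comment notes the list "must be reversed". That is because the framework applies the list in order, each entry wrapping the previous result, so the last entry ends up outermost and runs first on a request. A runnable sketch with stand-in decorators, assuming only Flask and flask-restx:

from functools import wraps

from flask import Flask
from flask_restx import Api, Resource

calls = []


def make_check(name):
    def decorator(view):
        @wraps(view)
        def wrapper(*args, **kwargs):
            calls.append(name)  # record which check ran, and when
            return view(*args, **kwargs)
        return wrapper
    return decorator


class DemoResource(Resource):
    # Equivalent to stacking @outer_check above @inner_check on every method:
    # the list is applied in order, so the last entry wraps everything else.
    method_decorators = [make_check("inner_check"), make_check("outer_check")]

    def get(self):
        return {"order": list(calls)}


app = Flask(__name__)
api = Api(app)
api.add_resource(DemoResource, "/demo")

if __name__ == "__main__":
    with app.test_client() as client:
        print(client.get("/demo").get_json())  # {'order': ['outer_check', 'inner_check']}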

View File

@ -1,7 +1,7 @@
from flask_restx import Resource, fields, marshal_with, reqparse
from constants import HIDDEN_VALUE
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from fields.api_based_extension_fields import api_based_extension_fields
from libs.login import current_account_with_tenant, login_required
@ -9,18 +9,24 @@ from models.api_based_extension import APIBasedExtension
from services.api_based_extension_service import APIBasedExtensionService
from services.code_based_extension_service import CodeBasedExtensionService
api_based_extension_model = console_ns.model("ApiBasedExtensionModel", api_based_extension_fields)
api_based_extension_list_model = fields.List(fields.Nested(api_based_extension_model))
@console_ns.route("/code-based-extension")
class CodeBasedExtensionAPI(Resource):
@api.doc("get_code_based_extension")
@api.doc(description="Get code-based extension data by module name")
@api.expect(
api.parser().add_argument("module", type=str, required=True, location="args", help="Extension module name")
@console_ns.doc("get_code_based_extension")
@console_ns.doc(description="Get code-based extension data by module name")
@console_ns.expect(
console_ns.parser().add_argument(
"module", type=str, required=True, location="args", help="Extension module name"
)
)
@api.response(
@console_ns.response(
200,
"Success",
api.model(
console_ns.model(
"CodeBasedExtensionResponse",
{"module": fields.String(description="Module name"), "data": fields.Raw(description="Extension data")},
),
@ -37,21 +43,21 @@ class CodeBasedExtensionAPI(Resource):
@console_ns.route("/api-based-extension")
class APIBasedExtensionAPI(Resource):
@api.doc("get_api_based_extensions")
@api.doc(description="Get all API-based extensions for current tenant")
@api.response(200, "Success", fields.List(fields.Nested(api_based_extension_fields)))
@console_ns.doc("get_api_based_extensions")
@console_ns.doc(description="Get all API-based extensions for current tenant")
@console_ns.response(200, "Success", api_based_extension_list_model)
@setup_required
@login_required
@account_initialization_required
@marshal_with(api_based_extension_fields)
@marshal_with(api_based_extension_model)
def get(self):
_, tenant_id = current_account_with_tenant()
return APIBasedExtensionService.get_all_by_tenant_id(tenant_id)
@api.doc("create_api_based_extension")
@api.doc(description="Create a new API-based extension")
@api.expect(
api.model(
@console_ns.doc("create_api_based_extension")
@console_ns.doc(description="Create a new API-based extension")
@console_ns.expect(
console_ns.model(
"CreateAPIBasedExtensionRequest",
{
"name": fields.String(required=True, description="Extension name"),
@ -60,13 +66,13 @@ class APIBasedExtensionAPI(Resource):
},
)
)
@api.response(201, "Extension created successfully", api_based_extension_fields)
@console_ns.response(201, "Extension created successfully", api_based_extension_model)
@setup_required
@login_required
@account_initialization_required
@marshal_with(api_based_extension_fields)
@marshal_with(api_based_extension_model)
def post(self):
args = api.payload
args = console_ns.payload
_, current_tenant_id = current_account_with_tenant()
extension_data = APIBasedExtension(
@ -81,25 +87,25 @@ class APIBasedExtensionAPI(Resource):
@console_ns.route("/api-based-extension/<uuid:id>")
class APIBasedExtensionDetailAPI(Resource):
@api.doc("get_api_based_extension")
@api.doc(description="Get API-based extension by ID")
@api.doc(params={"id": "Extension ID"})
@api.response(200, "Success", api_based_extension_fields)
@console_ns.doc("get_api_based_extension")
@console_ns.doc(description="Get API-based extension by ID")
@console_ns.doc(params={"id": "Extension ID"})
@console_ns.response(200, "Success", api_based_extension_model)
@setup_required
@login_required
@account_initialization_required
@marshal_with(api_based_extension_fields)
@marshal_with(api_based_extension_model)
def get(self, id):
api_based_extension_id = str(id)
_, tenant_id = current_account_with_tenant()
return APIBasedExtensionService.get_with_tenant_id(tenant_id, api_based_extension_id)
@api.doc("update_api_based_extension")
@api.doc(description="Update API-based extension")
@api.doc(params={"id": "Extension ID"})
@api.expect(
api.model(
@console_ns.doc("update_api_based_extension")
@console_ns.doc(description="Update API-based extension")
@console_ns.doc(params={"id": "Extension ID"})
@console_ns.expect(
console_ns.model(
"UpdateAPIBasedExtensionRequest",
{
"name": fields.String(required=True, description="Extension name"),
@ -108,18 +114,18 @@ class APIBasedExtensionDetailAPI(Resource):
},
)
)
@api.response(200, "Extension updated successfully", api_based_extension_fields)
@console_ns.response(200, "Extension updated successfully", api_based_extension_model)
@setup_required
@login_required
@account_initialization_required
@marshal_with(api_based_extension_fields)
@marshal_with(api_based_extension_model)
def post(self, id):
api_based_extension_id = str(id)
_, current_tenant_id = current_account_with_tenant()
extension_data_from_db = APIBasedExtensionService.get_with_tenant_id(current_tenant_id, api_based_extension_id)
args = api.payload
args = console_ns.payload
extension_data_from_db.name = args["name"]
extension_data_from_db.api_endpoint = args["api_endpoint"]
@ -129,10 +135,10 @@ class APIBasedExtensionDetailAPI(Resource):
return APIBasedExtensionService.save(extension_data_from_db)
@api.doc("delete_api_based_extension")
@api.doc(description="Delete API-based extension")
@api.doc(params={"id": "Extension ID"})
@api.response(204, "Extension deleted successfully")
@console_ns.doc("delete_api_based_extension")
@console_ns.doc(description="Delete API-based extension")
@console_ns.doc(params={"id": "Extension ID"})
@console_ns.response(204, "Extension deleted successfully")
@setup_required
@login_required
@account_initialization_required

View File

@ -3,18 +3,18 @@ from flask_restx import Resource, fields
from libs.login import current_account_with_tenant, login_required
from services.feature_service import FeatureService
from . import api, console_ns
from . import console_ns
from .wraps import account_initialization_required, cloud_utm_record, setup_required
@console_ns.route("/features")
class FeatureApi(Resource):
@api.doc("get_tenant_features")
@api.doc(description="Get feature configuration for current tenant")
@api.response(
@console_ns.doc("get_tenant_features")
@console_ns.doc(description="Get feature configuration for current tenant")
@console_ns.response(
200,
"Success",
api.model("FeatureResponse", {"features": fields.Raw(description="Feature configuration object")}),
console_ns.model("FeatureResponse", {"features": fields.Raw(description="Feature configuration object")}),
)
@setup_required
@login_required
@ -29,12 +29,14 @@ class FeatureApi(Resource):
@console_ns.route("/system-features")
class SystemFeatureApi(Resource):
@api.doc("get_system_features")
@api.doc(description="Get system-wide feature configuration")
@api.response(
@console_ns.doc("get_system_features")
@console_ns.doc(description="Get system-wide feature configuration")
@console_ns.response(
200,
"Success",
api.model("SystemFeatureResponse", {"features": fields.Raw(description="System feature configuration object")}),
console_ns.model(
"SystemFeatureResponse", {"features": fields.Raw(description="System feature configuration object")}
),
)
def get(self):
"""Get system-wide feature configuration"""

View File

@ -11,19 +11,19 @@ from libs.helper import StrLen
from models.model import DifySetup
from services.account_service import TenantService
from . import api, console_ns
from . import console_ns
from .error import AlreadySetupError, InitValidateFailedError
from .wraps import only_edition_self_hosted
@console_ns.route("/init")
class InitValidateAPI(Resource):
@api.doc("get_init_status")
@api.doc(description="Get initialization validation status")
@api.response(
@console_ns.doc("get_init_status")
@console_ns.doc(description="Get initialization validation status")
@console_ns.response(
200,
"Success",
model=api.model(
model=console_ns.model(
"InitStatusResponse",
{"status": fields.String(description="Initialization status", enum=["finished", "not_started"])},
),
@ -35,20 +35,20 @@ class InitValidateAPI(Resource):
return {"status": "finished"}
return {"status": "not_started"}
@api.doc("validate_init_password")
@api.doc(description="Validate initialization password for self-hosted edition")
@api.expect(
api.model(
@console_ns.doc("validate_init_password")
@console_ns.doc(description="Validate initialization password for self-hosted edition")
@console_ns.expect(
console_ns.model(
"InitValidateRequest",
{"password": fields.String(required=True, description="Initialization password", max_length=30)},
)
)
@api.response(
@console_ns.response(
201,
"Success",
model=api.model("InitValidateResponse", {"result": fields.String(description="Operation result")}),
model=console_ns.model("InitValidateResponse", {"result": fields.String(description="Operation result")}),
)
@api.response(400, "Already setup or validation failed")
@console_ns.response(400, "Already setup or validation failed")
@only_edition_self_hosted
def post(self):
"""Validate initialization password"""

View File

@ -1,16 +1,16 @@
from flask_restx import Resource, fields
from . import api, console_ns
from . import console_ns
@console_ns.route("/ping")
class PingApi(Resource):
@api.doc("health_check")
@api.doc(description="Health check endpoint for connection testing")
@api.response(
@console_ns.doc("health_check")
@console_ns.doc(description="Health check endpoint for connection testing")
@console_ns.response(
200,
"Success",
api.model("PingResponse", {"result": fields.String(description="Health check result", example="pong")}),
console_ns.model("PingResponse", {"result": fields.String(description="Health check result", example="pong")}),
)
def get(self):
"""Health check endpoint for connection testing"""

View File

@ -36,12 +36,15 @@ class RemoteFileInfoApi(Resource):
}
parser_upload = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required")
@console_ns.route("/remote-files/upload")
class RemoteFileUploadApi(Resource):
@console_ns.expect(parser_upload)
@marshal_with(file_fields_with_signed_url)
def post(self):
parser = reqparse.RequestParser().add_argument("url", type=str, required=True, help="URL is required")
args = parser.parse_args()
args = parser_upload.parse_args()
url = args["url"]

View File

@ -7,7 +7,7 @@ from libs.password import valid_password
from models.model import DifySetup, db
from services.account_service import RegisterService, TenantService
from . import api, console_ns
from . import console_ns
from .error import AlreadySetupError, NotInitValidateError
from .init_validate import get_init_validate_status
from .wraps import only_edition_self_hosted
@ -15,12 +15,12 @@ from .wraps import only_edition_self_hosted
@console_ns.route("/setup")
class SetupApi(Resource):
@api.doc("get_setup_status")
@api.doc(description="Get system setup status")
@api.response(
@console_ns.doc("get_setup_status")
@console_ns.doc(description="Get system setup status")
@console_ns.response(
200,
"Success",
api.model(
console_ns.model(
"SetupStatusResponse",
{
"step": fields.String(description="Setup step status", enum=["not_started", "finished"]),
@ -40,20 +40,23 @@ class SetupApi(Resource):
return {"step": "not_started"}
return {"step": "finished"}
@api.doc("setup_system")
@api.doc(description="Initialize system setup with admin account")
@api.expect(
api.model(
@console_ns.doc("setup_system")
@console_ns.doc(description="Initialize system setup with admin account")
@console_ns.expect(
console_ns.model(
"SetupRequest",
{
"email": fields.String(required=True, description="Admin email address"),
"name": fields.String(required=True, description="Admin name (max 30 characters)"),
"password": fields.String(required=True, description="Admin password"),
"language": fields.String(required=False, description="Admin language"),
},
)
)
@api.response(201, "Success", api.model("SetupResponse", {"result": fields.String(description="Setup result")}))
@api.response(400, "Already setup or validation failed")
@console_ns.response(
201, "Success", console_ns.model("SetupResponse", {"result": fields.String(description="Setup result")})
)
@console_ns.response(400, "Already setup or validation failed")
@only_edition_self_hosted
def post(self):
"""Initialize system setup with admin account"""

View File

@ -3,7 +3,7 @@ from flask_restx import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.console.wraps import account_initialization_required, edit_permission_required, setup_required
from fields.tag_fields import dataset_tag_fields
from libs.login import current_account_with_tenant, login_required
from models.model import Tag
@ -16,6 +16,19 @@ def _validate_name(name):
return name
parser_tags = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="Name must be between 1 to 50 characters.",
type=_validate_name,
)
.add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.")
)
@console_ns.route("/tags")
class TagListApi(Resource):
@setup_required
@ -30,6 +43,7 @@ class TagListApi(Resource):
return tags, 200
@console_ns.expect(parser_tags)
@setup_required
@login_required
@account_initialization_required
@ -39,20 +53,7 @@ class TagListApi(Resource):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument(
"name",
nullable=False,
required=True,
help="Name must be between 1 to 50 characters.",
type=_validate_name,
)
.add_argument(
"type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
)
)
args = parser.parse_args()
args = parser_tags.parse_args()
tag = TagService.save_tags(args)
response = {"id": tag.id, "name": tag.name, "type": tag.type, "binding_count": 0}
@ -60,8 +61,14 @@ class TagListApi(Resource):
return response, 200
parser_tag_id = reqparse.RequestParser().add_argument(
"name", nullable=False, required=True, help="Name must be between 1 to 50 characters.", type=_validate_name
)
@console_ns.route("/tags/<uuid:tag_id>")
class TagUpdateDeleteApi(Resource):
@console_ns.expect(parser_tag_id)
@setup_required
@login_required
@account_initialization_required
@ -72,10 +79,7 @@ class TagUpdateDeleteApi(Resource):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = reqparse.RequestParser().add_argument(
"name", nullable=False, required=True, help="Name must be between 1 to 50 characters.", type=_validate_name
)
args = parser.parse_args()
args = parser_tag_id.parse_args()
tag = TagService.update_tags(args, tag_id)
binding_count = TagService.get_tag_binding_count(tag_id)
@ -87,20 +91,26 @@ class TagUpdateDeleteApi(Resource):
@setup_required
@login_required
@account_initialization_required
@edit_permission_required
def delete(self, tag_id):
current_user, _ = current_account_with_tenant()
tag_id = str(tag_id)
# The role of the current user in the ta table must be admin, owner, or editor
if not current_user.has_edit_permission:
raise Forbidden()
TagService.delete_tag(tag_id)
return 204
parser_create = (
reqparse.RequestParser()
.add_argument("tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required.")
.add_argument("target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required.")
.add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.")
)
@console_ns.route("/tag-bindings/create")
class TagBindingCreateApi(Resource):
@console_ns.expect(parser_create)
@setup_required
@login_required
@account_initialization_required
@ -110,26 +120,23 @@ class TagBindingCreateApi(Resource):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument(
"tag_ids", type=list, nullable=False, required=True, location="json", help="Tag IDs is required."
)
.add_argument(
"target_id", type=str, nullable=False, required=True, location="json", help="Target ID is required."
)
.add_argument(
"type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
)
)
args = parser.parse_args()
args = parser_create.parse_args()
TagService.save_tag_binding(args)
return {"result": "success"}, 200
parser_remove = (
reqparse.RequestParser()
.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.")
.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.")
.add_argument("type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type.")
)
@console_ns.route("/tag-bindings/remove")
class TagBindingDeleteApi(Resource):
@console_ns.expect(parser_remove)
@setup_required
@login_required
@account_initialization_required
@ -139,15 +146,7 @@ class TagBindingDeleteApi(Resource):
if not (current_user.has_edit_permission or current_user.is_dataset_editor):
raise Forbidden()
parser = (
reqparse.RequestParser()
.add_argument("tag_id", type=str, nullable=False, required=True, help="Tag ID is required.")
.add_argument("target_id", type=str, nullable=False, required=True, help="Target ID is required.")
.add_argument(
"type", type=str, location="json", choices=Tag.TAG_TYPE_LIST, nullable=True, help="Invalid tag type."
)
)
args = parser.parse_args()
args = parser_remove.parse_args()
TagService.delete_tag_binding(args)
return {"result": "success"}, 200

View File

@ -7,24 +7,24 @@ from packaging import version
from configs import dify_config
from . import api, console_ns
from . import console_ns
logger = logging.getLogger(__name__)
parser = reqparse.RequestParser().add_argument(
"current_version", type=str, required=True, location="args", help="Current application version"
)
@console_ns.route("/version")
class VersionApi(Resource):
@api.doc("check_version_update")
@api.doc(description="Check for application version updates")
@api.expect(
api.parser().add_argument(
"current_version", type=str, required=True, location="args", help="Current application version"
)
)
@api.response(
@console_ns.doc("check_version_update")
@console_ns.doc(description="Check for application version updates")
@console_ns.expect(parser)
@console_ns.response(
200,
"Success",
api.model(
console_ns.model(
"VersionResponse",
{
"version": fields.String(description="Latest version number"),
@ -37,7 +37,6 @@ class VersionApi(Resource):
)
def get(self):
"""Check for application version updates"""
parser = reqparse.RequestParser().add_argument("current_version", type=str, required=True, location="args")
args = parser.parse_args()
check_update_url = dify_config.CHECK_UPDATE_URL

View File

@ -1,8 +1,10 @@
from datetime import datetime
from typing import Literal
import pytz
from flask import request
from flask_restx import Resource, fields, marshal_with, reqparse
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field, field_validator, model_validator
from sqlalchemy import select
from sqlalchemy.orm import Session
@ -32,7 +34,6 @@ from controllers.console.wraps import (
only_edition_cloud,
setup_required,
)
from core.file import helpers as file_helpers
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.datetime_utils import naive_utc_now
@ -43,9 +44,198 @@ from services.account_service import AccountService
from services.billing_service import BillingService
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
DEFAULT_REF_TEMPLATE_SWAGGER_2_0 = "#/definitions/{model}"
class AccountInitPayload(BaseModel):
interface_language: str
timezone: str
invitation_code: str | None = None
@field_validator("interface_language")
@classmethod
def validate_language(cls, value: str) -> str:
return supported_language(value)
@field_validator("timezone")
@classmethod
def validate_timezone(cls, value: str) -> str:
return timezone(value)
class AccountNamePayload(BaseModel):
name: str = Field(min_length=3, max_length=30)
class AccountAvatarPayload(BaseModel):
avatar: str
class AccountInterfaceLanguagePayload(BaseModel):
interface_language: str
@field_validator("interface_language")
@classmethod
def validate_language(cls, value: str) -> str:
return supported_language(value)
class AccountInterfaceThemePayload(BaseModel):
interface_theme: Literal["light", "dark"]
class AccountTimezonePayload(BaseModel):
timezone: str
@field_validator("timezone")
@classmethod
def validate_timezone(cls, value: str) -> str:
return timezone(value)
class AccountPasswordPayload(BaseModel):
password: str | None = None
new_password: str
repeat_new_password: str
@model_validator(mode="after")
def check_passwords_match(self) -> "AccountPasswordPayload":
if self.new_password != self.repeat_new_password:
raise RepeatPasswordNotMatchError()
return self
class AccountDeletePayload(BaseModel):
token: str
code: str
class AccountDeletionFeedbackPayload(BaseModel):
email: str
feedback: str
@field_validator("email")
@classmethod
def validate_email(cls, value: str) -> str:
return email(value)
class EducationActivatePayload(BaseModel):
token: str
institution: str
role: str
class EducationAutocompleteQuery(BaseModel):
keywords: str
page: int = 0
limit: int = 20
class ChangeEmailSendPayload(BaseModel):
email: str
language: str | None = None
phase: str | None = None
token: str | None = None
@field_validator("email")
@classmethod
def validate_email(cls, value: str) -> str:
return email(value)
class ChangeEmailValidityPayload(BaseModel):
email: str
code: str
token: str
@field_validator("email")
@classmethod
def validate_email(cls, value: str) -> str:
return email(value)
class ChangeEmailResetPayload(BaseModel):
new_email: str
token: str
@field_validator("new_email")
@classmethod
def validate_email(cls, value: str) -> str:
return email(value)
class CheckEmailUniquePayload(BaseModel):
email: str
@field_validator("email")
@classmethod
def validate_email(cls, value: str) -> str:
return email(value)
console_ns.schema_model(
AccountInitPayload.__name__, AccountInitPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)
console_ns.schema_model(
AccountNamePayload.__name__, AccountNamePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)
console_ns.schema_model(
AccountAvatarPayload.__name__, AccountAvatarPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0)
)
console_ns.schema_model(
AccountInterfaceLanguagePayload.__name__,
AccountInterfaceLanguagePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
AccountInterfaceThemePayload.__name__,
AccountInterfaceThemePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
AccountTimezonePayload.__name__,
AccountTimezonePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
AccountPasswordPayload.__name__,
AccountPasswordPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
AccountDeletePayload.__name__,
AccountDeletePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
AccountDeletionFeedbackPayload.__name__,
AccountDeletionFeedbackPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
EducationActivatePayload.__name__,
EducationActivatePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
EducationAutocompleteQuery.__name__,
EducationAutocompleteQuery.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
ChangeEmailSendPayload.__name__,
ChangeEmailSendPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
ChangeEmailValidityPayload.__name__,
ChangeEmailValidityPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
ChangeEmailResetPayload.__name__,
ChangeEmailResetPayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
console_ns.schema_model(
CheckEmailUniquePayload.__name__,
CheckEmailUniquePayload.model_json_schema(ref_template=DEFAULT_REF_TEMPLATE_SWAGGER_2_0),
)
@console_ns.route("/account/init")
class AccountInitApi(Resource):
@console_ns.expect(console_ns.models[AccountInitPayload.__name__])
@setup_required
@login_required
def post(self):
@ -54,24 +244,18 @@ class AccountInitApi(Resource):
if account.status == "active":
raise AccountAlreadyInitedError()
parser = reqparse.RequestParser()
payload = console_ns.payload or {}
args = AccountInitPayload.model_validate(payload)
if dify_config.EDITION == "CLOUD":
parser.add_argument("invitation_code", type=str, location="json")
parser.add_argument("interface_language", type=supported_language, required=True, location="json").add_argument(
"timezone", type=timezone, required=True, location="json"
)
args = parser.parse_args()
if dify_config.EDITION == "CLOUD":
if not args["invitation_code"]:
if not args.invitation_code:
raise ValueError("invitation_code is required")
# check invitation code
invitation_code = (
db.session.query(InvitationCode)
.where(
InvitationCode.code == args["invitation_code"],
InvitationCode.code == args.invitation_code,
InvitationCode.status == "unused",
)
.first()
@ -85,8 +269,8 @@ class AccountInitApi(Resource):
invitation_code.used_by_tenant_id = account.current_tenant_id
invitation_code.used_by_account_id = account.id
account.interface_language = args["interface_language"]
account.timezone = args["timezone"]
account.interface_language = args.interface_language
account.timezone = args.timezone
account.interface_theme = "light"
account.status = "active"
account.initialized_at = naive_utc_now()
@ -109,128 +293,102 @@ class AccountProfileApi(Resource):
@console_ns.route("/account/name")
class AccountNameApi(Resource):
@console_ns.expect(console_ns.models[AccountNamePayload.__name__])
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("name", type=str, required=True, location="json")
args = parser.parse_args()
# Validate account name length
if len(args["name"]) < 3 or len(args["name"]) > 30:
raise ValueError("Account name must be between 3 and 30 characters.")
updated_account = AccountService.update_account(current_user, name=args["name"])
payload = console_ns.payload or {}
args = AccountNamePayload.model_validate(payload)
updated_account = AccountService.update_account(current_user, name=args.name)
return updated_account
@console_ns.route("/account/avatar")
class AccountAvatarApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self):
parser = reqparse.RequestParser()
parser.add_argument("avatar", type=str, required=True, location="args")
args = parser.parse_args()
avatar_url = file_helpers.get_signed_file_url(args["avatar"])
return {"avatar_url": avatar_url}
@console_ns.expect(console_ns.models[AccountAvatarPayload.__name__])
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("avatar", type=str, required=True, location="json")
args = parser.parse_args()
payload = console_ns.payload or {}
args = AccountAvatarPayload.model_validate(payload)
updated_account = AccountService.update_account(current_user, avatar=args["avatar"])
updated_account = AccountService.update_account(current_user, avatar=args.avatar)
return updated_account
@console_ns.route("/account/interface-language")
class AccountInterfaceLanguageApi(Resource):
@console_ns.expect(console_ns.models[AccountInterfaceLanguagePayload.__name__])
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"interface_language", type=supported_language, required=True, location="json"
)
args = parser.parse_args()
payload = console_ns.payload or {}
args = AccountInterfaceLanguagePayload.model_validate(payload)
updated_account = AccountService.update_account(current_user, interface_language=args["interface_language"])
updated_account = AccountService.update_account(current_user, interface_language=args.interface_language)
return updated_account
@console_ns.route("/account/interface-theme")
class AccountInterfaceThemeApi(Resource):
@console_ns.expect(console_ns.models[AccountInterfaceThemePayload.__name__])
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument(
"interface_theme", type=str, choices=["light", "dark"], required=True, location="json"
)
args = parser.parse_args()
payload = console_ns.payload or {}
args = AccountInterfaceThemePayload.model_validate(payload)
updated_account = AccountService.update_account(current_user, interface_theme=args["interface_theme"])
updated_account = AccountService.update_account(current_user, interface_theme=args.interface_theme)
return updated_account
@console_ns.route("/account/timezone")
class AccountTimezoneApi(Resource):
@console_ns.expect(console_ns.models[AccountTimezonePayload.__name__])
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = reqparse.RequestParser().add_argument("timezone", type=str, required=True, location="json")
args = parser.parse_args()
payload = console_ns.payload or {}
args = AccountTimezonePayload.model_validate(payload)
# Validate timezone string, e.g. America/New_York, Asia/Shanghai
if args["timezone"] not in pytz.all_timezones:
raise ValueError("Invalid timezone string.")
updated_account = AccountService.update_account(current_user, timezone=args["timezone"])
updated_account = AccountService.update_account(current_user, timezone=args.timezone)
return updated_account
@console_ns.route("/account/password")
class AccountPasswordApi(Resource):
@console_ns.expect(console_ns.models[AccountPasswordPayload.__name__])
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("password", type=str, required=False, location="json")
.add_argument("new_password", type=str, required=True, location="json")
.add_argument("repeat_new_password", type=str, required=True, location="json")
)
args = parser.parse_args()
if args["new_password"] != args["repeat_new_password"]:
raise RepeatPasswordNotMatchError()
payload = console_ns.payload or {}
args = AccountPasswordPayload.model_validate(payload)
try:
AccountService.update_account_password(current_user, args["password"], args["new_password"])
AccountService.update_account_password(current_user, args.password, args.new_password)
except ServiceCurrentPasswordIncorrectError:
raise CurrentPasswordIncorrectError()
@ -308,20 +466,17 @@ class AccountDeleteVerifyApi(Resource):
@console_ns.route("/account/delete")
class AccountDeleteApi(Resource):
@console_ns.expect(console_ns.models[AccountDeletePayload.__name__])
@setup_required
@login_required
@account_initialization_required
def post(self):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("token", type=str, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
)
args = parser.parse_args()
payload = console_ns.payload or {}
args = AccountDeletePayload.model_validate(payload)
if not AccountService.verify_account_deletion_code(args["token"], args["code"]):
if not AccountService.verify_account_deletion_code(args.token, args.code):
raise InvalidAccountDeletionCodeError()
AccountService.delete_account(account)
@ -331,16 +486,13 @@ class AccountDeleteApi(Resource):
@console_ns.route("/account/delete/feedback")
class AccountDeleteUpdateFeedbackApi(Resource):
@console_ns.expect(console_ns.models[AccountDeletionFeedbackPayload.__name__])
@setup_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=str, required=True, location="json")
.add_argument("feedback", type=str, required=True, location="json")
)
args = parser.parse_args()
payload = console_ns.payload or {}
args = AccountDeletionFeedbackPayload.model_validate(payload)
BillingService.update_account_deletion_feedback(args["email"], args["feedback"])
BillingService.update_account_deletion_feedback(args.email, args.feedback)
return {"result": "success"}
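
The same handler-level change repeats through this file: the chained reqparse.RequestParser is dropped, the JSON body exposed by console_ns.payload is validated once against the payload model, and dict-style args["..."] access becomes attribute access. A compact sketch of both styles, using an assumed two-field model in place of the real AccountDeletionFeedbackPayload:

from flask_restx import reqparse
from pydantic import BaseModel

class FeedbackPayload(BaseModel):
    # Illustrative stand-in for the real payload class.
    email: str
    feedback: str

def parse_old_style():
    # Previous pattern: build a parser per request and read a dict.
    # (Only works inside a Flask request context.)
    parser = (
        reqparse.RequestParser()
        .add_argument("email", type=str, required=True, location="json")
        .add_argument("feedback", type=str, required=True, location="json")
    )
    args = parser.parse_args()
    return args["email"], args["feedback"]

def parse_new_style(payload: dict):
    # New pattern: validate the already-parsed JSON body once, then use attributes.
    args = FeedbackPayload.model_validate(payload or {})
    return args.email, args.feedback

With the new style, malformed input surfaces as a pydantic.ValidationError from model_validate rather than as a parser error.
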
@ -372,6 +524,7 @@ class EducationApi(Resource):
"allow_refresh": fields.Boolean,
}
@console_ns.expect(console_ns.models[EducationActivatePayload.__name__])
@setup_required
@login_required
@account_initialization_required
@ -380,15 +533,10 @@ class EducationApi(Resource):
def post(self):
account, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("token", type=str, required=True, location="json")
.add_argument("institution", type=str, required=True, location="json")
.add_argument("role", type=str, required=True, location="json")
)
args = parser.parse_args()
payload = console_ns.payload or {}
args = EducationActivatePayload.model_validate(payload)
return BillingService.EducationIdentity.activate(account, args["token"], args["institution"], args["role"])
return BillingService.EducationIdentity.activate(account, args.token, args.institution, args.role)
@setup_required
@login_required
@ -414,6 +562,7 @@ class EducationAutoCompleteApi(Resource):
"has_next": fields.Boolean,
}
@console_ns.expect(console_ns.models[EducationAutocompleteQuery.__name__])
@setup_required
@login_required
@account_initialization_required
@ -421,49 +570,39 @@ class EducationAutoCompleteApi(Resource):
@cloud_edition_billing_enabled
@marshal_with(data_fields)
def get(self):
parser = (
reqparse.RequestParser()
.add_argument("keywords", type=str, required=True, location="args")
.add_argument("page", type=int, required=False, location="args", default=0)
.add_argument("limit", type=int, required=False, location="args", default=20)
)
args = parser.parse_args()
payload = request.args.to_dict(flat=True) # type: ignore
args = EducationAutocompleteQuery.model_validate(payload)
return BillingService.EducationIdentity.autocomplete(args["keywords"], args["page"], args["limit"])
return BillingService.EducationIdentity.autocomplete(args.keywords, args.page, args.limit)
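
GET endpoints follow the same idea but source their data from the query string: request.args.to_dict(flat=True) is validated against a query model, with defaults declared on the model instead of via default= on the parser. A minimal sketch with assumed field names mirroring the keywords/page/limit arguments above:

from flask import Flask, request
from pydantic import BaseModel

class AutocompleteQuery(BaseModel):
    # Illustrative stand-in for EducationAutocompleteQuery; query-string
    # values arrive as strings and are coerced to the annotated types.
    keywords: str
    page: int = 0
    limit: int = 20

app = Flask(__name__)

@app.get("/autocomplete")
def autocomplete():
    args = AutocompleteQuery.model_validate(request.args.to_dict(flat=True))
    return {"keywords": args.keywords, "page": args.page, "limit": args.limit}
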
@console_ns.route("/account/change-email")
class ChangeEmailSendEmailApi(Resource):
@console_ns.expect(console_ns.models[ChangeEmailSendPayload.__name__])
@enable_change_email
@setup_required
@login_required
@account_initialization_required
def post(self):
current_user, _ = current_account_with_tenant()
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("language", type=str, required=False, location="json")
.add_argument("phase", type=str, required=False, location="json")
.add_argument("token", type=str, required=False, location="json")
)
args = parser.parse_args()
payload = console_ns.payload or {}
args = ChangeEmailSendPayload.model_validate(payload)
ip_address = extract_remote_ip(request)
if AccountService.is_email_send_ip_limit(ip_address):
raise EmailSendIpLimitError()
if args["language"] is not None and args["language"] == "zh-Hans":
if args.language is not None and args.language == "zh-Hans":
language = "zh-Hans"
else:
language = "en-US"
account = None
user_email = args["email"]
if args["phase"] is not None and args["phase"] == "new_email":
if args["token"] is None:
user_email = args.email
if args.phase is not None and args.phase == "new_email":
if args.token is None:
raise InvalidTokenError()
reset_data = AccountService.get_change_email_data(args["token"])
reset_data = AccountService.get_change_email_data(args.token)
if reset_data is None:
raise InvalidTokenError()
user_email = reset_data.get("email", "")
@ -472,96 +611,89 @@ class ChangeEmailSendEmailApi(Resource):
raise InvalidEmailError()
else:
with Session(db.engine) as session:
account = session.execute(select(Account).filter_by(email=args["email"])).scalar_one_or_none()
account = session.execute(select(Account).filter_by(email=args.email)).scalar_one_or_none()
if account is None:
raise AccountNotFound()
token = AccountService.send_change_email_email(
account=account, email=args["email"], old_email=user_email, language=language, phase=args["phase"]
account=account, email=args.email, old_email=user_email, language=language, phase=args.phase
)
return {"result": "success", "data": token}
@console_ns.route("/account/change-email/validity")
class ChangeEmailCheckApi(Resource):
@console_ns.expect(console_ns.models[ChangeEmailValidityPayload.__name__])
@enable_change_email
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("email", type=email, required=True, location="json")
.add_argument("code", type=str, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
payload = console_ns.payload or {}
args = ChangeEmailValidityPayload.model_validate(payload)
user_email = args["email"]
user_email = args.email
is_change_email_error_rate_limit = AccountService.is_change_email_error_rate_limit(args["email"])
is_change_email_error_rate_limit = AccountService.is_change_email_error_rate_limit(args.email)
if is_change_email_error_rate_limit:
raise EmailChangeLimitError()
token_data = AccountService.get_change_email_data(args["token"])
token_data = AccountService.get_change_email_data(args.token)
if token_data is None:
raise InvalidTokenError()
if user_email != token_data.get("email"):
raise InvalidEmailError()
if args["code"] != token_data.get("code"):
AccountService.add_change_email_error_rate_limit(args["email"])
if args.code != token_data.get("code"):
AccountService.add_change_email_error_rate_limit(args.email)
raise EmailCodeError()
# Verified, revoke the first token
AccountService.revoke_change_email_token(args["token"])
AccountService.revoke_change_email_token(args.token)
# Refresh token data by generating a new token
_, new_token = AccountService.generate_change_email_token(
user_email, code=args["code"], old_email=token_data.get("old_email"), additional_data={}
user_email, code=args.code, old_email=token_data.get("old_email"), additional_data={}
)
AccountService.reset_change_email_error_rate_limit(args["email"])
AccountService.reset_change_email_error_rate_limit(args.email)
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
@console_ns.route("/account/change-email/reset")
class ChangeEmailResetApi(Resource):
@console_ns.expect(console_ns.models[ChangeEmailResetPayload.__name__])
@enable_change_email
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
parser = (
reqparse.RequestParser()
.add_argument("new_email", type=email, required=True, location="json")
.add_argument("token", type=str, required=True, nullable=False, location="json")
)
args = parser.parse_args()
payload = console_ns.payload or {}
args = ChangeEmailResetPayload.model_validate(payload)
if AccountService.is_account_in_freeze(args["new_email"]):
if AccountService.is_account_in_freeze(args.new_email):
raise AccountInFreezeError()
if not AccountService.check_email_unique(args["new_email"]):
if not AccountService.check_email_unique(args.new_email):
raise EmailAlreadyInUseError()
reset_data = AccountService.get_change_email_data(args["token"])
reset_data = AccountService.get_change_email_data(args.token)
if not reset_data:
raise InvalidTokenError()
AccountService.revoke_change_email_token(args["token"])
AccountService.revoke_change_email_token(args.token)
old_email = reset_data.get("old_email", "")
current_user, _ = current_account_with_tenant()
if current_user.email != old_email:
raise AccountNotFound()
updated_account = AccountService.update_account_email(current_user, email=args["new_email"])
updated_account = AccountService.update_account_email(current_user, email=args.new_email)
AccountService.send_change_email_completed_notify_email(
email=args["new_email"],
email=args.new_email,
)
return updated_account
@ -569,12 +701,13 @@ class ChangeEmailResetApi(Resource):
@console_ns.route("/account/change-email/check-email-unique")
class CheckEmailUnique(Resource):
@console_ns.expect(console_ns.models[CheckEmailUniquePayload.__name__])
@setup_required
def post(self):
parser = reqparse.RequestParser().add_argument("email", type=email, required=True, location="json")
args = parser.parse_args()
if AccountService.is_account_in_freeze(args["email"]):
payload = console_ns.payload or {}
args = CheckEmailUniquePayload.model_validate(payload)
if AccountService.is_account_in_freeze(args.email):
raise AccountInFreezeError()
if not AccountService.check_email_unique(args["email"]):
if not AccountService.check_email_unique(args.email):
raise EmailAlreadyInUseError()
return {"result": "success"}


@ -1,6 +1,6 @@
from flask_restx import Resource, fields
from controllers.console import api, console_ns
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from core.model_runtime.utils.encoders import jsonable_encoder
from libs.login import current_account_with_tenant, login_required
@ -9,9 +9,9 @@ from services.agent_service import AgentService
@console_ns.route("/workspaces/current/agent-providers")
class AgentProviderListApi(Resource):
@api.doc("list_agent_providers")
@api.doc(description="Get list of available agent providers")
@api.response(
@console_ns.doc("list_agent_providers")
@console_ns.doc(description="Get list of available agent providers")
@console_ns.response(
200,
"Success",
fields.List(fields.Raw(description="Agent provider information")),
@ -31,10 +31,10 @@ class AgentProviderListApi(Resource):
@console_ns.route("/workspaces/current/agent-provider/<path:provider_name>")
class AgentProviderApi(Resource):
@api.doc("get_agent_provider")
@api.doc(description="Get specific agent provider details")
@api.doc(params={"provider_name": "Agent provider name"})
@api.response(
@console_ns.doc("get_agent_provider")
@console_ns.doc(description="Get specific agent provider details")
@console_ns.doc(params={"provider_name": "Agent provider name"})
@console_ns.response(
200,
"Success",
fields.Raw(description="Agent provider details"),


@ -1,8 +1,7 @@
from flask_restx import Resource, fields, reqparse
from werkzeug.exceptions import Forbidden
from controllers.console import api, console_ns
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.console import console_ns
from controllers.console.wraps import account_initialization_required, is_admin_or_owner_required, setup_required
from core.model_runtime.utils.encoders import jsonable_encoder
from core.plugin.impl.exc import PluginPermissionDeniedError
from libs.login import current_account_with_tenant, login_required
@ -11,10 +10,10 @@ from services.plugin.endpoint_service import EndpointService
@console_ns.route("/workspaces/current/endpoints/create")
class EndpointCreateApi(Resource):
@api.doc("create_endpoint")
@api.doc(description="Create a new plugin endpoint")
@api.expect(
api.model(
@console_ns.doc("create_endpoint")
@console_ns.doc(description="Create a new plugin endpoint")
@console_ns.expect(
console_ns.model(
"EndpointCreateRequest",
{
"plugin_unique_identifier": fields.String(required=True, description="Plugin unique identifier"),
@ -23,19 +22,18 @@ class EndpointCreateApi(Resource):
},
)
)
@api.response(
@console_ns.response(
200,
"Endpoint created successfully",
api.model("EndpointCreateResponse", {"success": fields.Boolean(description="Operation success")}),
console_ns.model("EndpointCreateResponse", {"success": fields.Boolean(description="Operation success")}),
)
@api.response(403, "Admin privileges required")
@console_ns.response(403, "Admin privileges required")
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
def post(self):
user, tenant_id = current_account_with_tenant()
if not user.is_admin_or_owner:
raise Forbidden()
parser = (
reqparse.RequestParser()
@ -65,17 +63,19 @@ class EndpointCreateApi(Resource):
@console_ns.route("/workspaces/current/endpoints/list")
class EndpointListApi(Resource):
@api.doc("list_endpoints")
@api.doc(description="List plugin endpoints with pagination")
@api.expect(
api.parser()
@console_ns.doc("list_endpoints")
@console_ns.doc(description="List plugin endpoints with pagination")
@console_ns.expect(
console_ns.parser()
.add_argument("page", type=int, required=True, location="args", help="Page number")
.add_argument("page_size", type=int, required=True, location="args", help="Page size")
)
@api.response(
@console_ns.response(
200,
"Success",
api.model("EndpointListResponse", {"endpoints": fields.List(fields.Raw(description="Endpoint information"))}),
console_ns.model(
"EndpointListResponse", {"endpoints": fields.List(fields.Raw(description="Endpoint information"))}
),
)
@setup_required
@login_required
@ -107,18 +107,18 @@ class EndpointListApi(Resource):
@console_ns.route("/workspaces/current/endpoints/list/plugin")
class EndpointListForSinglePluginApi(Resource):
@api.doc("list_plugin_endpoints")
@api.doc(description="List endpoints for a specific plugin")
@api.expect(
api.parser()
@console_ns.doc("list_plugin_endpoints")
@console_ns.doc(description="List endpoints for a specific plugin")
@console_ns.expect(
console_ns.parser()
.add_argument("page", type=int, required=True, location="args", help="Page number")
.add_argument("page_size", type=int, required=True, location="args", help="Page size")
.add_argument("plugin_id", type=str, required=True, location="args", help="Plugin ID")
)
@api.response(
@console_ns.response(
200,
"Success",
api.model(
console_ns.model(
"PluginEndpointListResponse", {"endpoints": fields.List(fields.Raw(description="Endpoint information"))}
),
)
@ -155,19 +155,22 @@ class EndpointListForSinglePluginApi(Resource):
@console_ns.route("/workspaces/current/endpoints/delete")
class EndpointDeleteApi(Resource):
@api.doc("delete_endpoint")
@api.doc(description="Delete a plugin endpoint")
@api.expect(
api.model("EndpointDeleteRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")})
@console_ns.doc("delete_endpoint")
@console_ns.doc(description="Delete a plugin endpoint")
@console_ns.expect(
console_ns.model(
"EndpointDeleteRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")}
)
)
@api.response(
@console_ns.response(
200,
"Endpoint deleted successfully",
api.model("EndpointDeleteResponse", {"success": fields.Boolean(description="Operation success")}),
console_ns.model("EndpointDeleteResponse", {"success": fields.Boolean(description="Operation success")}),
)
@api.response(403, "Admin privileges required")
@console_ns.response(403, "Admin privileges required")
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
def post(self):
user, tenant_id = current_account_with_tenant()
@ -175,9 +178,6 @@ class EndpointDeleteApi(Resource):
parser = reqparse.RequestParser().add_argument("endpoint_id", type=str, required=True)
args = parser.parse_args()
if not user.is_admin_or_owner:
raise Forbidden()
endpoint_id = args["endpoint_id"]
return {
@ -187,10 +187,10 @@ class EndpointDeleteApi(Resource):
@console_ns.route("/workspaces/current/endpoints/update")
class EndpointUpdateApi(Resource):
@api.doc("update_endpoint")
@api.doc(description="Update a plugin endpoint")
@api.expect(
api.model(
@console_ns.doc("update_endpoint")
@console_ns.doc(description="Update a plugin endpoint")
@console_ns.expect(
console_ns.model(
"EndpointUpdateRequest",
{
"endpoint_id": fields.String(required=True, description="Endpoint ID"),
@ -199,14 +199,15 @@ class EndpointUpdateApi(Resource):
},
)
)
@api.response(
@console_ns.response(
200,
"Endpoint updated successfully",
api.model("EndpointUpdateResponse", {"success": fields.Boolean(description="Operation success")}),
console_ns.model("EndpointUpdateResponse", {"success": fields.Boolean(description="Operation success")}),
)
@api.response(403, "Admin privileges required")
@console_ns.response(403, "Admin privileges required")
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
def post(self):
user, tenant_id = current_account_with_tenant()
@ -223,9 +224,6 @@ class EndpointUpdateApi(Resource):
settings = args["settings"]
name = args["name"]
if not user.is_admin_or_owner:
raise Forbidden()
return {
"success": EndpointService.update_endpoint(
tenant_id=tenant_id,
@ -239,19 +237,22 @@ class EndpointUpdateApi(Resource):
@console_ns.route("/workspaces/current/endpoints/enable")
class EndpointEnableApi(Resource):
@api.doc("enable_endpoint")
@api.doc(description="Enable a plugin endpoint")
@api.expect(
api.model("EndpointEnableRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")})
@console_ns.doc("enable_endpoint")
@console_ns.doc(description="Enable a plugin endpoint")
@console_ns.expect(
console_ns.model(
"EndpointEnableRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")}
)
)
@api.response(
@console_ns.response(
200,
"Endpoint enabled successfully",
api.model("EndpointEnableResponse", {"success": fields.Boolean(description="Operation success")}),
console_ns.model("EndpointEnableResponse", {"success": fields.Boolean(description="Operation success")}),
)
@api.response(403, "Admin privileges required")
@console_ns.response(403, "Admin privileges required")
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
def post(self):
user, tenant_id = current_account_with_tenant()
@ -261,9 +262,6 @@ class EndpointEnableApi(Resource):
endpoint_id = args["endpoint_id"]
if not user.is_admin_or_owner:
raise Forbidden()
return {
"success": EndpointService.enable_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)
}
@ -271,19 +269,22 @@ class EndpointEnableApi(Resource):
@console_ns.route("/workspaces/current/endpoints/disable")
class EndpointDisableApi(Resource):
@api.doc("disable_endpoint")
@api.doc(description="Disable a plugin endpoint")
@api.expect(
api.model("EndpointDisableRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")})
@console_ns.doc("disable_endpoint")
@console_ns.doc(description="Disable a plugin endpoint")
@console_ns.expect(
console_ns.model(
"EndpointDisableRequest", {"endpoint_id": fields.String(required=True, description="Endpoint ID")}
)
)
@api.response(
@console_ns.response(
200,
"Endpoint disabled successfully",
api.model("EndpointDisableResponse", {"success": fields.Boolean(description="Operation success")}),
console_ns.model("EndpointDisableResponse", {"success": fields.Boolean(description="Operation success")}),
)
@api.response(403, "Admin privileges required")
@console_ns.response(403, "Admin privileges required")
@setup_required
@login_required
@is_admin_or_owner_required
@account_initialization_required
def post(self):
user, tenant_id = current_account_with_tenant()
@ -293,9 +294,6 @@ class EndpointDisableApi(Resource):
endpoint_id = args["endpoint_id"]
if not user.is_admin_or_owner:
raise Forbidden()
return {
"success": EndpointService.disable_endpoint(tenant_id=tenant_id, user_id=user.id, endpoint_id=endpoint_id)
}
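
The endpoint handlers above no longer check user.is_admin_or_owner inline and raise Forbidden themselves; that guard moves into the @is_admin_or_owner_required decorator imported from controllers.console.wraps. A minimal sketch of what such a wrapper could look like (an assumption for illustration; the project's actual implementation may differ):

from functools import wraps
from werkzeug.exceptions import Forbidden
from libs.login import current_account_with_tenant  # project helper used throughout these controllers

def is_admin_or_owner_required(view):
    @wraps(view)
    def decorated(*args, **kwargs):
        user, _ = current_account_with_tenant()
        if not user.is_admin_or_owner:
            # Same 403 the handlers previously raised inline.
            raise Forbidden()
        return view(*args, **kwargs)
    return decorated
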

Some files were not shown because too many files have changed in this diff.