Compare commits

..

803 Commits

Author SHA1 Message Date
66d67185a0 fix: array number validation in conversation variable 2025-08-19 14:53:30 +08:00
5f0b52c017 Fix number input in tool configure form of agent node tool item (#24154) 2025-08-19 14:26:09 +08:00
c2606f9062 fix: correct behaviour of code fix (#24152)
Co-authored-by: Joel <iamjoel007@gmail.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-19 14:18:49 +08:00
70da81d0e5 try ast-grep (#24149) 2025-08-19 13:41:52 +08:00
75199442c1 feat: Implements periodic deletion of workflow run logs that exceed t… (#23881)
Co-authored-by: shiyun.li973792 <shiyun.li@seres.cn>
Co-authored-by: 1wangshu <suewangswu@gmail.com>
Co-authored-by: Blackoutta <hyytez@gmail.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-19 09:47:34 +08:00
60cc82aff1 feat: add testcontainers based tests for feature service (#24026) 2025-08-19 09:32:47 +08:00
ebd2c8236d an example of suppress (#24136) 2025-08-19 00:21:26 +08:00
a2537ba4fd chore: translate i18n files (#24131)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-18 23:46:23 +08:00
a3a041ef6f feat: add delete avatar functionality with confirmation modal (#24127)
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-18 21:35:20 +08:00
c0702aacac Use typing.Literal to replace str places (#24099)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-18 21:34:13 +08:00
670d479e32 Bump pyobvector to 0.2.15 (#24120) 2025-08-18 17:36:27 +08:00
ae7de7d36b fix: treat default template of code as empty (#24106)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-18 16:52:27 +08:00
ef5decc98a Chore: remove some dead code in experience-enhance-group (#24110)
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
2025-08-18 16:51:43 +08:00
4445460eca fix: validate checklist before publishing workflow (#24104) 2025-08-18 16:46:22 +08:00
8288b1dcab Revert "fix pg_vector extension requires SUPERUSER, but not availabl… (#24108) 2025-08-18 16:46:15 +08:00
16d1289a0a fix pg_vector extension requires SUPERUSER, but not available on Huawei Cloud RDS (#24093)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-18 16:29:36 +08:00
ba775a1c90 chore: translate i18n files (#24102)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-18 16:18:45 +08:00
b0e58f9da7 Feature/improve goto anything commands (#24091) 2025-08-18 16:07:54 +08:00
531e784a92 feat: no longer enable auto upgrade when marketplace is disabled (#24… (#24101) 2025-08-18 15:57:33 +08:00
5e8fe30035 fix(ui): Optimize UI component styles and layouts (#24090) (#24092) 2025-08-18 15:56:10 +08:00
f5033c5a0e fix: no current code caused code generation show error (#24086) 2025-08-18 14:18:08 +08:00
26d7654851 chore: translate i18n files (#24081)
Co-authored-by: Stream29 <36751053+Stream29@users.noreply.github.com>
2025-08-18 12:45:17 +08:00
790a6ec203 fix: return empty list instead of raising exception for qdrant search when score_threshold is 1 (#24032) 2025-08-18 12:44:05 +08:00
de9c5f10b3 feat: enhance prompt and code (#23633)
Co-authored-by: stream <stream@dify.ai>
Co-authored-by: Stream <1542763342@qq.com>
Co-authored-by: Stream <Stream_2@qq.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-18 12:29:12 +08:00
a7fe0e3f87 fix(oauth): redis compatibility (#23959) 2025-08-18 11:14:08 +08:00
218e247fd2 refactor: improve loading animation and debug panel styles (#24075) 2025-08-18 11:12:47 +08:00
6b51530e21 fix: update first_id logic to use the oldest answer item in chat messages (#23992)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-18 11:04:18 +08:00
9755564a05 Fix sticky table header transparency with backdrop-filter blur in dark mode (#23999) 2025-08-18 11:03:15 +08:00
052d0e059e feat: add Redis SSL/TLS certificate authentication support (#23624) 2025-08-18 10:59:07 +08:00
fa4d3bba86 feat: add CLAUDE.md for LLM-assisted development guidance (#23946) 2025-08-18 10:13:44 +08:00
f8fc9f8c79 feat: add select input support to the conversation opener (#24043) 2025-08-18 10:01:29 +08:00
80f0594f4b feat: add testcontainers based tests for model loadbalancing service (#24066) 2025-08-18 09:54:22 +08:00
97b24f48d5 feat: add testcontainers based tests for metadata service (#24048) 2025-08-18 09:43:20 +08:00
b475a6b257 chore: synchronize translations (#24044) 2025-08-18 09:29:52 +08:00
ff52a54fef Restore useLabelStore mistakenly removed in commit 403e2d58 (#24052)
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-18 09:22:59 +08:00
c69634085d Revert "Fix: Correct file variable handling for custom tools in workflow" (#24061) 2025-08-17 23:14:37 +08:00
0a9af45194 remove unused function for message_queue. (#24027)
Signed-off-by: zhanluxianshen <zhanluxianshen@163.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-17 09:33:22 +08:00
d92ddc4dd4 chore(i18n): correct japanese translation (#24041) 2025-08-17 09:32:57 +08:00
32fa817eaa Update mypy.ini (#24014)
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-16 19:29:19 +08:00
af10b3c5fa Fix: add 'api_key' alias for backward compatibility (#24022)
Signed-off-by: Yongtao Huang <yongtaoh@gmail.com>
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
2025-08-16 19:28:31 +08:00
8b601a983c Fix missing user_id in trace_manager (#24024) 2025-08-16 11:08:30 +08:00
4b9812ce6a fix: move database service call inside session context in workflow draft variable API (#23996) 2025-08-15 18:23:42 +08:00
c9e18346ce Chore: remove empty files and unused code (#23990)
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
2025-08-15 16:28:41 +08:00
462ba8f416 chore: add configurable stdio buffer sizes for plugins in compose file (#23980) 2025-08-15 15:25:11 +08:00
658157e9a1 chore: improved type annotations in MCP-related codes (#23984) 2025-08-15 15:19:30 +08:00
4031a46572 doc: add deployment pattern using Amazon ECS and CDK (#23985) 2025-08-15 15:18:53 +08:00
821fe26b56 fix comparison with callable (#23978) 2025-08-15 15:03:00 +08:00
352776ba77 update: GitHub star fallback count to current value (#23957) 2025-08-15 11:25:50 +08:00
f560116fb2 fix: 504 Gateway Time-out error on /console/api/version endpoint (#23961) 2025-08-15 11:25:25 +08:00
e7a5268fdd Fix hover button contrast in dark mode for app and dataset cards (#23955) 2025-08-15 10:38:03 +08:00
aa71173dbb Feat: External_trace_id compatible with OpenTelemetry (#23918)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-15 09:13:41 +08:00
11fdcb18c6 feature: add test for tool engine serialization (#23951) 2025-08-15 09:12:29 +08:00
62c34c4bc2 refactor: unify pnpm version management with packageManager field (#23943) 2025-08-15 09:01:18 +08:00
01f2333381 chore: remove redundant .env.example from root directory (#23948) 2025-08-15 08:59:49 +08:00
8d47213529 fix(workflow/if-else): keep conditions in sync on variable rename (#23611)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-14 19:55:18 +08:00
f40e2cf98a Fix: remove redundant allowed_keys check in jsonable_encoder (#23931)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-14 19:52:07 +08:00
05e071bf2f fix: resolve user profile dropdown cache sync issue across layouts (#23937) 2025-08-14 19:51:28 +08:00
e340fccafb feat: integrate flask-orjson for improved JSON serialization performance (#23935) 2025-08-14 19:50:59 +08:00
4a2e6af9b5 Fixes #23921 (#23924)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-14 15:54:25 +08:00
acba135de1 Revert "feat: support to upload files for visual model call when running LLM node for debugging in a single step" (#23922) 2025-08-14 15:52:19 +08:00
2476511368 Fix: replace deprecated String.prototype.substr with slice (#23915)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
2025-08-14 13:47:23 +08:00
c39dfad7b6 fix: mime_type could be None (#23880)
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-14 13:40:06 +08:00
9e29309ffd fix: ensure custom headers are ignored when using bearer or basic authorization (#23584)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-14 10:18:12 +08:00
e0f0813b7c Add Test Containers Based Tests for App Generation Services (#23899) 2025-08-14 10:16:41 +08:00
caf50ea01e fix: resolve text clipping issues in overview chart components (#23907) 2025-08-14 10:16:20 +08:00
d4756ba659 Fix multipart/form-data boundary issue in HTTP Call node (#23903)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-14 10:01:27 +08:00
cc4d82f932 fix(web): resolve TypeScript errors in app overview components (#23901) 2025-08-14 09:22:27 +08:00
7286b4ad06 fix(api): resolve "Message not exists" error in admin feedback creation (#23232)
Fix regression introduced in PR #22580 where admin users encountered 
"Message not exists" errors when creating feedback on messages created 
by other users.

The issue was caused by `MessageService.create_feedback()` incorrectly 
filtering messages by the current user's ID, preventing admins from 
accessing messages created by end users. 

Reverts: #22580
2025-08-13 23:57:25 +08:00
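
The regression described above is a scoping bug: a lookup that should match on message id and app was also filtered by the caller's account id. Below is a minimal runnable sketch of that pattern; the names (Message, from_account_id, find_message) are illustrative assumptions, not the actual Dify code.

    # Hypothetical illustration of the scoping bug described above; not the actual Dify code.
    from dataclasses import dataclass

    @dataclass
    class Message:
        id: str
        app_id: str
        from_account_id: str  # the end user who created the message

    MESSAGES = [Message(id="m1", app_id="app1", from_account_id="end-user-1")]

    def find_message(message_id: str, app_id: str, current_user_id: str | None = None):
        for m in MESSAGES:
            if m.id != message_id or m.app_id != app_id:
                continue
            # Buggy behaviour: also matching on the caller's id hides messages
            # created by other users, surfacing as "Message not exists" for admins.
            if current_user_id is not None and m.from_account_id != current_user_id:
                continue
            return m
        return None

    assert find_message("m1", "app1") is not None                      # fixed lookup
    assert find_message("m1", "app1", current_user_id="admin") is None # the regression

The fix amounts to authorizing the admin's feedback action separately instead of baking the caller's id into the message query.
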
02194db0c6 Fix: narrow beforeRequest hook type to avoid boolean in array (#23860)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
2025-08-13 23:28:13 +08:00
b6bd145130 Remove redundant acceptedKeys check (#23891)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-08-13 23:24:23 +08:00
5dcbc9861b fix: add dark mode support for embedded modal option icons (#23893) 2025-08-13 23:20:33 +08:00
3ac4e122eb Update use-document-title.ts to fix favicon.ico path (#23872) 2025-08-13 08:08:32 -07:00
e336a8666a fix translation (#23873) 2025-08-13 17:17:16 +08:00
a36fdf6a7d chore(cmdk): Resolve default option selection issue in GotoAnything component (#23878) (#23813) 2025-08-13 17:14:21 +08:00
bf2f03f911 Restructure the File errors in controller (#23801)
Co-authored-by: Yongtao Huang <99629139+hyongtao-db@users.noreply.github.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-13 17:06:07 +08:00
5aaa47e25d fix: messages updated_at (#23869) 2025-08-13 15:34:52 +08:00
f884886ef4 style: update dark and light theme colors and add new color variables (#23865) 2025-08-13 14:50:41 +08:00
4da6ec787e feat: support to upload files for visual model call when running LLM node for debugging in a single step (#23521)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-13 14:07:13 +08:00
2da00bb4ec Fix misleading Studio button in account header (#23842) 2025-08-13 13:56:06 +08:00
e0fe0e1a3e fix: goto-anything command filter should only match shortcut (#23862) 2025-08-13 13:55:25 +08:00
e11a334c9b fix: SimpleSelect chevron icon state sync and add notClearable to monitoring selector (#23858) 2025-08-13 13:55:10 +08:00
21e1b825fe fix: optimize dataset cleanup task (#23828)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-08-13 11:22:03 +08:00
1cf7c3430a Add more comprehensive Test Containers Based Tests for advanced prompt service (#23850) 2025-08-13 11:21:32 +08:00
ad2bc7f8ac fix: update modal component to use relative positioning (#23855) 2025-08-13 11:20:40 +08:00
e600070a61 feat(api): auto-delete WorkflowDraftVariable when app is deleted (#23737)
This commit introduces a background task that automatically deletes `WorkflowDraftVariable` records when
their associated workflow apps are deleted.

Additionally, it adds a new cleanup script
`cleanup-orphaned-draft-variables` to remove existing orphaned draft variables from the database.
2025-08-13 11:13:08 +08:00
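
A minimal sketch of the batched deletion such a cleanup script might perform, assuming a workflow_draft_variables table with an app_id column (the schema and function name here are assumptions, not the actual Dify implementation):

    # Hypothetical sketch of batched orphan cleanup; schema names are assumed.
    import sqlite3

    def delete_draft_variables_for_app(conn: sqlite3.Connection, app_id: str,
                                       batch_size: int = 1000) -> int:
        # Delete a deleted app's draft variables in small batches so each
        # transaction stays short and never holds long table locks.
        deleted = 0
        while True:
            cur = conn.execute(
                "DELETE FROM workflow_draft_variables WHERE rowid IN ("
                " SELECT rowid FROM workflow_draft_variables"
                " WHERE app_id = ? LIMIT ?)",
                (app_id, batch_size),
            )
            conn.commit()
            if cur.rowcount == 0:
                return deleted
            deleted += cur.rowcount

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE workflow_draft_variables (app_id TEXT, name TEXT)")
    conn.executemany("INSERT INTO workflow_draft_variables VALUES (?, ?)",
                     [("app-1", f"v{i}") for i in range(2500)])
    assert delete_draft_variables_for_app(conn, "app-1") == 2500

Batching keeps each transaction short, so even a very large cleanup cannot stall other writers on the same table.
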
854c1aa37d fix: goto-anything highlighting consistency improvements (#23843) 2025-08-13 10:15:45 +08:00
74ab057f56 refactor: improve Redis wrapper type hints and fix None value handling (#23845) 2025-08-13 09:46:02 +08:00
ccc6d5975f chore: rename misleading 'chore.yaml' issue template to 'refactor.yml' (#23847) 2025-08-13 09:28:42 +08:00
b3399642c5 feat: Add an asynchronous repository to improve workflow performance (#20050)
Co-authored-by: liangxin <liangxin@shein.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: liangxin <xinlmain@gmail.com>
2025-08-13 02:28:06 +08:00
6e6389c930 chore: translate i18n files (#23841)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-12 23:56:50 +08:00
973a390298 Feature/run cmd (#23822) 2025-08-12 23:47:50 +08:00
a77dfb69b0 chore: update uv to 0.8.9 (#23833) 2025-08-12 23:41:39 +08:00
9769318875 Fix missing import in app.ts (#23831) 2025-08-12 23:27:31 +08:00
c7f36d1a5a chore: goto anything mouse keyboard interaction (#23805) 2025-08-12 18:17:19 +08:00
cb46726fa4 Add Test Containers Based Tests for File Service (#23771) 2025-08-12 18:16:07 +08:00
7820e31a92 fix: add missing translation keys for goto anything command selector (#23815) 2025-08-12 18:14:57 +08:00
de0dae9d9b Fix node search (#23795) 2025-08-12 14:48:35 +08:00
a09935d9b9 chore: restore @mdx-js dependencies in package.json and pnpm-lock.yaml (#23792) 2025-08-12 14:45:56 +08:00
a62371940f fix: remove misleading clear buttons and improve SimpleSelect UX (#23791) 2025-08-12 13:29:51 +08:00
02f7677d92 chore: translate i18n files (#23789)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-12 13:29:33 +08:00
1ffe190557 Feat/improved mcp timeout configs (#23605)
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-12 13:14:00 +08:00
d3eff9b1a3 fix: prevent X button flying to screen corners in dataset settings modal (#23788) 2025-08-12 12:03:04 +08:00
66232792a2 fix: add MAX_TREE_DEPTH in env.service.web (#23785)
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-12 12:01:51 +08:00
dc2aaae414 feat: add highPriority option to Modal for goto-anything layering (#23783) 2025-08-12 10:37:11 +08:00
c0bb2ec851 feat: If combining text and files, place the text prompt after the fi… (#23779)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-08-12 10:36:55 +08:00
7566d90dfe fix issue #23758 (#23764)
Co-authored-by: root <root@thinkpad-pc.localdomain>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-12 10:26:13 +08:00
b38f195a0d test: add comprehensive test suite for rate limiting module (#23765) 2025-08-12 10:05:30 +08:00
a6c5b7414d Fix: expose MAX_TREE_DEPTH in env (#23743)
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-12 09:56:31 +08:00
cdee5aab3a fix: update integration tests to use 2-element variable selectors (#23766) 2025-08-12 09:33:09 +08:00
4240e2dd29 fix(api): fix flaky tests by generating unique variable names (#23768) 2025-08-12 09:31:15 +08:00
e298eee822 feat: add select-none class to tag filter components to prevent text selection (#23774) 2025-08-12 09:23:59 +08:00
bc1cfd4373 hotfix: fix translation (#23757) 2025-08-11 22:36:39 +08:00
2944a4fd43 feat: add filtering support for @ command selector in goto-anything (#23763) 2025-08-11 22:21:37 +08:00
a44ca29717 Chore: remove unused var in ModelProviderFactory (#23690) 2025-08-11 22:04:11 +08:00
332e8e68ee refactor: Change _queue_manager to public attribute queue_manager in task pipelines (#23747) 2025-08-11 18:18:07 +08:00
577062b93a refactor: simplify variable pool key structure and improve type safety (#23732)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-08-11 18:10:04 +08:00
223c1a8089 test(api): fix flaky tests in TestWorkflowDraftVariableService (#23749)
Fix flaky test
`TestWorkflowDraftVariableService.test_list_variables_without_values_success`
caused by low entropy in test data generation that led to
duplicate values violating unique constraints.

Also improve data generation in other tests within
`TestWorkflowDraftVariableService` to reduce the likelihood of
generating duplicate test data.
2025-08-11 17:39:58 +08:00
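
The commit message above attributes the flakiness to low-entropy test data colliding on a unique constraint; the standard remedy is to derive names from a high-entropy source. A tiny illustrative sketch (the function name is hypothetical):

    # Hypothetical sketch: high-entropy test data avoids UNIQUE-constraint clashes.
    import uuid

    def unique_variable_name(prefix: str = "var") -> str:
        # uuid4 carries 122 random bits, so collisions across test runs are
        # negligible, unlike short random suffixes whose birthday-paradox
        # collision odds grow quickly.
        return f"{prefix}_{uuid.uuid4().hex}"

    names = {unique_variable_name() for _ in range(10_000)}
    assert len(names) == 10_000  # effectively collision-free
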
0baccb9e82 chore(version): bump version to 1.7.2 (#23740) 2025-08-11 17:12:44 +08:00
2c81db5a1c feat: enhance GotoAnything UX with @ command selector (#23738) 2025-08-11 15:47:19 +08:00
43411d7a9e chore: remove debug log statements from DifyAPIRepositoryFactory (#23734) 2025-08-11 15:39:20 +08:00
2dbf20a3e9 fix: resolve circular import in AppGenerateEntity (#23731) 2025-08-11 15:38:28 +08:00
aaf9fc1562 fix: add @property decorator to pydantic computed_field for compatibility (#23728) 2025-08-11 15:34:19 +08:00
d30f898274 fix: model selector language undefined error (#23723) 2025-08-11 14:39:22 +08:00
4a72fa6268 fix: Enhance doc_form null checking, exception handling, and rollback logic (#23713) 2025-08-11 13:53:40 +08:00
0c5e66bccb fix: unified error handling for GotoAnything search actions (#23715) 2025-08-11 11:57:06 +08:00
ff791efe18 fix: Optimize the event handling for inserting variable shortcuts, resolving incorrect blur issues (#22981) (#23707) 2025-08-11 11:08:12 +08:00
6083b1d618 Feat add testcontainers test for message service (#23703)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-11 10:49:32 +08:00
69c3439c3a chore: translate i18n files (#23704)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-11 10:35:29 +08:00
7ee170f0a7 Feat node search (#23685)
Co-authored-by: GuanMu <ballmanjq@gmail.com>
Co-authored-by: zhujiruo <zhujiruo@foxmail.com>
Co-authored-by: Matri Qi <matrixdom@126.com>
Co-authored-by: croatialu <wuli.croatia@foxmail.com>
Co-authored-by: HyaCinth <88471803+HyaCiovo@users.noreply.github.com>
Co-authored-by: lyzno1 <92089059+lyzno1@users.noreply.github.com>
2025-08-11 10:19:52 +08:00
36b221b170 Fix model_setting_map key mismatch (#23699)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-08-11 09:33:26 +08:00
d1fc98200c fix: update HTTP request timeout fields to use Field with default values (#23694) 2025-08-11 09:30:16 +08:00
bb852ef6d2 fix: improve dark mode UI consistency in signin page (#23684) 2025-08-10 17:21:05 +08:00
a17b7b3d89 fix: correct File Preview API position in Japanese advanced chat template (#23645) 2025-08-10 11:17:38 +08:00
dc65a72d93 chore: translate i18n files (#23679)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-10 11:17:14 +08:00
ea502d36a9 lint: fix sonarjs/no-dead-store issues (#23653) 2025-08-10 11:16:43 +08:00
79a3c1618f fix: support custom file types in workflow Start node (#23672)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-10 11:09:47 +08:00
0be3b4e7a6 fix: Add internationalization support for date input formatting Fixes #23266 (#23678) 2025-08-10 11:05:55 +08:00
5f8967918e Feat add testcontainers test for app dsl service (#23675) 2025-08-10 11:03:46 +08:00
6900b08134 fix: sync missing conversation variables for existing conversations (#23649) 2025-08-09 22:42:18 +08:00
dc641348f6 fix: resolve datasets container rounded corners disappearing during scroll (#23667) 2025-08-09 22:41:42 +08:00
431e0105de Fix bare raise in if blocks (#23671)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-08-09 22:40:55 +08:00
cbe0d9d053 fix: conversation pinned filter returns incorrect results when no conversations are pinned (#23670) 2025-08-09 22:40:28 +08:00
f9abcfd789 fix: change i18n workflow trigger mechanism to run in main repository (#23662) 2025-08-09 16:31:24 +08:00
5a0a2b7e44 Allow to export full screen image of workflow (#23655) 2025-08-09 15:31:32 +08:00
41345199d8 Feat add testcontainers test for api base extension service (#23652) 2025-08-09 12:57:24 +08:00
8362365eae Fix file type misclassification in logs interface (#23641) 2025-08-08 22:58:52 +08:00
14e1c16cf2 Fix ClickZetta stability and reduce logging noise (#23632)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-08 22:57:47 +08:00
b32b7712e2 fix: i18n options too long breaks plugin setting ui (#23640) 2025-08-08 18:34:12 +08:00
5cf55fcbab feat: Enhance the alignment logic of the nodes in the context menu (#23617) 2025-08-08 15:20:24 +08:00
57c4fc6bf8 Fix X button animation glitches in secret key modals (#23614) 2025-08-08 14:47:15 +08:00
92fcf0854b fix: eliminate AppInfo sidebar animation glitches and layout jumps (#23609) 2025-08-08 12:55:52 +08:00
f73ec60311 Bug Fix: Fix workflow knowledge retrieval cache bug (#23597)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-08 11:42:20 +08:00
1c60b7f070 Revert "feat: improved MCP timeout" (#23602) 2025-08-08 11:20:53 +08:00
znn 084dcd1a50 using message_file event instead of message event (#22988) 2025-08-08 09:50:20 +08:00
fd536a943a word extractor cleans. (#20926)
Signed-off-by: zhanluxianshen <zhanluxianshen@163.com>
2025-08-08 09:37:51 +08:00
6f80fb72cb feat: tools json response support datetime uuid etc (#22738)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-08-08 09:26:50 +08:00
cb5e2ad9b2 lint: fix tailwind lint issues (#23367)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-08 09:25:41 +08:00
62772e8871 fix: ensure vector database cleanup on dataset deletion regardless of document presence (affects all 33 vector databases) (#23574)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-08 09:18:43 +08:00
4b0480c8b3 feat: improved MCP timeout (#23546) 2025-08-08 09:08:14 +08:00
c8c591d73c Fix incorrect exception handling in db query (#23582)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-08-08 09:07:59 +08:00
2edd32fdea fix: resolve AppCard description overlap with tag area (#23585)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-08 09:05:55 +08:00
5889059ce4 Feat add testcontainers test for annotation service (#23593) 2025-08-08 09:03:50 +08:00
7230497bf4 fix: empty arrays should convert to empty string in LLM prompts (#23590) 2025-08-08 08:50:37 +08:00
d98071a088 feat: add Service API file preview endpoint (#23534)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-07 21:44:29 +08:00
ac02c12e49 fix: resolve Windows path separator issue in Huawei OBS operations (#23475)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-07 21:17:46 +08:00
11d29e8d3e fix: update invite settings page style in dark mode (#23571) 2025-08-07 21:01:01 +08:00
b44ecf9bf7 fix: hide opensource license link when custom branding is enabled (#23569) 2025-08-07 05:37:05 -07:00
e60f148824 minor fix translation (#23568) 2025-08-07 05:36:52 -07:00
e9045a8838 Fix: Apply Metadata Filters Correctly in Full-Text Search Mode for Tencent Cloud Vector Database (#23564) 2025-08-07 05:36:06 -07:00
55487ba0c6 fix: exclude dev dependencies from production Docker image (#23562) 2025-08-07 05:35:32 -07:00
305ea0a2d5 Fix/footer behavior (#23555) 2025-08-07 16:55:23 +08:00
a5ca76befb Fixes #23536 (#23542) 2025-08-07 14:42:34 +08:00
e01510e2a6 feat: Add Clickzetta Lakehouse vector database integration (#22551)
Co-authored-by: Claude <noreply@anthropic.com>
2025-08-07 14:21:46 +08:00
2931c891a7 chore: Optimize component styles and interactions (#23250) (#23543) 2025-08-07 14:19:38 +08:00
ad1b1193fa fix localtime_to_timestamp tool throws 'no attribute localize error' when it executes without specifying a timezone parameter (#23517) 2025-08-07 11:14:45 +08:00
85f33fb73d chore: add template for required fields (#23533) 2025-08-07 10:55:41 +08:00
f3c98a274b fix: update the guiding text in the upload component (#23509) 2025-08-07 10:06:04 +08:00
f6c7175828 fix: make TagSelector always visible for accessibility and mobile support (#23515) 2025-08-07 09:25:26 +08:00
d253ca192a Feat add testcontainers test for app service (#23523) 2025-08-07 09:13:30 +08:00
e072b7dafa Chore: remove unused variable pruned_memory (#23514) 2025-08-07 09:06:17 +08:00
3ff52f1809 feat: Enhance response validation and parsing in tool.py (#23456)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-07 09:04:51 +08:00
ad61b42494 fix(node): fix unexpected extra equals sign in HTTP params (#23474) 2025-08-07 09:04:09 +08:00
c95761f4e6 fix: resolve i18n workflow permissions and naming issues (#23494) 2025-08-06 15:59:26 +08:00
1f15cba9a0 Enhance API documentation TOC with modern design and improved UX (#23490)
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-06 14:45:06 +08:00
3344aaabb6 minor fix: translation for contentEnableLabel (#23483) 2025-08-06 14:20:32 +08:00
b8ef0c84e6 Fix: moved file = after file presence validation (#23453)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-08-06 13:44:12 +08:00
6b8b31ff64 Remove unnecessary issubclass check (#23455)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-06 13:43:55 +08:00
823872d294 Fix API documentation layout and dark mode contrast issues (#23462) 2025-08-06 11:19:47 +08:00
724ec12bf3 Feat workflow node align (#23451) 2025-08-06 11:01:10 +08:00
8aac402b24 fix: can not find file (#23472) 2025-08-06 10:39:19 +08:00
eb12fd9461 fix: fix wrong unicodechar regx (#23468) 2025-08-06 09:48:25 +08:00
ad622cea9e Feat add testcontainers test for workflow draft variable service (#23466) 2025-08-06 09:47:56 +08:00
2575eaf1d6 Update milvus version to LTS (#23393)
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-05 21:29:38 +08:00
fc5ed9f316 Feat add testcontainers test for account service (#23380) 2025-08-05 20:15:39 +08:00
40a11b6942 feat(api): Enhance the scope of expired data cleanup table in the Dify… (#23414) 2025-08-05 19:57:43 +08:00
84543a591a i18n/sync (#23429) 2025-08-05 19:36:25 +08:00
2cd3fe0dce fix: Multiple UI component improvements and code quality enhancements (#23446) 2025-08-05 19:36:07 +08:00
5eb061466f chore: update tmpl (#23438) 2025-08-05 19:35:30 +08:00
52050d3dff feat(workflow): add support for release/e-* tag in build-push workflow (#23418) 2025-08-05 16:42:07 +08:00
904af20023 Feat/webapp opt (#23283) 2025-08-05 16:07:48 +08:00
4934dbd0e6 feat(workflow): add relations panel to visualize dependencies (#21998)
Co-authored-by: crazywoola <427733928@qq.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-08-05 15:08:23 +08:00
d080bea20b fix: resolve sidebar animation issues and improve app detail page UX (#23407) 2025-08-05 15:06:40 +08:00
607dfc8be7 fix: remove redundant useEffect from TagSelector component (#23406) 2025-08-05 13:15:26 +08:00
3b5130b03d add otel instrument for redis and http request (#23371)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-05 11:14:51 +08:00
75f722a959 lint: fix issue of no-unused-vars (#23375) 2025-08-05 11:12:30 +08:00
b946378b38 fix: installed apps preview language error (#23397) 2025-08-05 11:01:31 +08:00
0cee57acca chore: add Template (#23395) 2025-08-05 10:43:51 +08:00
znn ab78e12089 enhancing logging (#23332)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-05 10:36:48 +08:00
7fe23a0ca6 remove useless Tool class attributes (#23389) 2025-08-05 10:20:28 +08:00
yyh d8584dc03a feat: enhance document list navigation and sorting functionality (#23383) 2025-08-05 10:19:47 +08:00
a724f35672 fix: fetchAppWithTags may return empty when apps is over 100 (#23350) 2025-08-04 20:20:43 +08:00
60c7663a80 Feat add testcontainers test (#23269) 2025-08-04 19:27:36 +08:00
8041808b53 fix: display all helpfields (#23348)
Signed-off-by: jingfelix <jingfelix@outlook.com>
2025-08-04 14:39:54 +08:00
146d870098 Fix: avoid Flask route conflict by merging DocumentDetailApi and DocumentDeleteApi (#23333)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-04 14:37:36 +08:00
yyh 78d2f49e01 feat(i18n): enhance auto-gen script for template literals and add app-debug translations (#23344) 2025-08-04 13:30:11 +08:00
yyh 54c8bd29ee security: Fix XSS vulnerability in authentication check-code pages (#23295) 2025-08-04 12:48:38 +08:00
406c1952b8 Fix version comparison with imported_version (#23326)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-08-04 10:40:49 +08:00
znn 0ebcee9a6b fixing footer (#22927) 2025-08-04 10:14:57 +08:00
964fa132cb Chore: fix typo, no code change (#23331) 2025-08-03 22:30:28 +08:00
yyh dedd5f571c fix(ui): temporarily remove TagSelector from app sidebar per design review (#23329) 2025-08-03 22:30:21 +08:00
90373c7165 fix(i18n): correct translation errors across multiple languages (#23328) 2025-08-03 18:31:50 +08:00
znn d470120a60 retention of data filled on tab switch for tool plugin (#23323) 2025-08-03 18:31:15 +08:00
0c925bd088 feat(ui): unify tag editing in app sidebar and add management entry to TagFilter (#23325) 2025-08-03 18:30:47 +08:00
76d123fe19 Fix segment query tenant bug and variable naming typo (#23321)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-08-03 18:30:09 +08:00
20f0238aab feat: support workflow version specification in workflow and chat APIs (#23188)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-08-03 16:27:12 +08:00
6d5a7684b4 feat: Add Download Button to UI for Knowledge Resource Source Files (#23320) 2025-08-03 16:26:11 +08:00
znn 7831d44099 fixing chat window api recall fix on changing browser tab or window (#23301) 2025-08-03 10:14:17 +08:00
fbf844efd5 Chore: replace deprecated datetime.utcnow() with naive_utc_now() (#23312)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-08-03 10:11:47 +08:00
99a4bd82b5 chore: fix function name typo (#23306) 2025-08-03 10:09:26 +08:00
58608f51da replace db with sa to get typing support (#23240) 2025-08-02 23:54:23 +08:00
ff9fd0cdb2 fix: fix wrong css class (#23299) 2025-08-02 17:36:15 +08:00
aac849d4f4 Fix/variable input validation issue (#23300) 2025-08-02 17:35:51 +08:00
688d07e9c3 fix the error of unable to retrieve url from file (#13603) 2025-08-02 17:15:55 +08:00
f9b3cd1b68 i18n: enhance check-i18n script with precise filtering and multiline support (#23298) 2025-08-02 12:52:12 +08:00
b2c8718f35 Update metadata_service.py (#23272) 2025-08-02 12:15:23 +08:00
46ba0a8781 Update metadata_service.py (#23271) 2025-08-02 12:14:43 +08:00
bc18d4d1b9 Fix: Correct file variable handling for custom tools in workflow (#18427) 2025-08-02 03:36:04 +08:00
a4b14fc992 fix video and audio extension, keep consistent with the web page. (#23287)
Co-authored-by: wangjialei <wangjialei@xiaomi.com>
2025-08-01 22:59:38 +08:00
be914438a5 Fix: incorrect array element validation in SegmentType (#23289) 2025-08-01 22:46:50 +08:00
ec488a4c43 fix: type for ProviderConfig.default (#23282)
Signed-off-by: jingfelix <jingfelix@outlook.com>
2025-08-01 17:37:22 +08:00
f78b903a49 Chore/variable label (#23270) 2025-08-01 15:43:36 +08:00
fd086b06a6 CI: restrict autofix.ci to run only in official repo (#23267) 2025-08-01 15:21:31 +08:00
759ded3e3a minor fix: fix default for status of TidbAuthBinding in compatible with various versions (#22288) 2025-08-01 14:51:16 +08:00
05b002a8b7 Add a practical AKS one-click deployment Helm (#23253) 2025-08-01 14:22:59 +08:00
f7016fd922 chore: Optimize component styles and interactions (#23250) (#23251) 2025-08-01 14:18:38 +08:00
da5c003f97 chore: tablestore full text search support score normalization (#23255)
Co-authored-by: xiaozhiqing.xzq <xiaozhiqing.xzq@alibaba-inc.com>
2025-08-01 14:14:11 +08:00
c33741a5e9 fix: improve boolean field handling in plugin configuration forms (#23160)
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-01 10:34:46 +08:00
872ff3f1d4 fix: resolve multipart/form-data boundary issue in HTTP Request component #22880 (#23008)
Co-authored-by: crazywoola <427733928@qq.com>
2025-08-01 10:26:50 +08:00
8ab3fda5a8 [Enhancement] Allow modify conversation variable via api (#23112)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-08-01 09:34:56 +08:00
1821726d4f fix celery backend when use rabbitmq (#23238)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-08-01 09:26:05 +08:00
98aea05ad2 Fix: Update the background color and hover effect of secondary buttons (#23186) 2025-07-31 21:19:34 +08:00
79ea94483e refine some orm types (#22885) 2025-07-31 18:43:04 +08:00
a0a30bfdcc fix: set default value to avoid initializing with empty value (#23220) 2025-07-31 17:50:53 +08:00
caa5928ac4 chore: Optimize dark mode styles (#23222) (#23231) 2025-07-31 17:43:08 +08:00
9400832b2b fix: correct plugin indentation (#23228) 2025-07-31 17:35:39 +08:00
a82b55005b fix: resolve sidebar animation glitches and layout shifts in app detail page (#23216) (#23221) 2025-07-31 16:04:49 +08:00
a434f6240f Chore: some misc cleanup (#23203)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-31 15:33:39 +08:00
f5e177db89 fix: call checkOrSetAccessToken when app access mode is PUBLIC (#23195)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-31 14:18:54 +08:00
5febd66808 Fix: Fix style issues (#23209) 2025-07-31 11:47:34 +08:00
afac1fe590 Add comprehensive security tests for file upload controller (#23102)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-31 10:32:16 +08:00
4251515b4e fix remote file (#23127)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-07-31 10:30:54 +08:00
1b2046da3f fix: prevent client-side crashes from null/undefined plugin data in workflow (#23154) (#23182) 2025-07-31 10:03:33 +08:00
znn 646900b00c fixing embedded chat styling (#23198) 2025-07-31 10:03:03 +08:00
142ab74784 feat: Enable Tracing Support For Phoenix Cloud Instance (#23196) 2025-07-30 15:58:26 -07:00
ffddabde43 feat(notion): Notion Database extracts Rows content in row order and appends Row Page URL (#22646)
Co-authored-by: Aurelius Huang <cm.huang@aftership.com>
2025-07-30 21:35:20 +08:00
8c6d87f08a chore: Update vulnerable eslint dependencies (#23192) 2025-07-30 21:31:23 +08:00
270dd955d0 chore(i18n): sync missing keys in zh-Hans and ja-JP (#23175) 2025-07-30 18:00:41 +08:00
4e2129d74f fix: Error processing trace tasks (#23170) 2025-07-30 18:00:15 +08:00
07cff1ed2c minor fix: fix flask api resources only accept one resource for same url (#23168) 2025-07-30 17:05:02 +08:00
070379a900 minor fix: fix wrong check of annotation_ids (#23164) 2025-07-30 17:04:31 +08:00
bbdeb15501 fix: Support URL-encoded passwords with special characters in CELERY_BROKER_URL (#23163)
Signed-off-by: Sn0rt <wangguohao.2009@gmail.com>
2025-07-30 16:39:54 +08:00
28478cdc41 feat: support metadata condition filter string array (#23111)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-07-30 16:13:45 +08:00
11ec62ca70 fix: element of Array[string] and Array[number] and size attribution (#23074)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-30 15:21:50 +08:00
4499cda186 Feat annotations panel (#22968) 2025-07-30 13:40:48 +08:00
c05c5953a8 fix: disabled auto update but still show in plugin detail (#23150) 2025-07-30 11:15:06 +08:00
eee576355b Fix: Support for Elasticsearch Cloud Connector (#23017)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-30 11:12:16 +08:00
a3ef869db6 fix(i18n): clean up unused keys and fix nesting & placeholder issues (#23123) 2025-07-30 10:37:44 +08:00
a51998e4aa Fix: prevent KeyError in validate_api_list by correcting logical check (#23126) 2025-07-30 10:37:06 +08:00
znn 0b44edaca9 request fail when no api key (#23135) 2025-07-30 10:36:03 +08:00
ab163a5f75 Chore: use Workflow.VERSION_DRAFT instead of hardcoded draft (#23136)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-30 10:34:51 +08:00
f17ca26b10 Fix: add missing db.session.close() to ensure proper session cleanup (#23122) 2025-07-30 10:34:24 +08:00
0ea010d7ee fix: metadata API nullable validation consistency issue (#23133)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-30 10:33:24 +08:00
72a2c3decf Fix/http node timeout validation#23077 (#23117)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-29 21:40:15 +08:00
ab7c2cf000 minor fix: Object of type int64 is not JSON serializable (#23109) 2025-07-29 21:40:03 +08:00
6914c1c85e fix(web): make iteration panel respect MAX_PARALLEL_LIMIT environment variable (#23083) (#23104) 2025-07-29 21:39:40 +08:00
ea542d42ca fix: i18n link in README.md (#23121) 2025-07-29 21:36:32 +08:00
cba5bd588c minor fix: wrong position of retry_document_indexing_task time elapsed (#23099) 2025-07-29 20:54:37 +08:00
00cb1c26a1 refactor: pass external_trace_id to message trace (#23089)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-29 19:34:46 +08:00
f4d4a32af2 Feat/enhance i18n scripts (#23114) 2025-07-29 18:24:57 +08:00
1bf0df03b5 minor fix: fix some translation (#23105) 2025-07-29 16:36:29 +08:00
ae28ca0b8d minor fix: wrong assignment (#23103) 2025-07-29 16:36:21 +08:00
51a6b9dc57 hotfix: clear_all_annotations should also execute delete_annotation_index_task just like delete_app_annotation (#23093)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-29 16:35:33 +08:00
4c65a8091a chore: base form (#23101) 2025-07-29 15:37:16 +08:00
27f400e13f feat: update banner (#23095) 2025-07-29 14:05:59 +08:00
7721648867 Fix variable config (#23070) 2025-07-29 11:24:59 +08:00
47cc951841 Fix Empty Collection WHERE Filter Issue (#23086) 2025-07-29 11:17:50 +08:00
63b6026e6e minor fix: fix error messages (#23081) 2025-07-29 10:59:43 +08:00
84aa38586f Fix: number input can display 0 (#23084) 2025-07-29 10:59:12 +08:00
znn a70d59d4a6 ability to click classifier during workflow execution (#23079) 2025-07-29 09:45:49 +08:00
57e0a12ccd Refactor: remove redundant full module paths in exception handlers (#23076)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-29 09:40:51 +08:00
f5e1fa4bd2 fix(scripts): resolve i18n check script path and logic issues (#23069) 2025-07-29 09:39:10 +08:00
znn a7ce1e5789 dark mode for overlay (#23078) 2025-07-29 09:37:40 +08:00
5f550126b3 Fix/23066 i18n related commands are broken (#23071)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-28 21:25:18 +08:00
572a2bbe53 fix(i18n): Complete missing translations and clean up legacy keys in app-debug across all locales (#23062) (#23065) 2025-07-28 19:46:27 +08:00
537c04745d minor fix: using the same AccountInFreezeError (#23061)
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
2025-07-28 18:24:27 +08:00
7f004e2f41 fix: web SSO login redirect to correct basePath and origin (#23058) 2025-07-28 18:17:01 +08:00
7c6415551d Fixed code formatting issues in the comment header option component (#23060) 2025-07-28 16:44:50 +08:00
0d2d349ab3 chore(version): bump version to 1.7.1 (#23048) 2025-07-28 16:42:39 +08:00
de28bb1075 fix: changed email login use oauth will create a new account (#23057) 2025-07-28 16:32:25 +08:00
d409fb5f58 refactor(web): Optimize workflow operation tip popup (#23051) (#23052) 2025-07-28 16:29:45 +08:00
3248d728ac fix retry document db session issue (#23046) 2025-07-28 15:56:49 +08:00
beebcae0a1 fix(web): Fix issues with workflow as tool configuration button (#23039) (#23050) 2025-07-28 15:49:19 +08:00
15757110cf feat: default value option for select input fields (#21192)
Co-authored-by: crazywoola <427733928@qq.com>
Co-authored-by: GuanMu <ballmanjq@gmail.com>
2025-07-28 15:37:23 +08:00
fce126b206 fix(api): fix incorrect path handling in Langfuse integration (#22766)
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
2025-07-28 15:37:13 +08:00
5c5f61b2aa fix(dataset): CELERY_BROKER uses amqp rabbitmq. When adding document segments in batches and uploading large files, the status will always remain stuck at "In batch processing" #22709 (#23038) 2025-07-28 14:24:13 +08:00
3f8fb18c89 fix: delete the old provider_config_cache after refresh_credentials (#23033)
Signed-off-by: jingfelix <jingfelix@outlook.com>
2025-07-28 14:07:51 +08:00
f72c03a174 feat: Support selecting variables in conditional filtering in list operations. (#23029)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-28 13:59:34 +08:00
ee731c7810 chore: Update eslint config dependencies (#23040) 2025-07-28 13:58:21 +08:00
bd5b938571 feat: Support allOf in OpenAPI properties inside schema #22946 (#22975) 2025-07-28 11:03:19 +08:00
7eb707f811 Rollback Aliyun Trace Icon File (#23027) 2025-07-28 11:01:38 +08:00
0546351d3e refactor(web): Optimize the interaction effect of ToolTip component in menu items (#23020) (#23023) 2025-07-28 10:34:11 +08:00
1c05491f1c Chore: remove duplicate TYPE_CHECKING import (#23013)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-07-28 10:04:45 +08:00
znn 177b0fb5e8 ability to select same type sub item by preserving children of both f… (#23002) 2025-07-28 10:04:31 +08:00
67a0751cf3 fix: Improve create_agent_thought and save_agent_thought Logic (#21263) 2025-07-27 11:06:37 +08:00
665fcad655 fix: resolve cross-page document selection issue in metadata batch edit (#23000)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-27 09:22:36 +08:00
znn d776a7cde7 adding LANG LC_ALL PYTHONIOENCODING UTF-8 (#22928)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-27 09:19:13 +08:00
znn e0fe158f0b node title number on copied iteration node (#23004) 2025-07-27 09:10:04 +08:00
5411fd3757 Fix: correct misplaced ensure_ascii=False (#22997)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-07-26 18:29:03 +08:00
3328addb58 fix: eliminate dark mode flicker by moving ThemeProvider to root level (#22996) 2025-07-26 18:28:28 +08:00
1446f19709 fix: Update trigger styles for disabled state in PureSelect component (#22986) 2025-07-26 10:53:59 +08:00
znn 773932b1e7 adding mcp error in toast (#22987) 2025-07-26 10:37:52 +08:00
faaf828dff Remove redundant condition check (#22983)
Signed-off-by: Yongtao Huang <yongtaoh2022@gmail.com>
2025-07-25 23:38:16 +08:00
ee50a2bcd5 feat: clear all annotation (#22878)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-25 18:06:03 +08:00
31985d94fa Fix incorrect assert type in the AgentNode class (#22964) 2025-07-25 16:31:21 +08:00
9c5b1e7012 fix: Update the scheduling method for timed tasks, (#22779) 2025-07-25 16:27:35 +08:00
ad472d59e0 fix: Refactor i18n config and fix plugin search box styling issue (#22945) 2025-07-25 15:01:28 +08:00
32df3b68c7 fix: correct typo in function name paser_docx_part -> parser_docx_part (#22936) 2025-07-25 10:33:55 +08:00
a189d293f8 make logging not use f-str, change others to f-str (#22882) 2025-07-25 10:32:48 +08:00
570aee5fe6 fix: Optimize AppInfo component styles and fix CustomizeModal step display (#22930) (#22935) 2025-07-25 10:30:52 +08:00
88952cbb7a test: add comprehensive tests for file_factory build_from_mapping (#22926) 2025-07-25 10:30:19 +08:00
89415ac453 fix: improve PostgreSQL healthcheck cmd to avoid fatal log errors (#22749) (#22917) 2025-07-25 09:05:47 +08:00
8bbed5aeea chore: translate i18n files (#22934)
Co-authored-by: JzoNgKVO <27049666+JzoNgKVO@users.noreply.github.com>
2025-07-25 09:04:36 +08:00
8340d775bd Improve: support custom model parameters in auto-generator (#22924) 2025-07-25 09:00:26 +08:00
cff6a488f8 fix: unexpected redirection when landing at workflow (#22932) 2025-07-25 08:48:47 +08:00
d6b980a2dd Feat: change user email freezes limit (#22912)
Co-authored-by: Yansong Zhang <916125788@qq.com>
2025-07-25 08:48:23 +08:00
206bc4b36d chore: enhance error message when handling PluginInvokeError (#22908) 2025-07-24 21:58:39 +08:00
45cebf09b0 fix: Optimize input variable retrieval logic (#22888) (#22914) 2025-07-24 21:57:55 +08:00
bd43ca6275 fix: rounded (#22909) 2025-07-24 16:40:37 +08:00
9237976988 fix: refine handling of constant and mixed input types in ToolManager and ToolNodeData (#22903) 2025-07-24 16:14:08 +08:00
6ac06486e3 Feat/change user email freezes limit (#22900) 2025-07-24 15:36:53 +08:00
061d4c8ea0 fix(plugins_select): Adjust z-index, fix issue where options cannot be displayed (#22873) (#22893) 2025-07-24 15:14:30 +08:00
aca8b83669 fix: support authorization using session and user_id in URL. (#22898) 2025-07-24 15:10:15 +08:00
a8f09ad43f refactor(i18next): streamline fallback translation handling and initi… (#22894) 2025-07-24 14:40:37 +08:00
de611ab344 Feat: add notification for change email completed (#22812)
Co-authored-by: Yansong Zhang <916125788@qq.com>
2025-07-24 14:16:39 +08:00
371fe7a700 fix: type error in list-operator (#22803) 2025-07-24 12:21:20 +08:00
c6d7328e15 feat: revamp tool list page (#22879) 2025-07-24 11:51:39 +08:00
a327d024e9 fix: improved conversation name (#22840) 2025-07-24 11:28:05 +08:00
b8504ac7d0 refactor(dayjs): Refactor internationalized time formatting feature (#22870) (#22872) 2025-07-24 10:42:33 +08:00
bb33335dd4 add autofix (#22785) 2025-07-24 10:12:59 +08:00
5a02e599e1 chore: code format model-selector use enum (#22787)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-24 10:11:24 +08:00
d1572f47a0 feat: Add user variable processing function to chat history (#22863) 2025-07-24 10:06:49 +08:00
ef51678c73 orm filter -> where (#22801)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Claude <noreply@anthropic.com>
2025-07-24 00:57:45 +08:00
e64e7563f6 feat(k8s): Add pure migration option for api component (#22750) 2025-07-23 23:38:18 +08:00
0731db8c22 fix: private.pem keyPath error in windows (#22814)
Co-authored-by: songkunling <songkunling@cabrtech.com>
2025-07-23 23:29:46 +08:00
8c3e390172 test: add comprehensive integration tests for API key authentication system (#22856) 2025-07-23 23:14:40 +08:00
8278b39f85 fix tablestore full text search bug (#22853) 2025-07-23 19:31:47 +08:00
1c3c40db69 fix: tablestore TypeError when vector is missing (#22843)
Co-authored-by: xiaozhiqing.xzq <xiaozhiqing.xzq@alibaba-inc.com>
2025-07-23 18:59:16 +08:00
7ec94eb83c chore(version): bump to 1.7.0 (#22830)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: -LAN- <laipz8200@outlook.com>
2025-07-23 18:37:30 +08:00
79ab8b205f fix: improve max active requests calculation logic (#22847)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-23 18:36:24 +08:00
882f8bdd2c fix: update @headlessui/react to version 2.2.1 (#22839) 2025-07-23 17:29:03 +08:00
a366de26c4 feat: performance optimization (#22810) 2025-07-23 16:04:46 +08:00
cf07189bd2 chore: translate i18n files (#22824)
Co-authored-by: Nov1c444 <66365942+Nov1c444@users.noreply.github.com>
2025-07-23 15:42:59 +08:00
b4e152f775 FEAT: Tencent Vector search supports backward compatibility with the previous score calculation approach. (#22820)
Co-authored-by: wlleiiwang <wlleiiwang@tencent.com>
2025-07-23 15:38:31 +08:00
eaae79a581 feat: plugin auto upgrade strategy (#19758)
Co-authored-by: Joel <iamjoel007@gmail.com>
Co-authored-by: crazywoola <427733928@qq.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Novice <novice12185727@gmail.com>
2025-07-23 15:33:39 +08:00
e6913744ae fix: database lock timeout by separating external MCP calls from transactions (#22821) 2025-07-23 14:58:50 +08:00
6b544aa0b9 feat: decouple WorkflowAppRunner from AppRunner (#21739)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-23 14:56:06 +08:00
60c37fe492 Added a check to ensure the input text is a string before proceeding with parsing (#22809)
Co-authored-by: -LAN- <laipz8200@outlook.com>
2025-07-23 13:53:27 +08:00
82cc37bf51 chore(tests): Remove outdated tests (#22816)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-23 13:22:15 +08:00
ad67094e54 feat: oauth refresh token (#22744)
Co-authored-by: Yeuoly <admin@srmxy.cn>
2025-07-23 13:12:39 +08:00
6d3e198c3c Mapped column (#22644)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-23 00:39:59 +08:00
0f4809b9b8 refactor: centralize email internationalization handling (#22752)
Co-authored-by: Claude <noreply@anthropic.com>
2025-07-23 00:26:00 +08:00
5c7f0a533a fix: #22784 (#22795) 2025-07-22 22:28:45 +08:00
095bae01b2 fix: Invalid expected metadata value type error (#22793) 2025-07-22 22:28:16 +08:00
366d42d9c0 Fix invite member template (#22782) 2025-07-22 16:49:50 +08:00
a2048fd0f4 fix: tablestore vdb support metadata filter (#22774)
Co-authored-by: xiaozhiqing.xzq <xiaozhiqing.xzq@alibaba-inc.com>
2025-07-22 16:48:59 +08:00
ea069cc2d4 chore: translate i18n files (#22756)
Co-authored-by: RockChinQ <45992437+RockChinQ@users.noreply.github.com>
2025-07-22 15:40:20 +08:00
90c4df7919 fix(web): add dayjs relativeTime plugin (#22761) 2025-07-22 15:35:23 +08:00
ce15ca8929 feat: custom components in markdown comp (#22776) 2025-07-22 15:30:22 +08:00
qfl 841e53dbbe feat(trace): support external trace id propagation (#22623)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-22 15:17:43 +08:00
c987001a19 fix: add missing translation function to deprecation notice component (#22767) 2025-07-22 13:40:30 +08:00
e9c9c5d8f1 fix: single step node execution init error (#22764)
LGTM
2025-07-22 13:00:24 +08:00
c2c69ffb82 fix import error in marketplace (#22759) 2025-07-22 10:58:08 +08:00
2d8eace34b feat: plugin deprecation notice (#22685)
Co-authored-by: Wu Tianwei <30284043+WTW0313@users.noreply.github.com>
Co-authored-by: twwu <twwu@dify.ai>
2025-07-22 10:27:35 +08:00
eb06de0921 refactor: Modify the triggering method of the variable selector in the modification object subtree panel (#22237) (#22238) 2025-07-22 08:24:54 +08:00
58d92970a9 Optimize tencent_vector knowledge base deletion error handling with batch processing support (#22726)
Co-authored-by: liuchen15 <liuchen15@gaotu.cn>
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-22 08:21:41 +08:00
db09e7386f test: add comprehensive unit tests for AuthType (#22742) 2025-07-22 08:12:38 +08:00
b5599b2945 fix: prevent panel width localStorage pollution during viewport compression (#22745) (#22747) 2025-07-22 08:11:01 +08:00
f70ff72a58 chore: Add fonts-noto-cjk dependency for pypdfium2 (#22359)
Co-authored-by: kentaka347 <kentaka347@gmail.com>
2025-07-22 02:43:12 +08:00
e9893f1518 chore: use 'json_list' instead of 'json' to prevent ambiguity (#22739)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-22 02:40:40 +08:00
29f0a9ab94 Fix incorrect mcp method_name (#22736) 2025-07-21 21:14:38 +08:00
308f1340dd fix: migrations circle dependency (#22731) 2025-07-21 20:18:19 +08:00
5d5fa88857 fix: the text/icon shows wrong color in darkmode (#22724) 2025-07-21 18:07:49 +08:00
659d51a2da fix: complete file_upload schema in OpenAPI templates (#22700) (#22719) 2025-07-21 17:43:49 +08:00
8246f946c2 fix: Update the style of the batch operation component (#22716) 2025-07-21 17:39:40 +08:00
62b29b3d76 feat: update file manager and file factory implementations (#22704)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Claude <noreply@anthropic.com>
2025-07-21 17:37:08 +08:00
8fa3b3f931 fix: prevent app type description from overflowing the card (#22711) 2025-07-21 16:36:12 +08:00
a83e4ed9a4 Perf: remove user profile loading (#22710) 2025-07-21 16:35:52 +08:00
ab012fe1a2 fix: improve document filtering in full text search(elasticsearch) (#22683) 2025-07-21 15:59:37 +08:00
c7382150b5 test: add comprehensive unit tests for Firecrawl and Watercrawl auth providers (#22705) 2025-07-21 15:58:36 +08:00
74981a65c6 fix: Adjust tool selector popup styles (#22622) (#22697) 2025-07-21 15:04:01 +08:00
9251a66a10 fix: update analyticdb vector to do filter by metadata (#22698)
Co-authored-by: xiaozeyu <xiaozeyu.xzy@alibaba-inc.com>
2025-07-21 15:03:37 +08:00
3b23fc5ad8 fix: Correct and enhance the doc on CELERY_BROKER_URL in .env.example (#22693)
Co-authored-by: Jianheng Hou <jianhenh@example.com>
2025-07-21 13:55:16 +08:00
bddeebd4c9 refactor: remove unused dissolve_tenant static method (#22690) 2025-07-21 12:40:47 +08:00
d45e48eed7 fix: knowledge retrieval validation error (#22682) 2025-07-21 11:22:32 +08:00
cbc3474bbb minor fix: fix dissolve tenant check permission always failed (#22292) 2025-07-21 11:20:05 +08:00
383a79772c Increased the character limitation (#22679)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-21 09:58:10 +08:00
f8c7b28da7 oxlint (#22584) 2025-07-21 09:55:04 +08:00
74940ad3f2 chore: code improvement for mcp_client and mcp_tools_manage_service (#22645) 2025-07-21 09:52:55 +08:00
17a8f1a0f1 fix: avoid using node_data.version for judgement tool node version (#22462)
Co-authored-by: JzoNg <jzongcode@gmail.com>
2025-07-21 09:28:47 +08:00
f9f46bfcbe fix(i18n) update Japanese translation for "optional" (#22667) 2025-07-21 09:26:39 +08:00
bd2014d13b fix(i18n): "道具" into "ツール" (#22666) 2025-07-20 21:23:08 +08:00
cb660e8104 fix(i18n): standardize template variable names across all languages {{count}} (#22670) 2025-07-20 21:22:30 +08:00
a4a67ef1ec fix(i18n): improve Japanese translations for technical terms "dupulicate" (#22669) 2025-07-20 21:22:15 +08:00
09abc9951d chore: update pnpm version to 10.13.1 (#22660) 2025-07-20 11:10:44 +08:00
znn 19c09d6111 enabling vector index prefix name via configuration files (#22661) 2025-07-20 11:10:08 +08:00
6248658c04 fix: resolve Redis mock import error in test configuration (#22663) 2025-07-20 11:06:38 +08:00
274142c4c2 test: add comprehensive unit tests for auth service module (#22662) 2025-07-20 11:06:32 +08:00
ce794335e9 Fix/replace datetime patterns with naive utc now (#22654) 2025-07-20 11:05:53 +08:00
5985055aef Fix: Remove ${basePath} from the <Link> tag's href attribute. (#22636) 2025-07-19 22:07:29 +08:00
ff8fc96ebb chore: skip SuperLinter check on .editorconfig when no changes (#22649) 2025-07-19 08:53:47 +08:00
c70b0cb730 fix(docs): unify workflow_run_id style with other languages (#22642)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-18 21:09:53 +08:00
dba42567b1 fix: restore globals dependency in package.json and pnpm-lock.yaml (#22625) 2025-07-18 15:14:21 +08:00
71d96b671b feat: update VECTOR_STORE supported types in api/.env.example (#22617)
Co-authored-by: nicksarno <nicksarno@ztm-tech.com>
2025-07-18 13:54:48 +08:00
a93db6d797 fix: correct tracing for workflows and chatflows for phoenix (#22547) 2025-07-18 13:54:18 +08:00
f2389771cf test: add comprehensive API key authentication service tests (#22572) 2025-07-18 13:52:22 +08:00
znn ed263aed9f fix text splitter (#22596) 2025-07-18 13:51:58 +08:00
d37b6716cd Fix #22508 (#22590) 2025-07-18 13:43:58 +08:00
b035f3f884 feat: convert components to dynamic imports for improved performance (#22614) 2025-07-18 11:43:37 +08:00
1f9cd99bc2 refactor: elegant event dispatch patterns (92% complexity reduction) (#22600)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: Claude <noreply@anthropic.com>
2025-07-18 10:34:47 +08:00
ffee6f3288 fix: admin feedback uses the same method create_feedback (#22580)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-18 10:10:31 +08:00
460a825ef1 refactor: decouple Node and NodeData (#22581)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
2025-07-18 10:08:51 +08:00
54c56f2d05 chore: translate i18n files (#22563)
Co-authored-by: Yeuoly <45712896+Yeuoly@users.noreply.github.com>
2025-07-18 09:59:42 +08:00
61a5741c05 fix celery config (#22566)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-07-18 09:41:09 +08:00
0e235e5872 Fix validity check of mcp server address (#22510) 2025-07-18 09:40:16 +08:00
d0bece1679 fix(docs): correct workflow API parameter name from workflow_id to workflow_run_id (#22587) 2025-07-18 09:31:33 +08:00
1715dd4320 refactor: Fix some type error (#22594)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-18 09:26:29 +08:00
14513b7677 use nolyfill to reduce download size (#22589) 2025-07-18 09:26:14 +08:00
b88dd17fc1 feat(workflow_cycle_manager): Removes redundant repository methods and adds caching (#22597)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-18 09:26:05 +08:00
3826b57424 remove node-info for non mcp (#22595) 2025-07-18 09:25:57 +08:00
62586719b3 fix: remove redundant partial member list retrieval in dataset API (#15492) 2025-07-17 22:56:18 +08:00
e7d80bf7bf Fix: the pict type picture was not processed in the docx (#19305)
Co-authored-by: zqgame <zqgame@zqgame.local>
2025-07-17 22:53:35 +08:00
7a69b57823 Fix jinja2 variable naming inconsistencies (#22578) 2025-07-17 22:16:47 +08:00
2423f97c72 remove overrides (#22575) 2025-07-17 21:47:48 +08:00
a4ef900916 Support OAuth Integration for Plugin Tools (#22550)
Co-authored-by: zxhlyh <jasonapring2015@outlook.com>
Co-authored-by: Yeuoly <admin@srmxy.cn>
2025-07-17 17:18:44 +08:00
965e952336 minor translation fix: fix translation duplicate and typo, fix date format (#22548) 2025-07-17 16:05:33 +08:00
3cfba9e47b updating icon (#22485) 2025-07-17 15:10:36 +08:00
4b604bd79a fix: Python SDK WorkflowClient and KnowledgeBase client imports fixed. Added documentation for WorkflowClient. (#22476)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-17 15:09:14 +08:00
74caebac32 test: add comprehensive OAuth authentication unit tests (#22528) 2025-07-17 14:20:59 +08:00
fafb1d5fd7 feat: validate email according to RFC 5322 (#22540) 2025-07-17 14:20:44 +08:00
4b2baeea65 fix: use model provided by user in prompt generator (#22541) (#22542)
Co-authored-by: stream <stream@dify.ai>
2025-07-17 14:19:52 +08:00
93c27b134d minor typo fix: remove debug code and fix typo (#22539) 2025-07-17 13:52:15 +08:00
853c97a910 minor bug fix: wrong default metrics endpoint (#22535) 2025-07-17 13:49:41 +08:00
97f080fa03 fix: Japanese dateTimeFormat (#22516)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-07-17 13:46:40 +08:00
aeb68f99bd chore: translate i18n files (#22526)
Co-authored-by: JzoNgKVO <27049666+JzoNgKVO@users.noreply.github.com>
2025-07-17 13:25:39 +08:00
10e6b11ff6 fix: code node check decimal precision (#22522) 2025-07-17 13:21:17 +08:00
c3037c5491 minor code fix: remove duplicate type check branch (#22536) 2025-07-17 13:20:31 +08:00
e4ae1e2b94 fix(ci): remove test_url_signer (#22525) 2025-07-17 11:12:14 +08:00
a4f421028c Feat/change user email (#22213)
Co-authored-by: NFish <douxc512@gmail.com>
Co-authored-by: JzoNg <jzongcode@gmail.com>
Co-authored-by: Garfield Dai <dai.hai@foxmail.com>
2025-07-17 10:55:59 +08:00
a324d3942e Perf/web app authorization (#22524) 2025-07-17 10:52:10 +08:00
a3ced1b5a6 fix(signin): Improve login button UI (#22433) (#22514) 2025-07-17 10:15:24 +08:00
fb5c6dd644 chore: remove unused code (#22501)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-17 09:33:31 +08:00
d2933c2bfe fix: drop dead code phase2 unused class (#22042)
Signed-off-by: yihong0618 <zouzou0208@gmail.com>
2025-07-17 09:33:07 +08:00
3587bd4040 fix mcp error not input (#22505)
Signed-off-by: kenwoodjw <blackxin55+@gmail.com>
2025-07-17 09:32:42 +08:00
3aecceff27 Update bug_report.yml (#22502) 2025-07-16 21:34:14 +08:00
f082452c9b feat: add otel endpoint config (#22492) 2025-07-16 18:24:17 +08:00
30aa052a57 feat: Add Citations and Attributions to Agent Node (#18558)
Co-authored-by: oneness0 <2902216407@qq.com>
Co-authored-by: Novice <novice12185727@gmail.com>
2025-07-16 15:46:15 +08:00
bdb9f29948 feat(app): support custom max_active_requests per app (#22073) 2025-07-16 15:31:19 +08:00
66cc1b4308 feat(variable-list): add drag-and-drop functionality for variables in code node (#22127) 2025-07-16 15:24:19 +08:00
d52fb18457 feat: auto-fill MCP server description with app description #22443 (#22477) 2025-07-16 15:03:33 +08:00
4a2169bd5f Chore/update gh template (#22480) 2025-07-16 14:22:51 +08:00
2c9ee54a16 fix aliyun trace session_id (#22468) 2025-07-16 13:56:44 +08:00
aef67ed7ec fix: add background color for chat bubble in light and dark themes (#22472) 2025-07-16 13:36:51 +08:00
ddfd8c8525 feat(api): add UUIDv7 implementation in SQL and Python (#22058)
This PR introduces UUIDv7 implementations in both Python and SQL to establish the foundation for migrating from UUIDv4 to UUIDv7 as proposed in #19754.

ID generation algorithm of existing models are not changed, and new models should use UUIDv7 for ID generation.

Close #19754.
2025-07-16 13:07:08 +08:00
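
For reference, a minimal Python sketch of the UUIDv7 bit layout this PR describes (48-bit millisecond timestamp, version and variant bits, 74 random bits); this illustrates the format only and is not the PR's actual implementation:

import os
import time
import uuid

def uuid7() -> uuid.UUID:
    # 48-bit big-endian Unix timestamp in milliseconds (bits 127..80)
    ts_ms = int(time.time() * 1000) & ((1 << 48) - 1)
    # 12 random bits for rand_a, 62 random bits for rand_b
    rand_a = int.from_bytes(os.urandom(2), "big") & 0x0FFF
    rand_b = int.from_bytes(os.urandom(8), "big") & ((1 << 62) - 1)
    # Assemble: timestamp | version 7 | rand_a | variant 0b10 | rand_b
    value = (ts_ms << 80) | (0x7 << 76) | (rand_a << 64) | (0b10 << 62) | rand_b
    return uuid.UUID(int=value)

Because the timestamp occupies the most significant bits, IDs generated this way sort roughly by creation time, which is the main motivation for moving away from UUIDv4.
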
2c1ab4879f refactor(api): Separate SegmentType for Integer/Float to Enable Pydantic Serialization (#22025)
refactor(api): Separate SegmentType for Integer/Float to Enable Pydantic Serialization (#22025)

This PR addresses serialization issues in the VariablePool model by separating the `value_type` tags for `IntegerSegment`/`FloatSegment` and `IntegerVariable`/`FloatVariable`. Previously, both Integer and Float types shared the same `SegmentType.NUMBER` tag, causing conflicts during serialization.

Key changes:
- Introduce distinct `value_type` tags for Integer and Float segments/variables
- Add `VariableUnion` and `SegmentUnion` types for proper type discrimination
- Leverage Pydantic's discriminated union feature for seamless serialization/deserialization
- Enable accurate serialization of data structures containing these types

Closes #22024.
2025-07-16 12:31:37 +08:00
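
A minimal sketch of the Pydantic discriminated-union pattern this PR leans on; the class and field names here are illustrative, not Dify's actual definitions:

from typing import Annotated, Literal, Union
from pydantic import BaseModel, Field

class IntegerSegment(BaseModel):
    value_type: Literal["integer"] = "integer"
    value: int

class FloatSegment(BaseModel):
    value_type: Literal["float"] = "float"
    value: float

# Pydantic dispatches on the distinct value_type tags during (de)serialization
SegmentUnion = Annotated[Union[IntegerSegment, FloatSegment], Field(discriminator="value_type")]

class VariablePool(BaseModel):
    segments: list[SegmentUnion] = []

pool = VariablePool.model_validate({"segments": [{"value_type": "float", "value": 1.5}]})
assert isinstance(pool.segments[0], FloatSegment)

With a single shared NUMBER tag, the discriminator would be ambiguous and a float like 1.0 could round-trip back as an integer segment.
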
229b4d621e Improve Tooltip UX by enabling delay by default (#21383) 2025-07-16 11:26:54 +08:00
0dee41c074 fix: When var value changed, PromptEditor should be reset (#22219) 2025-07-16 11:22:54 +08:00
bf542233a9 minor fix: using Pydantic model_validate instead of deprecated parse_obj (#22239)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-16 10:57:08 +08:00
38106074b4 test: add comprehensive unit tests for console authentication and authorization decorators (#22439) 2025-07-16 10:07:01 +08:00
1f4b3591ae adding tooltip for bindingCount (#22450)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-16 09:59:42 +08:00
7bf3d2c8bf fix(api): Fix potential thread leak in MCP BaseSession (#22169)
The `BaseSession` class in the `core/mcp/session` package uses `ThreadPoolExecutor` 
to run the receive loop but fails to properly clean up the executor and receiver 
future, leading to potential thread leaks.

This PR addresses this issue by:
- Initializing `_executor` and `_receiver_future` attributes to `None` for proper cleanup checks
- Adding graceful shutdown with a 5-second timeout in the `__exit__` method
- Ensuring the ThreadPoolExecutor is properly shut down to prevent resource leaks

This fix prevents memory leaks and hanging threads in long-running scenarios where 
multiple MCP sessions are created and destroyed.

Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-16 00:01:44 +08:00
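
A condensed sketch of the shutdown pattern described above, with hypothetical method names standing in for the real BaseSession internals:

from concurrent.futures import Future, ThreadPoolExecutor
from typing import Optional

class Session:
    def __init__(self):
        # Start as None so __exit__ can tell whether startup ever happened
        self._executor: Optional[ThreadPoolExecutor] = None
        self._receiver_future: Optional[Future] = None

    def __enter__(self):
        self._executor = ThreadPoolExecutor(max_workers=1)
        self._receiver_future = self._executor.submit(self._receive_loop)
        return self

    def __exit__(self, exc_type, exc, tb):
        if self._receiver_future is not None:
            try:
                # Give the receive loop up to 5 seconds to drain and exit
                self._receiver_future.result(timeout=5)
            except Exception:
                pass  # Timed out or loop raised; proceed to shutdown anyway
        if self._executor is not None:
            # Without this, worker threads can outlive the session
            self._executor.shutdown(wait=False)

    def _receive_loop(self):
        pass  # Placeholder for reading messages until the stream closes
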
da53bf511f chore: add SQLALCHEMY_POOL_USE_LIFO option and missing SQLALCHEMY_POOL_PRE_PING env default value. (#22371) 2025-07-15 19:46:48 +08:00
7388fd1ec6 fix: Disable question editing in chat history (#22438) 2025-07-15 19:41:51 +08:00
b803eeb528 fix: Update condition items to support variable type acquisition (#22414) 2025-07-15 19:38:13 +08:00
14f79ee652 fix: create api workflow run repository error (#22422) 2025-07-15 16:12:02 +08:00
df89629e04 fix: conversation statistics including data from debugger (#22412)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-15 15:45:45 +08:00
d427088ab5 fix: remove PickerPanel padding (#22419) 2025-07-15 15:37:13 +08:00
32c541a9ed fix: generate deterministic operationId for root endpoints without one (#19888) 2025-07-15 14:19:55 +08:00
7e666dc3b1 fix(prompt-editor): show error warning for destructive env and conv var (#21802) 2025-07-15 14:10:50 +08:00
5247c19498 fix: code result included "error" field (#22392) 2025-07-15 13:55:00 +08:00
9823edd3a2 fix workflow node iterator. (#21008)
Signed-off-by: zhanluxianshen <zhanluxianshen@163.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-15 10:55:49 +08:00
88537991d6 fix: Metadata filtering with Manual option in Agent mode does not take effect when specifying input variables. (#20362) 2025-07-15 10:47:20 +08:00
8e910d8c59 fix(plugin): introduce response_type parameter in plugin list API to enable paginated response support (#22251) 2025-07-15 10:10:37 +08:00
a0b32b6027 feat(config-modal): add space to underscore conversion in variable name input of start node (#22284) 2025-07-15 10:00:19 +08:00
bf7b2c339b tablestore vector supports more methods (#22225)
Co-authored-by: xiaozhiqing.xzq <xiaozhiqing.xzq@alibaba-inc.com>
2025-07-15 09:58:48 +08:00
a1dfe6d402 chore: bump nextjs to 15.3 (#22262) 2025-07-15 09:35:17 +08:00
d2a3e8b9b1 Provides a set of Kubernetes manifests supporting version 1.6.0 (#22287) 2025-07-15 09:34:17 +08:00
ebb88bbe0b improve opik workflow_trace span name to node name (#22356) 2025-07-15 09:33:06 +08:00
b690a9d839 fix: aliyun trace title&description (#22347) 2025-07-14 17:14:24 +08:00
9d9423808e Update README.md (#22351) 2025-07-14 17:13:50 +08:00
3e96c0c468 fix: close session before doing long latency operation (#22306) 2025-07-14 15:16:10 +08:00
6eb155ae69 feat(api/repo): Allow to config repository implementation (#21458)
Signed-off-by: -LAN- <laipz8200@outlook.com>
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
2025-07-14 14:54:38 +08:00
b27c540379 Fix: Remove height and overflow style settings (#22327) 2025-07-14 13:57:53 +08:00
8b1f428ead Chore: Replace lodash/noop with lodash-es/noop (#22331) 2025-07-14 13:57:26 +08:00
1d54ffcf89 fix: error parsing object type parameters for code node (#22230) 2025-07-14 10:37:26 +08:00
d9eb5554b3 fix: prevent trigger form submit action when press 'enter' (#22313) 2025-07-14 09:59:20 +08:00
da94bdeb54 Update README.md (#22305) 2025-07-14 09:37:17 +08:00
27e5e2745b test: add comprehensive unit tests for login decorator (#22294) 2025-07-14 09:34:13 +08:00
1b26f9a4c6 fixing Enum part in backend and making it the same as front end (#22296) 2025-07-14 09:34:04 +08:00
df886259bd test(web): add password regexp test case (#22308) 2025-07-14 09:32:34 +08:00
016ff0feae fix(ui): prevent var icon hidden when only one var in list of start node (#22290) 2025-07-13 20:10:15 +08:00
aa6cad5f1d fix: tool's model selector and app selector not work (#22291) 2025-07-13 20:04:29 +08:00
e7388779a1 chore: bump ruff to 0.12.x (#22259) 2025-07-12 20:00:54 +08:00
6c233e05a9 minor fix: wrong and (#22242) 2025-07-12 19:59:07 +08:00
9f013f7644 Add unit test for account service (#22278) 2025-07-12 19:58:42 +08:00
253d8e5a5f test: add comprehensive unit tests for PassportService with exception handling optimization (#22268) 2025-07-12 19:56:20 +08:00
7f5087c6db reject whitespace characters in password regexp (#22232) 2025-07-11 19:18:18 +08:00
817071e448 fix: iteration itemType support conversation var (#22220) (#22236) 2025-07-11 19:16:30 +08:00
f193e9764e fix: Optimize the workspace panel width calculation (#22195) 2025-07-11 19:12:12 +08:00
5f9628e027 feat(workflow): add drag-and-drop support for variable list items for start node (#22150) 2025-07-11 18:53:29 +08:00
76d21743fd fix(web): Optimize AppInfo Component Layout (#22212) (#22218) 2025-07-11 17:54:09 +08:00
2d3c5b3b7c fix(emoji-picker): Adjust the style of the emoji picker (#22161) (#22231) 2025-07-11 17:52:16 +08:00
1d85979a74 chore: extract last run common logic (#22214) 2025-07-11 16:41:25 +08:00
2a85f28963 fix: Fixed the problem of plugin installation failure caused by incons… (#22156) 2025-07-11 15:18:42 +08:00
fe4e2f7921 feat: support var in suggested questions (#17340)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-11 15:07:32 +08:00
9a9ec0c99b feat: Add Audio configuration setting to app configuration UI (#21957) 2025-07-11 14:04:42 +08:00
d5624ba671 fix: resolve Docker file URL networking issue for plugins (#21334) (#21382)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-11 12:11:59 +08:00
c805238471 fix: adjust layout styles for header and dataset update (#22182) 2025-07-11 11:17:28 +08:00
e576b989b8 feat(tool): add support for API key authentication via query parameter (#21656) 2025-07-11 10:39:20 +08:00
f929bfb94c minor fix: remove duplicates, fix typo, and add restriction for get mcp server (#22170)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-11 09:40:17 +08:00
f4df80e093 fix(custom_tool): omit optional parameters instead of setting them to None (#22171) 2025-07-10 20:56:45 +08:00
390e4cc0bf chore(version): bump to 1.6.0 (#22136) 2025-07-10 17:49:32 +08:00
11f9a897e8 chore: fix schema editor can not hover item (#22155) 2025-07-10 17:33:11 +08:00
0e793a660d fix: add the default value to the dark icon (#22149) 2025-07-10 17:13:48 +08:00
7b2cab5767 feat: support ping method for MCP server (#22144) 2025-07-10 16:14:46 +08:00
c51b4290dc fix: mcp server card button display (#22141) 2025-07-10 16:14:18 +08:00
94a13d7d62 feat: add support for dark icons in provider and tool entities (#22081) 2025-07-10 14:43:31 +08:00
edf5fd28c9 update workflow events logs. (#19871)
Signed-off-by: zhanluxianshen <zhanluxianshen@163.com>
2025-07-10 14:21:34 +08:00
b834131f50 chore: translate i18n files (#22132)
Co-authored-by: iamjoel <2120155+iamjoel@users.noreply.github.com>
2025-07-10 14:19:26 +08:00
5375d9bb27 feat: the frontend part of mcp (#22131)
Co-authored-by: jZonG <jzongcode@gmail.com>
Co-authored-by: Novice <novice12185727@gmail.com>
Co-authored-by: nite-knite <nkCoding@gmail.com>
Co-authored-by: Hanqing Zhao <sherry9277@gmail.com>
2025-07-10 14:14:02 +08:00
535fff62f3 feat: add MCP support (#20716)
Co-authored-by: QuantumGhost <obelisk.reg+git@gmail.com>
2025-07-10 14:01:34 +08:00
18b58424ec Fix: Resolve issue with json_output (#22053) 2025-07-10 13:34:06 +08:00
10858ea1dc Chore: rm useless import and vars (#22108) 2025-07-10 11:47:43 +08:00
6f8c7a66c8 feat: add redis fallback mechanism #21043 (#21044)
Co-authored-by: tech <cto@sb>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-10 10:19:58 +08:00
a371390d6c optimize: batch embedding and qdrant write_consistency_factor parameter (#21776)
Co-authored-by: hobo.l <hobo.l@binance.com>
2025-07-10 10:16:59 +08:00
a316766ad7 chore: Update theme vars (#22113) 2025-07-10 10:11:31 +08:00
a9cc19f530 feat(question-classifier): add drag-and-drop sorting for topics list (#22066)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-10 10:03:11 +08:00
881a151d30 test: add comprehensive unit tests for encrypter module (#22102) 2025-07-10 10:01:15 +08:00
785c4caa67 fix: allow update plugin install settings (#22111) 2025-07-10 09:58:48 +08:00
4403bc67a1 fix(Drawer): add overflow hidden to ensure copy button is always clickable (#21992) (#22103)
Co-authored-by: wangheyang <wangheyang@corp.netease.com>
2025-07-10 09:20:02 +08:00
b237113311 Update clean_document_task.py (#22090) 2025-07-10 09:18:50 +08:00
4cb50f1809 feat(libs): Introduce extract_tenant_id (#22086)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-09 17:45:56 +08:00
1885426421 feat: Allow to change SSL verify in HTTP Node (#22052)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-09 15:53:24 +08:00
89b52471fb Optimize the memory usage of Tencent Vector Database (#22079)
Co-authored-by: wlleiiwang <wlleiiwang@tencent.com>
2025-07-09 15:53:06 +08:00
3643ed1014 Feat: description field for env variables (#21556) 2025-07-09 15:18:23 +08:00
e39236186d feat: introduce new env ALLOW_UNSAFE_DATA_SCHEME to allow rendering data uri scheme (#21321) 2025-07-09 10:12:40 +08:00
521488f926 Remove two unused files (#22022) 2025-07-09 09:28:26 +08:00
d61ea5a2de test: add comprehensive unit tests for UrlSigner (#22030) 2025-07-08 21:22:37 +08:00
816210d744 Expose LLM usage in workflows (#21766)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-07-08 21:18:00 +08:00
f925869f61 fix(variable): ensure unique variable names in var-list (#22038) 2025-07-08 15:41:27 +08:00
f62b59a805 don't add search params when opening detail links from marketplace. (#22034) 2025-07-08 15:15:38 +08:00
a4bdeba60d feat(question-classifier): add instanceId to class-item editor (#22002) 2025-07-08 10:04:05 +08:00
5c0cb7f912 test: add unit tests for password validation and hashing (#22003) 2025-07-08 10:00:00 +08:00
2ffbf5435d minor fix: fix duplicate local import of ToolProviderType (#22013)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-08 09:49:53 +08:00
71385d594d fix(variables): Improve getNodeUsedVars implementation details (#21987) 2025-07-08 09:33:13 +08:00
53c4912cbb feat: add unit tests and validation for aliyun tracing (#22012)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-08 09:32:30 +08:00
1760179093 minor fix: fix a typo for aliyun (#22001)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-07 22:04:38 +08:00
aded30b664 fix: resolve dropdown menu visibility issue caused by z-index conflict (#22000) 2025-07-07 21:58:05 +08:00
de54f8d0ef Chore: remove unreachable code (#21986) 2025-07-07 21:55:34 +08:00
5b0b64c7e5 fix: document delete image files check file exist (#21991) 2025-07-07 21:53:40 +08:00
b654c852a5 chore(docker): increase NGINX_CLIENT_MAX_BODY_SIZE from 15M to 100M i… (#21995) 2025-07-07 21:51:49 +08:00
c48b32c9e3 ENH(ui): enhance check list (#21932)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-07 14:52:36 +08:00
8f723697ef refactor(graph_engine): Take GraphRuntimeState out of GraphEngine (#21882) 2025-07-07 13:15:18 +08:00
de22648b9f feat: Add support for type="hidden" input elements in Markdown forms (#21922)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-07 10:35:30 +08:00
b9f56852dc fix: resolve JSON.parse precision issue causing 'list index out of ra… (#21253) 2025-07-07 10:05:54 +08:00
108cc3486f fix(agent): show agent run steps, fixes #21718 (#21945)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-07 09:59:47 +08:00
ac69b8b191 refactor: extract common url validator for config_entity.py (#21934)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-07 09:34:13 +08:00
8288145ee4 chore(i18n): fix typos and improve Korean translation (#21955) 2025-07-07 09:33:09 +08:00
51f6095be7 minor fix: translation for pause (#21949)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-05 12:45:29 +08:00
a201e9faee feat: Add Aliyun LLM Observability Integration (#21471) 2025-07-04 21:54:33 +08:00
fec6bafcda refactor(web): Restructure the operation buttons layout in the app information component (#21742) (#21818) 2025-07-04 21:53:21 +08:00
2639f950cc minor fix: removes the duplicated handling logic for TracingProviderEnum.ARIZE and TracingProviderEnum.PHOENIX from the OpsTraceProviderConfigMap (#21927)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-04 16:46:48 +08:00
6663187eca test:add unit test for api version config (#21919) 2025-07-04 15:33:20 +08:00
13990f31a1 feat: update account menu style (#21916) 2025-07-04 14:52:30 +08:00
de39b737b6 Feat list query (#21907) 2025-07-04 14:18:31 +08:00
a66ed7157e feat: add document pause and resume functionality (#21894) 2025-07-04 14:06:47 +08:00
c9c49200e0 use repair_json to fix JSON parse error of HTTPRequestNode (#21909)
Co-authored-by: lizb <lizb@sugon.com>
2025-07-04 14:01:17 +08:00
317d287458 fix(loop-variables): validate variable name input (#21888) 2025-07-03 23:30:56 +08:00
a79f37b686 fix: tts tool must choose a voice (#21877) 2025-07-03 17:10:01 +08:00
1c7404099d fix: prevent timeout in file encoding detection for large files (#21453)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-03 17:06:49 +08:00
ed54bd5121 fix: not search plugin if marketplace enabled (#21880) 2025-07-03 16:43:11 +08:00
06c3deff11 Fix: Add title attribute to edit time text for improved accessibility (#21871) 2025-07-03 16:07:07 +08:00
47954aa284 feat(api): validate and reject external datasets in document update (#21783) 2025-07-03 14:50:53 +08:00
f3c8625fe2 fix: The statistics page cannot display the tokens consumed by agent node (#21861) 2025-07-03 14:40:47 +08:00
ebc4fdc4b2 moving the MessageStatus class from the models.model module to models.enums module (#21867)
Signed-off-by: neatguycoding <15627489+NeatGuyCoding@users.noreply.github.com>
2025-07-03 13:56:23 +08:00
1af3d40c1a feat: Improve Observability with Arize & Phoenix Integration (#19840)
Co-authored-by: crazywoola <427733928@qq.com>
Co-authored-by: Gu <guchenhe@gmail.com>
2025-07-03 13:52:14 +08:00
31eb8548ef fix: Before publishing the app, previewing the voice of tts raises an er… (#21821)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-03 10:53:14 +08:00
a45aa1e505 feat(variables): auto replace spaces with underscores in variable name inputs (#21843) 2025-07-03 10:36:38 +08:00
cb0d4a1e15 style(config-var): update styling classes to use design system tokens (#21846) 2025-07-03 10:00:44 +08:00
21e68b9cf1 fix: nodeExtraData might be undefined (#21856) 2025-07-03 09:59:19 +08:00
a3654c8fe9 fix(web): adjust HTTP node method and input layout (#21834) (#21855) 2025-07-03 09:26:38 +08:00
980b0188d2 feat(tests): add structured output parser tests for LLM responses (#21838) 2025-07-03 09:10:04 +08:00
daab648c78 fix: plugin daemon start fail (#21841) 2025-07-03 09:09:02 +08:00
e17b33e004 chore: add message status enum (#21825)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-02 21:22:28 +08:00
4e7c9dd2ae feat: Retain llm setting for agent node (#21842) 2025-07-02 20:28:25 +08:00
5487463385 fix: add list contents handling in structured LLM output (#21837) 2025-07-02 19:14:21 +08:00
68f41bbaa8 Fix/workflow use nodes hooks (#21822) 2025-07-02 17:48:23 +08:00
3bfa9767c0 Chore/workflow last run (#21823)
Co-authored-by: Joel <iamjoel007@gmail.com>
2025-07-02 17:48:07 +08:00
8978b9d38b chore(version): Bump plugin daemon version to 0.1.3 (#21835)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-02 17:46:18 +08:00
cc89d7b1a5 remove unused config CURRENT_VERSION (#21832)
As the API module's version was refactored into the pyproject.toml file in "refactor: define the Dify project version in pyproject.toml" (#20910), the deprecated CURRENT_VERSION is no longer used and should be removed.
2025-07-02 17:22:22 +08:00
bb955806e0 chore(version): bump to 1.5.1 (#21808)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-07-02 16:17:40 +08:00
0c39490bb1 chore: put new run var to the top (#21816) 2025-07-02 15:56:22 +08:00
826bf25abf Fix: prevent SQL errors when metadata filter Constant value is None or blank (#21803) 2025-07-02 14:43:01 +08:00
89250a36b7 fix(api): files not returned in the answer node (#21807) 2025-07-02 13:54:10 +08:00
c2e599cd85 fix(api): Fix resetting sys var causing internal server error (#21604)
Also sorts draft variables by their creation time, ensuring a consistent order.
2025-07-02 13:36:35 +08:00
71d6cf1b1d fix: Make the latency and logs of web applications consistent. (#21578)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-02 12:04:33 +08:00
86179beaa5 FIX: dollar-sign escaping in preprocessLaTeX code-block handling (#21796)
Co-authored-by: LinYing <linying@momenta.ai>
2025-07-02 11:32:23 +08:00
f53b177e1f chore: new inspected variable add to top position instead of bottom (#21793) 2025-07-02 11:07:43 +08:00
58dfe2ca03 fix: when config plugin endpoint choose no start form app cause page crashed (#21789) 2025-07-02 10:55:38 +08:00
c4b960cc1a Improve Langfuse trace readability (#21777) 2025-07-02 09:15:24 +08:00
70035aa9a9 fix: notion knowledge datasets can't add new page (#21779) 2025-07-02 09:14:48 +08:00
a82943a83d minor fix: add parameters in error msg of Plugin service returned no options (#21662) 2025-07-01 22:58:59 +08:00
9a4c1fe834 fix: if parameter is not required, continue (#21761)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-07-01 21:25:45 +08:00
b9ff716c18 fix: incorrect api module version in pyproject.toml (#21755)
Co-authored-by: crazywoola <427733928@qq.com>
2025-07-01 17:12:52 +08:00
8516d15a4e fix: handle configure button for notion internal integration (#21412) 2025-07-01 16:58:00 +08:00
4198a533ad fix: code Interpreter error handling not work (#21736) 2025-07-01 16:16:34 +08:00
a67441689a chore: upgrade package versions for security reason (#21751) 2025-07-01 16:02:04 +08:00
5c11c22302 fix: can not reset system variables (#21750) 2025-07-01 16:00:17 +08:00
1a7ad195f0 refactor: define the Dify project version in pyproject.toml (#20910) 2025-07-01 12:07:24 +08:00
cf2173644e Release db.session connection before long-running operations in workflow new thread (#21726)
Co-authored-by: 李强04 <liqiang04@gaotu.cn>
2025-07-01 12:05:29 +08:00
1b99e44e99 feat: Retain LLM Configuration Settings When Changing Model (#21247) 2025-07-01 11:32:46 +08:00
b8b9c3a783 fix: set the func.coalesce() second parameter default value #21239 (#21240)
Signed-off-by: YoungLH <974840768@qq.com>
2025-07-01 11:31:14 +08:00
25de39d9c6 Feat: sync input variable names to main() function (#21667) 2025-07-01 10:57:07 +08:00
2455135eaa chore: translate i18n files (#21732)
Co-authored-by: douxc <7553076+douxc@users.noreply.github.com>
2025-07-01 10:53:59 +08:00
dffbdd140c fix: user cannot select 'Customer Service & Operations' category (#21733) 2025-07-01 10:48:11 +08:00
69b6f6f5d2 Fixes issue 21157/20661 extra quote in agent node (#21674)
Co-authored-by: Wang Han <wanghan@zhejianglab.org>
2025-07-01 10:43:46 +08:00
6013d90426 Fix/several bugs fixed in enterprise (#21729) 2025-07-01 10:42:11 +08:00
9ded6f6a40 [fix] #21678 User input of remote file link on the run page form causes conversation/message interface error (#21683)
Co-authored-by: 李强04 <liqiang04@gaotu.cn>
2025-07-01 10:40:39 +08:00
7c76458b18 fix: node validity detection (#21709) 2025-07-01 10:32:00 +08:00
eb9edf4908 fix: copy inspect variable value get extra quotes (#21680) 2025-06-30 22:14:29 +08:00
55a6b330ec Add get document detail service api (#21700)
Co-authored-by: lizb <lizb@sugon.com>
2025-06-30 22:13:56 +08:00
96d27d7087 fix: enter and exit full canvas cause nav items missing (#21691) 2025-06-30 22:00:35 +08:00
9588a64487 fix: keep search params in web app url when needs authorize (#21717) 2025-06-30 18:28:31 +08:00
18757d07c9 fix: #21427 correct segment settings when creating documents via API (#21673) 2025-06-29 14:49:32 +08:00
0f23e3d9ab fix(ui): no hover effect in copy button of code node (#21671) 2025-06-29 14:49:10 +08:00
765adabb32 feat: Add autofill by default value in endpoint plugin setting page. (#21669) 2025-06-29 13:09:21 +08:00
37e19de7ab feat(inner-api/workspace): include tenant details in CreateWorkspace response (#21636) 2025-06-27 18:28:03 +08:00
28f5c37211 Add Env 'CELERY_SENTINEL_PASSWORD' for celery connect redis sentinel. (#21198) 2025-06-27 17:37:11 +08:00
38a704743c chore: Add missing svg icon sources (#21627) 2025-06-27 16:56:22 +08:00
87efe45240 feat(plugin): Add API endpoint for invoking LLM with structured output (#21624) 2025-06-27 15:57:44 +08:00
7236c4895b fix: annotation remove functionality Fixes #21448 (#21616)
Co-authored-by: siqiuchen <siqiu.chen2@dentsplysirona.com>
2025-06-27 15:45:24 +08:00
0cb00d5fd2 refactor: move structured output support outside LLM Node (#21565)
Co-authored-by: Novice <novice12185727@gmail.com>
2025-06-27 14:55:31 +08:00
cdb9eecbaf fix: Resolving conflicts caused by tablestore dependency on enum34 (#21605)
Co-authored-by: xiaozhiqing.xzq <xiaozhiqing.xzq@alibaba-inc.com>
2025-06-27 14:17:52 +08:00
ae8653beb0 style: decrease navbar z-index value from 30 to 15, fix style error (#21612) 2025-06-27 13:22:29 +08:00
787ad5ab38 feat: Refactor panel component, add adaptive width observer to optimize panel width management (#21576) 2025-06-27 10:50:33 +08:00
81fc49d78c fix: value_selector will be empty string (#21598) 2025-06-27 10:38:22 +08:00
7d9d670f90 feat: to add tag when tag input is unfocus (#21555) 2025-06-27 10:36:01 +08:00
fd41645f95 feat: Add display control logic for the variable inspection panel (#21539) 2025-06-27 10:22:39 +08:00
a06af88b26 Feat/api validate model provider (#21582)
Co-authored-by: crazywoola <427733928@qq.com>
2025-06-27 09:59:44 +08:00
33f0457a23 fix: wrong token number when using qa_model and answer is updated. (#21574) 2025-06-27 09:11:41 +08:00
cea6522122 feat: add DYNAMIC_SELECT parameter type for dynamic options in parameter entities (#21425) 2025-06-26 17:44:14 +08:00
ae00ba44db fix: fix create custom modal overlay add tool (#21553) 2025-06-26 16:45:45 +08:00
79fa3c7519 fix(web): optimize the pop logic of the tool selector (#21558) (#21559) 2025-06-26 16:45:01 +08:00
d2814650e6 feat: prevent input of non-numeric values in number input (#21562) 2025-06-26 16:43:26 +08:00
17722f581b feat: add tooltip to workflow run node name (#21564) 2025-06-26 16:42:48 +08:00
ad9eebd02d fix: update retrieval method cache (#21409) 2025-06-26 10:58:21 +08:00
cefb8e4218 chore: Simplify code logic (#21496)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-06-26 10:09:52 +08:00
90aba77471 chore: remove unused code (#21497)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-06-26 10:08:17 +08:00
785d4b3de7 feat: refactor: test_dataset unit tests #21499 (#21502) 2025-06-26 10:07:54 +08:00
6bb82f8ee0 Fix minor comment missing (#21517) 2025-06-26 10:06:49 +08:00
45dc0a43d3 fix: prompt editor insert context (#21526) 2025-06-26 09:58:58 +08:00
1610f62a28 fix: var inspect doc link error (#21515) 2025-06-25 20:21:51 +08:00
d454f09e13 feat: add a magic field in the cancel invite api response (#21505) 2025-06-25 18:37:56 +08:00
00f0b569cc Feat/kb index (#20868)
Co-authored-by: twwu <twwu@dify.ai>
2025-06-25 17:52:59 +08:00
3acaa59885 fix(update_provider_when_message_created): Fix db transaction (#21503)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-06-25 17:23:45 +08:00
2d5cdbe79c chore(version): Bump to 1.5.0 (#21415)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-06-25 16:54:44 +08:00
9ad3553d7f fix: app description too long break ui (#21491) 2025-06-25 16:41:55 +08:00
8f15341f1e fix(event_handlers): DB dead lock (#21468)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-06-25 16:20:37 +08:00
31b0cff709 chore: update cover image (#21489) 2025-06-25 16:06:21 +08:00
1dd2607dfd feat(oauth): refactor proxy context (#21483) 2025-06-25 15:10:45 +08:00
164e5481c5 feat(oauth): plugin oauth service (#21480) 2025-06-25 14:14:30 +08:00
a098825fcc Update smtp.py (#21335) 2025-06-25 13:50:35 +08:00
0f5417ab84 feat(web): Contains sys.files in the default template. (#21476) 2025-06-25 13:06:07 +08:00
268da31332 fix(api): adding variable to variable pool recursively while loading draft variables. (#21478)
This PR fixes the issue that `ObjectSegment` values are not recursively added to the draft variable pool while loading draft variables from the database. It also fixes an issue with loading variables that have more than two elements in their selectors.

Enhances #19735.
Closes #21477.
2025-06-25 12:39:22 +08:00
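
A toy illustration of the recursive loading behaviour; the selector-keyed dict is a stand-in for the real variable pool API:

def add_draft_variable(pool: dict, selector: tuple, value) -> None:
    # Register the value itself, then descend into object members so
    # nested fields stay reachable via selectors longer than two elements.
    pool[selector] = value
    if isinstance(value, dict):
        for key, child in value.items():
            add_draft_variable(pool, (*selector, key), child)

pool: dict = {}
add_draft_variable(pool, ("node_1", "obj"), {"field": {"inner": 1}})
assert pool[("node_1", "obj", "field", "inner")] == 1
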
94f8e48647 Refactor update dataset (fix #21401) (#21402)
Co-authored-by: Jyong <76649700+JohnJyong@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-06-25 11:44:35 +08:00
819c02f1f5 fix: prompt editor click to insert variable (#21470) 2025-06-25 11:34:58 +08:00
449e16782e fix: update DocumentList to include max height and overflow styles (#21466) 2025-06-25 11:17:31 +08:00
257809bb30 fix: in plugin config panel appselector can not list all apps (#21457) 2025-06-25 11:14:46 +08:00
8a20134a5b fix: inputs.items is undefined (#21463) 2025-06-25 11:02:27 +08:00
27172b0898 fix (conf/code): Variables not correctly filled can still be referenced (#21451)
Co-authored-by: crazywoola <427733928@qq.com>
2025-06-25 10:27:36 +08:00
5b33d086c6 chore: translate i18n files (#21459)
Co-authored-by: laipz8200 <16485841+laipz8200@users.noreply.github.com>
2025-06-25 10:26:21 +08:00
c7ee0f2a93 fix(knowledge_base): Unchecked metadata name length (#21454)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-06-25 10:18:20 +08:00
8ea27bc341 feat: persist debug-and-preview panel width in localstorage (#21434) 2025-06-24 20:34:11 +08:00
c8ebad055c fix: text generation app log not show (#21436) 2025-06-24 20:33:10 +08:00
9de552cb42 fix: first message query error (#21444)
Co-authored-by: 刘江波 <jiangbo721@163.com>
2025-06-24 20:32:50 +08:00
501d3b6203 feat(api): Explicitly define version method for all BaseNode subclasses (#21443)
This PR addresses issue #21441 by implementing explicit `version` method definitions for all `BaseNode` subclasses to improve code maintainability.

### Changes

Added explicit `version` method definitions for all `BaseNode` subclasses:

- `QuestionClassifierNode`
- `KnowledgeRetrievalNode` 
- `AgentNode`

Added comprehensive test suite to validate:

1. All subclasses of `BaseNode` have explicitly defined `version` method
2. All subclasses have required `_node_type` property
3. The `(node_type, node_version)` combination is unique across all subclasses
2025-06-24 20:27:22 +08:00
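
A schematic of what such explicit definitions and the uniqueness check might look like; the names are abbreviated stand-ins, not the real codebase:

class BaseNode:
    _node_type: str = "base"

    @classmethod
    def version(cls) -> str:
        raise NotImplementedError

class AgentNode(BaseNode):
    _node_type = "agent"

    @classmethod
    def version(cls) -> str:
        return "1"

def test_node_type_version_combinations_are_unique():
    seen = set()
    for subclass in BaseNode.__subclasses__():
        key = (subclass._node_type, subclass.version())
        assert key not in seen, f"duplicate {key}"
        seen.add(key)
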
f668d89621 fix: last run not export var and ts problem (#21424) 2025-06-24 16:48:33 +08:00
d60287621a add dataset info in response (#21413) 2025-06-24 16:07:31 +08:00
197c018c1f fix missing style of conversation variable panel default value textarea (#21422) 2025-06-24 15:58:39 +08:00
45146edb31 fix(document_extractor): xlsx file column int type error (#21408) 2025-06-24 13:42:13 +08:00
973b3854b4 add dataset info in response (#21406) 2025-06-24 11:13:56 +08:00
2467bd738a Improve App Icon Picker: Stable Modal Height & Collapsible Emoji Style Section (#21399) 2025-06-24 11:13:28 +08:00
27baf383dd feat: make RESPECT_XFORWARD_HEADERS_ENABLED configurable in Compose (#21248) 2025-06-24 10:00:51 +08:00
537b3b2b4a Fixes #21366: Add link to the one-click Dify deployment solution on Alibaba Cloud DMS (#21385) 2025-06-24 09:40:58 +08:00
e0de3b97ab Fixes #21357 Correct the wrong link destination (#quick-start) (#21373) 2025-06-24 09:28:12 +08:00
4a59360ad3 chore: translate i18n files (#21395)
Co-authored-by: QuantumGhost <2939865+QuantumGhost@users.noreply.github.com>
2025-06-24 09:25:02 +08:00
1a1bfd4048 feat: last run frontend (#21369)
The frontend of feat: Persist Variables for Enhanced Debugging Workflow (#20699).

Co-authored-by: jZonG <jzongcode@gmail.com>
2025-06-24 09:10:30 +08:00
10b738a296 feat: Persist Variables for Enhanced Debugging Workflow (#20699)
This pull request introduces a feature aimed at improving the debugging experience during workflow editing. With the addition of variable persistence, the system will automatically retain the output variables from previously executed nodes. These persisted variables can then be reused when debugging subsequent nodes, eliminating the need for repetitive manual input.

By streamlining this aspect of the workflow, the feature minimizes user errors and significantly reduces debugging effort, offering a smoother and more efficient experience.

Key highlights of this change:

- Automatic persistence of output variables for executed nodes.
- Reuse of persisted variables to simplify input steps for nodes requiring them (e.g., `code`, `template`, `variable_assigner`).
- Enhanced debugging experience with reduced friction.

Closes #19735.
2025-06-24 09:05:29 +08:00
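
In spirit, the persistence layer behaves like the small pool below, where executed node outputs are retained and later resolved during debugging; the names are hypothetical, and the real feature stores draft variables server-side:

class DebugVariablePool:
    def __init__(self):
        self._store: dict = {}

    def persist(self, node_id: str, outputs: dict) -> None:
        # Retain every output variable of a node that just executed
        for name, value in outputs.items():
            self._store[(node_id, name)] = value

    def resolve(self, node_id: str, name: str):
        # Reuse a persisted value instead of prompting the user again
        return self._store.get((node_id, name))

pool = DebugVariablePool()
pool.persist("code_1", {"result": 42})
assert pool.resolve("code_1", "result") == 42
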
3113350e51 fix(migrate/tools): Correct parameter name in tool_builtin_providers migration function (#21358) 2025-06-23 14:56:34 +08:00
3537088135 Fixes #21351 (#21354) 2025-06-23 14:01:51 +08:00
43f5b21852 feat: add config for max-tree-depth (#21291) 2025-06-23 13:55:57 +08:00
e56d7547f7 fix: web error (#21340) 2025-06-23 13:54:52 +08:00
ea68c92bbb fix segment display of the default (#21317)
Co-authored-by: lizb <lizb@sugon.com>
2025-06-23 13:48:39 +08:00
b348455144 fix: Update segmentation type initialization to support parent-child mode based on document form (#21359) 2025-06-23 13:45:35 +08:00
f2f6e95880 fix: Update the logic for segment type settings to support parent-child mode selection. (#21278) 2025-06-23 10:36:30 +08:00
725a221315 fix: hide marketplace information (#21275) (#21276)
Co-authored-by: Xiaoba Yu <xb1823725853@gmail.com>
2025-06-23 10:23:43 +08:00
a0a89b562c Feature:Refactor batch update document status for #21324 (#21325) 2025-06-23 09:49:13 +08:00
3e7f8bad56 fix: markdown_extractor lost chunks if it starts without a header(#21308) (#21309) 2025-06-21 23:10:00 +08:00
d333aac84a refactor: env check with function (#21310) 2025-06-21 18:33:13 +08:00
3fefb34d44 fix: wrong translation (#21311) 2025-06-21 11:51:52 +08:00
870e73c03b Knowledge base API supports status updates #18147 (#18235) 2025-06-21 11:18:48 +08:00
ef20f694b2 Fixes #21203 Add a rapid deployment solution for deploying to Alibaba Cloud throug… (#21206)
Co-authored-by: herui.gh <>
2025-06-20 20:10:45 +08:00
57f7368a0e fix notion dataset rule not found (#21236) 2025-06-20 20:05:01 +08:00
fa70033438 style(dark): Adjust chat bubble background color (langgenius#20990) (#21264) 2025-06-20 20:04:39 +08:00
77be115f09 critical! insert_explore_app_list_api (#21277) 2025-06-20 20:03:30 +08:00
3f9ced5374 Revert "feat:conversation variable support file array" (#21273) 2025-06-20 19:57:28 +08:00
ba5eebf3a2 feat(mermaid): Rearchitect component for robustness, security, and theming (#21281) 2025-06-20 19:55:58 +08:00
186ac74c1f fix: update auto translations for days of the week (#21279) (#21287)
Co-authored-by: Xiaoba Yu <xb1823725853@gmail.com>
2025-06-20 19:54:43 +08:00
223448af18 Feat: support selecting model in auto generator (#21208) 2025-06-20 13:42:42 +08:00
d34795fc08 bug: fix minor exception msg missing (#21255) 2025-06-20 09:23:41 +08:00
40e8ad419b fix: not permitted schema of markdown link cause page crash (#21258) 2025-06-20 09:23:30 +08:00
6b1ad634f1 fix(workflow_run): sequence_number race. (#21228)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-06-19 17:53:49 +08:00
8f64327d57 feat: use default access mode when importing dsl (#21231) 2025-06-19 17:14:28 +08:00
1d41b642ca chore: remove redundant code (#21211) 2025-06-19 15:47:08 +08:00
cff039d123 fix: workflow import sync env and conversation variables (#21215) 2025-06-19 15:44:42 +08:00
0cfdb8c043 fix: fix load_balancing_config save error (#21213) 2025-06-19 15:41:36 +08:00
db20f9bb71 Fix/dataset page may redirect to apps if it still getting user info and workspace info (#21221) 2025-06-19 15:40:15 +08:00
2020a31785 fix(plugin/migrations) refactor data migration to use specific provider ID classes. (#21187) 2025-06-19 13:02:39 +08:00
2c04a16eaa Revert "bug: fix sequence number may be duplicated when multi-threads running the same workflow #21047" (#21207) 2025-06-19 12:05:44 +08:00
6325129761 fix wrongly removed reset nodes (#20880)
Co-authored-by: zhuqingchao <zhuqingchao@xiaomi.com>
2025-06-19 11:37:07 +08:00
9a18a98b58 fix keyword search top-k not initial (#21202) 2025-06-19 11:10:41 +08:00
7b9e01aa07 Feat/support sendgrid (#21011)
Co-authored-by: André de Matteo <andre.matteo@accenture.com>
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
2025-06-19 10:27:38 +08:00
2bb19f85c6 feat:conversation variable support file array (#21174)
Co-authored-by: kino.lu <kino.lu@vipshop.com>
2025-06-19 10:26:38 +08:00
17fe62cf91 feat: add support for Matrixone database (#20714) 2025-06-19 10:20:12 +08:00
e99861d4fe Add Filter of Get Workflow Logs (#21172)
Co-authored-by: lizb <lizb@sugon.com>
2025-06-19 10:10:16 +08:00
a205ee16b9 feat: improve the organize node operation (#21183) 2025-06-19 10:05:33 +08:00
9835730278 Translation fix (#21194) 2025-06-19 09:36:56 +08:00
8331b63baa add missing func args in apps chat. (#21085)
Signed-off-by: zhanluxianshen <zhanluxianshen@163.com>
2025-06-18 20:42:33 +08:00
2df4699312 fix(echarts): Resolve interaction issues on charts with timelines (#21185) 2025-06-18 20:22:33 +08:00
2eae7503e1 Minor Improvements for File Validation and Configuration Handling #21179 (#21171)
Co-authored-by: tech <cto@sb>
2025-06-18 18:33:28 +08:00
dbae5b0564 fix: workflow shortcuts (#21164) 2025-06-18 16:31:41 +08:00
30cfc9c172 Feat/plugin install scope management (#19963) 2025-06-18 16:25:00 +08:00
420a34a546 Fix: web app auth maybe failed (#21166) 2025-06-18 16:15:41 +08:00
918bb9a2f7 bug: fix sequence number may be duplicated when multi-threads running the same workflow #21047 (#21153) 2025-06-18 16:10:11 +08:00
99acdcdef7 chore: translate i18n files (#21163)
Co-authored-by: douxc <7553076+douxc@users.noreply.github.com>
2025-06-18 16:03:41 +08:00
59fdfc3728 fix: remove redundant PG_USER (#21162) 2025-06-18 16:03:32 +08:00
614c5e087e Feat: add check before install plugin (#20014) 2025-06-18 15:51:23 +08:00
83719cab73 fix: add environment variable POSTGRES_USER (#20786) 2025-06-18 14:44:42 +08:00
9e73e8b9e8 feat: add search endpoint for Firecrawl Integration (#20521)
Co-authored-by: crazywoola <427733928@qq.com>
2025-06-18 14:37:03 +08:00
d4be356ffb fix(api): add support for "image" icon when duplicate app (#20744) (#20761) 2025-06-18 14:35:42 +08:00
47e0f92c0f Fixes #20748 KnowledgeRetrievalNode return all external documents when reranker disabled even top-k configed (#20762) 2025-06-18 14:35:12 +08:00
6d033d4064 clean duplicate validation for dataset_configs (#20775)
Signed-off-by: zhanluxianshen <zhanluxianshen@163.com>
2025-06-18 14:34:58 +08:00
ab290ed968 remove unreachable code for lb model fetch. (#20797)
Signed-off-by: zhanluxianshen <zhanluxianshen@163.com>
2025-06-18 14:33:49 +08:00
879f839d75 refactor(graph_engine): Merge duplicated if block (#20784)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-06-18 14:33:29 +08:00
15800c6108 feat: Embedded chat window supports userVariables configuration. (#20983) 2025-06-18 14:27:02 +08:00
787a556bd7 add service api ratelimit check (#20878) 2025-06-18 14:05:28 +08:00
ea44b895e2 chore: stop enforcing uppercase text on the plugin navigation button in the header bar (#20890) 2025-06-18 14:02:45 +08:00
37f26c412f add healthcheck to oceanbase container (#20989) 2025-06-18 14:00:59 +08:00
1da8027445 feat: Support drop DSL file into the browser to create app (#20706)
Co-authored-by: crazywoola <100913391+crazywoola@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-06-18 13:58:57 +08:00
ce3e2e5eb8 Set a default value for the PLUGIN_S3_USE_AWS environment variable in the dify-plugin-daemon. (#21152) 2025-06-18 12:29:14 +08:00
b69f952e3e fix(web): number type prompt variable required validation not effective (#20898) 2025-06-18 11:33:10 +08:00
0784c6295d fix interface rendering exception with multiple <think>\n tags (#20977) 2025-06-18 11:31:04 +08:00
45c89bd6de feat: add pagination to notion extractor (#20919) 2025-06-18 11:30:55 +08:00
8ac3bd1768 feat: Add support for hidden attributes to form item types (#20956) 2025-06-18 11:30:30 +08:00
945d1569ee fix(web): fix unique key issue (#20809) (#20810) 2025-06-18 10:04:18 +08:00
61526c027d [Bug] fix misusing ACCESS_TOKEN_EXPIRE_MINUTES in jwt on exp (#21030)
Co-authored-by: tech <cto@sb>
2025-06-18 09:37:49 +08:00
4689e8953e fix: shorten connection timeout to pypi.org for deprecation check for weaviate client (#21131) 2025-06-18 09:25:52 +08:00
4e701b1efd fix(web): enhance API test page experience by adding loading state for test button (#21091) 2025-06-18 09:24:41 +08:00
21c2de2d7e fix(code-editor): optimize the loading style of the CodeEditor component in dark mode (#21116) (#21120) 2025-06-17 17:49:44 +08:00
72a6cde828 chore: translate i18n files (#21053)
Co-authored-by: zxhlyh <16177003+zxhlyh@users.noreply.github.com>
2025-06-17 17:49:01 +08:00
0476937f55 fix(agent_node):Fix spelling errors. (#21094) 2025-06-17 17:48:43 +08:00
fc187d6998 chore: responsive header (#21115) 2025-06-17 17:37:06 +08:00
0dcacdf83d feat: add a flask_context_manager. (#21061)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-06-17 16:31:29 +08:00
7a2a8a2ffd chore: check input variable key of code/template node is valid (#21057) 2025-06-17 16:27:51 +08:00
2ddc3658a0 fix: remove the x overflow scroll bar of monitoring page (#21059) 2025-06-17 16:26:56 +08:00
6a5236b200 chore: cleanup wrong and unused doc links in i18n translations by applying docLink method usage (#21112) 2025-06-17 16:14:53 +08:00
6c0a91a64f fix: some dark theme display incorrect (#21055) 2025-06-17 16:11:57 +08:00
df6451076b fix: prevent nodes from being unintentionally deleted by pressing the backspace key. (#21023) 2025-06-17 16:11:30 +08:00
19339c3de1 fix: doc error (#21108) 2025-06-17 15:56:45 +08:00
c5acffb034 fix: page loop in datasets and apps if current user is dataset_operator (#21114) 2025-06-17 15:48:58 +08:00
110bb5e5a5 fix: auto redirect to login page if web app needs login (#21096) 2025-06-17 11:33:46 +08:00
d7663159e9 Fix/webapp loop login (#21092) 2025-06-17 10:45:03 +08:00
2440ac43b1 fix: Replace GenericProviderID with ToolProviderID (#21064)
Signed-off-by: -LAN- <laipz8200@outlook.com>
2025-06-16 17:35:46 +08:00
41cb39eb5d fix: redirect to apps page if current user has no permission to visit dataset page (#21065) 2025-06-16 16:39:05 +08:00
2324 changed files with 138691 additions and 27597 deletions

View File

@@ -1,14 +1,15 @@
#!/bin/bash
npm add -g pnpm@10.11.1
npm add -g pnpm@10.13.1
cd web && pnpm install
pipx install uv
echo 'alias start-api="cd /workspaces/dify/api && uv run python -m flask run --host 0.0.0.0 --port=5001 --debug"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion"' >> ~/.bashrc
echo 'alias start-worker="cd /workspaces/dify/api && uv run python -m celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage"' >> ~/.bashrc
echo 'alias start-web="cd /workspaces/dify/web && pnpm dev"' >> ~/.bashrc
echo 'alias start-web-prod="cd /workspaces/dify/web && pnpm build && pnpm start"' >> ~/.bashrc
echo 'alias start-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env up -d"' >> ~/.bashrc
echo 'alias stop-containers="cd /workspaces/dify/docker && docker-compose -f docker-compose.middleware.yaml -p dify --env-file middleware.env down"' >> ~/.bashrc
source /home/vscode/.bashrc

View File

@@ -8,13 +8,15 @@ body:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
required: true
- label: This is only for bug report, if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
required: true
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
- label: I confirm that I am using English to submit this report, otherwise it will be closed.
required: true
- label: "[FOR CHINESE USERS] 请务必使用英提交 Issue,否则会被关闭。谢谢!:)"
- label: 【中文用户 & Non English User】请使用英语提交,否则会被关闭
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
@@ -42,20 +44,22 @@ body:
attributes:
label: Steps to reproduce
description: We highly suggest including screenshots and a bug report log. Please use the right markdown syntax for code blocks.
placeholder: Having detailed steps helps us reproduce the bug.
placeholder: Having detailed steps helps us reproduce the bug. If you have logs, please use fenced code blocks (triple backticks ```) to format them.
validations:
required: true
- type: textarea
attributes:
label: ✔️ Expected Behavior
placeholder: What were you expecting?
description: Describe what you expected to happen.
placeholder: What were you expecting? Please do not copy and paste the steps to reproduce here.
validations:
required: false
required: true
- type: textarea
attributes:
label: ❌ Actual Behavior
placeholder: What happened instead?
description: Describe what actually happened.
placeholder: What happened instead? Please do not copy and paste the steps to reproduce here.
validations:
required: false

View File

@@ -1,5 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: "\U0001F4A1 Model Providers & Plugins"
url: "https://github.com/langgenius/dify-official-plugins/issues/new/choose"
about: Report issues with official plugins or model providers, you will need to provide the plugin version and other relevant details.
- name: "\U0001F4AC Documentation Issues"
url: "https://github.com/langgenius/dify-docs/issues/new"
about: Report issues with the documentation, such as typos, outdated information, or missing content. Please provide the specific section and details of the issue.
- name: "\U0001F4E7 Discussions"
url: https://github.com/langgenius/dify/discussions/categories/general
about: General discussions and request help from the community
about: General discussions and seek help from the community

View File

@@ -1,24 +0,0 @@
name: "📚 Documentation Issue"
description: Report issues in our documentation
labels:
- documentation
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:)"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
attributes:
label: Provide a description of requested docs changes
placeholder: Briefly describe which document needs to be corrected and why.
validations:
required: true

View File

@@ -8,11 +8,11 @@ body:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
required: true
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:)"
- label: I confirm that I am using English to submit this report, otherwise it will be closed.
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true

.github/ISSUE_TEMPLATE/refactor.yml (new file)
View File

@@ -0,0 +1,44 @@
name: "✨ Refactor"
description: Refactor existing code for improved readability and maintainability.
title: "[Chore/Refactor] "
labels:
- refactor
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have read the [Contributing Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) and [Language Policy](https://github.com/langgenius/dify/issues/1542).
required: true
- label: This is only for refactoring, if you would like to ask a question, please head to [Discussions](https://github.com/langgenius/dify/discussions/categories/general).
required: true
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report, otherwise it will be closed.
required: true
- label: 【中文用户 & Non English User】请使用英语提交,否则会被关闭
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: textarea
id: description
attributes:
label: Description
placeholder: "Describe the refactor you are proposing."
validations:
required: true
- type: textarea
id: motivation
attributes:
label: Motivation
placeholder: "Explain why this refactor is necessary."
validations:
required: false
- type: textarea
id: additional-context
attributes:
label: Additional Context
placeholder: "Add any other context or screenshots about the request here."
validations:
required: false

View File

@@ -1,55 +0,0 @@
name: "🌐 Localization/Translation issue"
description: Report incorrect translations. [please use English :)]
labels:
- translation
body:
- type: checkboxes
attributes:
label: Self Checks
description: "To make sure we get to you in time, please check the following :)"
options:
- label: I have searched for existing issues [search for existing issues](https://github.com/langgenius/dify/issues), including closed ones.
required: true
- label: I confirm that I am using English to submit this report (我已阅读并同意 [Language Policy](https://github.com/langgenius/dify/issues/1542)).
required: true
- label: "[FOR CHINESE USERS] 请务必使用英文提交 Issue否则会被关闭。谢谢:)"
required: true
- label: "Please do not modify this template :) and fill in all the required fields."
required: true
- type: input
attributes:
label: Dify version
description: Hover over system tray icon or look at Settings
validations:
required: true
- type: input
attributes:
label: Utility with translation issue
placeholder: Some area
description: Please input here the utility with the translation issue
validations:
required: true
- type: input
attributes:
label: 🌐 Language affected
placeholder: "German"
validations:
required: true
- type: textarea
attributes:
label: ❌ Actual phrase(s)
placeholder: What is there? Please include a screenshot as that is extremely helpful.
validations:
required: true
- type: textarea
attributes:
label: ✔️ Expected phrase(s)
placeholder: What was expected?
validations:
required: true
- type: textarea
attributes:
label: Why is the current translation wrong
placeholder: Why do you feel this is incorrect?
validations:
required: true

View File

@@ -8,7 +8,7 @@ inputs:
uv-version:
description: UV version to set up
required: true
default: '~=0.7.11'
default: '0.8.9'
uv-lockfile:
description: Path to the UV lockfile to restore cache from
required: true
@@ -26,7 +26,7 @@ runs:
python-version: ${{ inputs.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v5
uses: astral-sh/setup-uv@v6
with:
version: ${{ inputs.uv-version }}
python-version: ${{ inputs.python-version }}

View File

@@ -47,15 +47,17 @@ jobs:
- name: Run Unit tests
run: |
uv run --project api bash dev/pytest/pytest_unit_tests.sh
- name: Coverage Summary
run: |
set -x
# Extract coverage percentage and create a summary
TOTAL_COVERAGE=$(python -c 'import json; print(json.load(open("coverage.json"))["totals"]["percent_covered_display"])')
# Create a detailed coverage summary
echo "### Test Coverage Summary :test_tube:" >> $GITHUB_STEP_SUMMARY
echo "Total Coverage: ${TOTAL_COVERAGE}%" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
uv run --project api coverage report >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY
uv run --project api coverage report --format=markdown >> $GITHUB_STEP_SUMMARY
- name: Run dify config tests
run: uv run --project api dev/pytest/pytest_config_tests.py
@ -83,11 +85,20 @@ jobs:
compose-file: |
docker/docker-compose.middleware.yaml
services: |
db
redis
sandbox
ssrf_proxy
- name: setup test config
run: |
cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env
- name: Run Workflow
run: uv run --project api bash dev/pytest/pytest_workflow.sh
- name: Run Tool
run: uv run --project api bash dev/pytest/pytest_tools.sh
- name: Run TestContainers
run: uv run --project api bash dev/pytest/pytest_testcontainers.sh

.github/workflows/autofix.yml vendored Normal file
View File

@ -0,0 +1,31 @@
name: autofix.ci
on:
workflow_call:
pull_request:
push:
branches: [ "main" ]
permissions:
contents: read
jobs:
autofix:
if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
# Use uv to ensure we have the same ruff version in CI and locally.
- uses: astral-sh/setup-uv@7edac99f961f18b581bbd960d59d049f04c0002f
- run: |
cd api
uv sync --dev
# Fix lint errors
uv run ruff check --fix-only .
# Format code
uv run ruff format .
- name: ast-grep
run: |
uvx --from ast-grep-cli sg --pattern 'db.session.query($WHATEVER).filter($HERE)' --rewrite 'db.session.query($WHATEVER).where($HERE)' -l py --update-all
- uses: autofix-ci/action@635ffb0c9798bd160680f18fd73371e355b85f27
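The ast-grep step above mechanically migrates SQLAlchemy's legacy `Query.filter()` calls to the 2.0-style `Query.where()` alias. A minimal sketch of what the rewrite does to application code (the `Account` model and query are hypothetical, for illustration only; both call forms produce identical SQL):

```python
from sqlalchemy import String, create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Account(Base):  # hypothetical model, not a Dify class
    __tablename__ = "accounts"
    id: Mapped[str] = mapped_column(String, primary_key=True)

engine = create_engine("sqlite://")  # in-memory database for the sketch
Base.metadata.create_all(engine)

with Session(engine) as session:
    # Before the rewrite: session.query(Account).filter(Account.id == "a1")
    # After the rewrite (matching the --rewrite pattern above):
    account = session.query(Account).where(Account.id == "a1").first()
```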

View File

@ -6,6 +6,8 @@ on:
- "main"
- "deploy/dev"
- "deploy/enterprise"
- "build/**"
- "release/e-*"
tags:
- "*"

View File

@ -28,7 +28,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
api/**
@ -49,8 +49,8 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
run: |
uv run --directory api ruff --version
uv run --directory api ruff check --diff ./
uv run --directory api ruff format --check --diff ./
uv run --directory api ruff check ./
uv run --directory api ruff format --check ./
- name: Dotenv check
if: steps.changed-files.outputs.any_changed == 'true'
@ -75,14 +75,14 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: web/**
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
package_json_file: web/package.json
run_install: false
- name: Setup NodeJS
@ -95,10 +95,12 @@ jobs:
- name: Web dependencies
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: Web style check
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm run lint
docker-compose-template:
@ -113,7 +115,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
docker/generate_docker_compose
@ -144,7 +146,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: |
**.sh
@ -152,13 +154,15 @@ jobs:
**.yml
**Dockerfile
dev/**
.editorconfig
- name: Super-linter
uses: super-linter/super-linter/slim@v7
uses: super-linter/super-linter/slim@v8
if: steps.changed-files.outputs.any_changed == 'true'
env:
BASH_SEVERITY: warning
DEFAULT_BRANCH: main
DEFAULT_BRANCH: origin/main
EDITORCONFIG_FILE_NAME: editorconfig-checker.json
FILTER_REGEX_INCLUDE: pnpm-lock.yaml
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
@ -168,16 +172,6 @@ jobs:
# FIXME: temporarily disabled until api-docker.yaml's run script is fixed for shellcheck
# VALIDATE_GITHUB_ACTIONS: true
VALIDATE_DOCKERFILE_HADOLINT: true
VALIDATE_EDITORCONFIG: true
VALIDATE_XML: true
VALIDATE_YAML: true
- name: EditorConfig checks
uses: super-linter/super-linter/slim@v7
env:
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
IGNORE_GENERATED_FILES: true
IGNORE_GITIGNORED_FILES: true
# EditorConfig validation
VALIDATE_EDITORCONFIG: true
EDITORCONFIG_FILE_NAME: editorconfig-checker.json

View File

@ -1,13 +1,18 @@
name: Check i18n Files and Create PR
on:
pull_request:
types: [closed]
push:
branches: [main]
paths:
- 'web/i18n/en-US/*.ts'
permissions:
contents: write
pull-requests: write
jobs:
check-and-update:
if: github.event.pull_request.merged == true
if: github.repository == 'langgenius/dify'
runs-on: ubuntu-latest
defaults:
run:
@ -15,8 +20,8 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2 # last 2 commits
persist-credentials: false
fetch-depth: 2
token: ${{ secrets.GITHUB_TOKEN }}
- name: Check for file changes in i18n/en-US
id: check_files
@ -27,6 +32,13 @@ jobs:
echo "Changed files: $changed_files"
if [ -n "$changed_files" ]; then
echo "FILES_CHANGED=true" >> $GITHUB_ENV
file_args=""
for file in $changed_files; do
filename=$(basename "$file" .ts)
file_args="$file_args --file=$filename"
done
echo "FILE_ARGS=$file_args" >> $GITHUB_ENV
echo "File arguments: $file_args"
else
echo "FILES_CHANGED=false" >> $GITHUB_ENV
fi
@ -34,7 +46,7 @@ jobs:
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
version: 10
package_json_file: web/package.json
run_install: false
- name: Set up Node.js
@ -47,16 +59,19 @@ jobs:
- name: Install dependencies
if: env.FILES_CHANGED == 'true'
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: Run npm script
- name: Generate i18n translations
if: env.FILES_CHANGED == 'true'
run: pnpm run auto-gen-i18n
working-directory: ./web
run: pnpm run auto-gen-i18n ${{ env.FILE_ARGS }}
- name: Create Pull Request
if: env.FILES_CHANGED == 'true'
uses: peter-evans/create-pull-request@v6
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Update i18n files based on en-US changes
title: 'chore: translate i18n files'
body: This PR was automatically created to update i18n files based on changes in en-US locale.

View File

@ -84,10 +84,14 @@ jobs:
elasticsearch
oceanbase
- name: Check VDB Ready (TiDB, Oceanbase)
- name: setup test config
run: |
uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
uv run --project api python api/tests/integration_tests/vdb/oceanbase/check_oceanbase_ready.py
echo $(pwd)
ls -lah .
cp api/tests/integration_tests/.env.example api/tests/integration_tests/.env
- name: Check VDB Ready (TiDB)
run: uv run --project api python api/tests/integration_tests/vdb/tidb_vector/check_tiflash_ready.py
- name: Test Vector Stores
run: uv run --project api bash dev/pytest/pytest_vdb.sh

View File

@ -27,7 +27,7 @@ jobs:
- name: Check changed files
id: changed-files
uses: tj-actions/changed-files@v45
uses: tj-actions/changed-files@v46
with:
files: web/**
@ -35,7 +35,7 @@ jobs:
if: steps.changed-files.outputs.any_changed == 'true'
uses: pnpm/action-setup@v4
with:
version: 10
package_json_file: web/package.json
run_install: false
- name: Setup Node.js
@ -48,8 +48,10 @@ jobs:
- name: Install dependencies
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm install --frozen-lockfile
- name: Run tests
if: steps.changed-files.outputs.any_changed == 'true'
working-directory: ./web
run: pnpm test

.gitignore vendored
View File

@ -179,6 +179,7 @@ docker/volumes/pgvecto_rs/data/*
docker/volumes/couchbase/*
docker/volumes/oceanbase/*
docker/volumes/plugin_daemon/*
docker/volumes/matrixone/*
!docker/volumes/oceanbase/init.d
docker/nginx/conf.d/default.conf
@ -196,6 +197,8 @@ sdks/python-client/dify_client.egg-info
!.vscode/README.md
pyrightconfig.json
api/.vscode
# vscode Code History Extension
.history
.idea/
@ -210,3 +213,8 @@ mise.toml
# Next.js build output
.next/
# AI Assistant
.roo/
api/.env.backup
/clickzetta

CLAUDE.md Normal file
View File

@ -0,0 +1,83 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## Project Overview
Dify is an open-source platform for developing LLM applications with an intuitive interface combining agentic AI workflows, RAG pipelines, agent capabilities, and model management.
The codebase consists of:
- **Backend API** (`/api`): Python Flask application with Domain-Driven Design architecture
- **Frontend Web** (`/web`): Next.js 15 application with TypeScript and React 19
- **Docker deployment** (`/docker`): Containerized deployment configurations
## Development Commands
### Backend (API)
All Python commands must be prefixed with `uv run --project api`:
```bash
# Start development servers
./dev/start-api # Start API server
./dev/start-worker # Start Celery worker
# Run tests
uv run --project api pytest # Run all tests
uv run --project api pytest tests/unit_tests/ # Unit tests only
uv run --project api pytest tests/integration_tests/ # Integration tests
# Code quality
./dev/reformat # Run all formatters and linters
uv run --project api ruff check --fix ./ # Fix linting issues
uv run --project api ruff format ./ # Format code
uv run --project api mypy . # Type checking
```
### Frontend (Web)
```bash
cd web
pnpm lint # Run ESLint
pnpm eslint-fix # Fix ESLint issues
pnpm test # Run Jest tests
```
## Testing Guidelines
### Backend Testing
- Use `pytest` for all backend tests
- Write tests first (TDD approach)
- Test structure: Arrange-Act-Assert
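A minimal sketch of the Arrange-Act-Assert structure just described, with a hypothetical function under test:

```python
def add(a: int, b: int) -> int:
    return a + b

def test_add() -> None:
    # Arrange: prepare inputs and expected state
    a, b = 2, 3
    # Act: invoke the unit under test
    result = add(a, b)
    # Assert: verify the observable outcome
    assert result == 5
```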
## Code Style Requirements
### Python
- Use type hints for all functions and class attributes
- No `Any` types unless absolutely necessary
- Implement special methods (`__repr__`, `__str__`) appropriately
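A short illustration of these rules, assuming a hypothetical `Workspace` class (not an actual Dify model): fully annotated signatures, no `Any`, and an informative `__repr__`:

```python
class Workspace:
    def __init__(self, workspace_id: str, name: str) -> None:
        self.workspace_id = workspace_id
        self.name = name

    def __repr__(self) -> str:
        # Unambiguous representation that aids debugging and logging
        return f"Workspace(workspace_id={self.workspace_id!r}, name={self.name!r})"
```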
### TypeScript/JavaScript
- Strict TypeScript configuration
- ESLint with Prettier integration
- Avoid `any` type
## Important Notes
- **Environment Variables**: Always use UV for Python commands: `uv run --project api <command>`
- **Comments**: Only write meaningful comments that explain "why", not "what"
- **File Creation**: Always prefer editing existing files over creating new ones
- **Documentation**: Don't create documentation files unless explicitly requested
- **Code Quality**: Always run `./dev/reformat` before committing backend changes
## Common Development Tasks
### Adding a New API Endpoint
1. Create controller in `/api/controllers/`
2. Add service logic in `/api/services/`
3. Update routes in controller's `__init__.py`
4. Write tests in `/api/tests/`
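A minimal, hypothetical sketch of this four-step flow; the module paths, names, and bare `Flask` app are illustrative only and do not reflect Dify's actual registration pattern:

```python
from flask import Flask, Response, jsonify

# Step 2: service logic (would live in /api/services/)
class GreetingService:
    @staticmethod
    def greet(name: str) -> dict[str, str]:
        return {"message": f"Hello, {name}"}

# Steps 1 and 3: controller and route registration (would live in /api/controllers/)
app = Flask(__name__)

@app.get("/greetings/<name>")
def get_greeting(name: str) -> Response:
    # Controllers stay thin and delegate to the service layer
    return jsonify(GreetingService.greet(name))

# Step 4: test (would live in /api/tests/)
def test_get_greeting() -> None:
    response = app.test_client().get("/greetings/Dify")
    assert response.get_json() == {"message": "Hello, Dify"}
```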
## Project-Specific Conventions
- All async tasks use Celery with Redis as broker

View File

@ -54,7 +54,7 @@
<a href="./README_BN.md"><img alt="README in বাংলা" src="https://img.shields.io/badge/বাংলা-d9d9d9"></a>
</p>
Dify is an open-source LLM app development platform. Its intuitive interface combines agentic AI workflow, RAG pipeline, agent capabilities, model management, observability features, and more, allowing you to quickly move from prototype to production.
Dify is an open-source platform for developing LLM applications. Its intuitive interface combines agentic AI workflows, RAG pipelines, agent capabilities, model management, observability features, and more, allowing you to quickly move from prototype to production.
## Quick start
@ -65,7 +65,7 @@ Dify is an open-source LLM app development platform. Its intuitive interface com
</br>
The easiest way to start the Dify server is through [docker compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
The easiest way to start the Dify server is through [Docker Compose](docker/docker-compose.yaml). Before running Dify with the following commands, make sure that [Docker](https://docs.docker.com/get-docker/) and [Docker Compose](https://docs.docker.com/compose/install/) are installed on your machine:
```bash
cd dify
@ -205,6 +205,7 @@ If you'd like to configure a highly-available setup, there are community-contrib
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Using Terraform for Deployment
@ -224,14 +225,28 @@ Deploy Dify to AWS with [CDK](https://aws.amazon.com/cdk/)
##### AWS
- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Using Alibaba Cloud Computing Nest
Quickly deploy Dify to Alibaba cloud with [Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Using Alibaba Cloud Data Management
One-Click deploy Dify to Alibaba Cloud with [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### Deploy to AKS with Azure Devops Pipeline
One-Click deploy Dify to AKS with [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
## Contributing
For those who'd like to contribute code, see our [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.
> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
> We are looking for contributors to help translate Dify into languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
## Community & contact
@ -252,8 +267,8 @@ At the same time, please consider supporting Dify by sharing it on social media
## Security disclosure
To protect your privacy, please avoid posting security issues on GitHub. Instead, send your questions to security@dify.ai and we will provide you with a more detailed answer.
To protect your privacy, please avoid posting security issues on GitHub. Instead, report issues to security@dify.ai, and our team will respond with a detailed answer.
## License
This repository is available under the [Dify Open Source License](LICENSE), which is essentially Apache 2.0 with a few additional restrictions.
This repository is licensed under the [Dify Open Source License](LICENSE), based on Apache 2.0 with additional conditions.

View File

@ -188,6 +188,7 @@ docker compose up -d
- [رسم بياني Helm من قبل @magicsong](https://github.com/magicsong/ai-charts)
- [ملف YAML من قبل @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [ملف YAML من قبل @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 جديد! ملفات YAML (تدعم Dify v1.6.0) بواسطة @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### استخدام Terraform للتوزيع
@ -207,14 +208,27 @@ docker compose up -d
##### AWS
- [AWS CDK بواسطة @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK بواسطة @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK بواسطة @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### استخدام Alibaba Cloud للنشر
[بسرعة نشر Dify إلى سحابة علي بابا مع عش الحوسبة السحابية علي بابا](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### استخدام Alibaba Cloud Data Management للنشر
انشر Dify على علي بابا كلاود بنقرة واحدة باستخدام [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### استخدام Azure Devops Pipeline للنشر على AKS
انشر Dify على AKS بنقرة واحدة باستخدام [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
## المساهمة
لأولئك الذين يرغبون في المساهمة، انظر إلى [دليل المساهمة](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) لدينا.
في الوقت نفسه، يرجى النظر في دعم Dify عن طريق مشاركته على وسائل التواصل الاجتماعي وفي الفعاليات والمؤتمرات.
> نحن نبحث عن مساهمين لمساعدة في ترجمة Dify إلى لغات أخرى غير اللغة الصينية المندرين أو الإنجليزية. إذا كنت مهتمًا بالمساعدة، يرجى الاطلاع على [README للترجمة](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) لمزيد من المعلومات، واترك لنا تعليقًا في قناة `global-users` على [خادم المجتمع على Discord](https://discord.gg/8Tpq4AcN9c).
> نحن نبحث عن مساهمين لمساعدة في ترجمة Dify إلى لغات أخرى غير اللغة الصينية المندرين أو الإنجليزية. إذا كنت مهتمًا بالمساعدة، يرجى الاطلاع على [README للترجمة](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) لمزيد من المعلومات، واترك لنا تعليقًا في قناة `global-users` على [خادم المجتمع على Discord](https://discord.gg/8Tpq4AcN9c).
**المساهمون**

View File

@ -204,6 +204,8 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 নতুন! YAML ফাইলসমূহ (Dify v1.6.0 সমর্থিত) তৈরি করেছেন @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### টেরাফর্ম ব্যবহার করে ডিপ্লয়
@ -223,14 +225,28 @@ GitHub-এ ডিফাইকে স্টার দিয়ে রাখুন
##### AWS
- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud ব্যবহার করে ডিপ্লয়
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management ব্যবহার করে ডিপ্লয়
[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### AKS-এ ডিপ্লয় করার জন্য Azure Devops Pipeline ব্যবহার
[Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) ব্যবহার করে Dify কে AKS-এ এক ক্লিকে ডিপ্লয় করুন
## Contributing
যারা কোড অবদান রাখতে চান, তাদের জন্য আমাদের [অবদান নির্দেশিকা](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) দেখুন।
একই সাথে, সোশ্যাল মিডিয়া এবং ইভেন্ট এবং কনফারেন্সে এটি শেয়ার করে Dify কে সমর্থন করুন।
> আমরা ম্যান্ডারিন বা ইংরেজি ছাড়া অন্য ভাষায় Dify অনুবাদ করতে সাহায্য করার জন্য অবদানকারীদের খুঁজছি। আপনি যদি সাহায্য করতে আগ্রহী হন, তাহলে আরও তথ্যের জন্য [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) দেখুন এবং আমাদের [ডিসকর্ড কমিউনিটি সার্ভার](https://discord.gg/8Tpq4AcN9c) এর `গ্লোবাল-ইউজারস` চ্যানেলে আমাদের একটি মন্তব্য করুন।
> আমরা ম্যান্ডারিন বা ইংরেজি ছাড়া অন্য ভাষায় Dify অনুবাদ করতে সাহায্য করার জন্য অবদানকারীদের খুঁজছি। আপনি যদি সাহায্য করতে আগ্রহী হন, তাহলে আরও তথ্যের জন্য [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) দেখুন এবং আমাদের [ডিসকর্ড কমিউনিটি সার্ভার](https://discord.gg/8Tpq4AcN9c) এর `গ্লোবাল-ইউজারস` চ্যানেলে আমাদের একটি মন্তব্য করুন।
## কমিউনিটি এবং যোগাযোগ

View File

@ -194,9 +194,9 @@ docker compose up -d
如果您需要自定义配置,请参考 [.env.example](docker/.env.example) 文件中的注释,并更新 `.env` 文件中对应的值。此外,您可能需要根据您的具体部署环境和需求对 `docker-compose.yaml` 文件本身进行调整,例如更改镜像版本、端口映射或卷挂载。完成任何更改后,请重新运行 `docker-compose up -d`。您可以在[此处](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用环境变量的完整列表。
#### 使用 Helm Chart 部署
#### 使用 Helm Chart 或 Kubernetes 资源清单(YAML)部署
使用 [Helm Chart](https://helm.sh/) 版本或者 YAML 文件,可以在 Kubernetes 上部署 Dify。
使用 [Helm Chart](https://helm.sh/) 版本或者 Kubernetes 资源清单(YAML),可以在 Kubernetes 上部署 Dify。
- [Helm Chart by @LeoQuote](https://github.com/douban/charts/tree/master/charts/dify)
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
@ -204,6 +204,10 @@ docker compose up -d
- [YAML 文件 by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML 文件 (支持 Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### 使用 Terraform 部署
使用 [terraform](https://www.terraform.io/) 一键将 Dify 部署到云平台
@ -219,7 +223,20 @@ docker compose up -d
使用 [CDK](https://aws.amazon.com/cdk/) 将 Dify 部署到 AWS
##### AWS
- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### 使用 阿里云计算巢 部署
使用 [阿里云计算巢](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88) 将 Dify 一键部署到 阿里云
#### 使用 阿里云数据管理(DMS) 部署
使用 [阿里云数据管理(DMS)](https://help.aliyun.com/zh/dms/dify-in-invitational-preview) 将 Dify 一键部署到 阿里云
#### 使用 Azure Devops Pipeline 部署到AKS
使用[Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) 将 Dify 一键部署到 AKS
## Star History
@ -231,7 +248,7 @@ docker compose up -d
对于那些想要贡献代码的人,请参阅我们的[贡献指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。
同时,请考虑通过社交媒体、活动和会议来支持 Dify 的分享。
> 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。
> 我们正在寻找贡献者来帮助将 Dify 翻译成除了中文和英文之外的其他语言。如果您有兴趣帮助,请参阅我们的[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)获取更多信息,并在我们的[Discord 社区服务器](https://discord.gg/8Tpq4AcN9c)的`global-users`频道中留言。
**Contributors**

View File

@ -203,6 +203,7 @@ Falls Sie eine hochverfügbare Konfiguration einrichten möchten, gibt es von de
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Terraform für die Bereitstellung verwenden
@ -219,14 +220,28 @@ Stellen Sie Dify mit nur einem Klick mithilfe von [terraform](https://www.terraf
Bereitstellung von Dify auf AWS mit [CDK](https://aws.amazon.com/cdk/)
##### AWS
- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
Ein-Klick-Bereitstellung von Dify in der Alibaba Cloud mit [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### Verwendung von Azure Devops Pipeline für AKS-Bereitstellung
Stellen Sie Dify mit einem Klick in AKS bereit, indem Sie [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) verwenden
## Contributing
Falls Sie Code beitragen möchten, lesen Sie bitte unseren [Contribution Guide](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md). Gleichzeitig bitten wir Sie, Dify zu unterstützen, indem Sie es in den sozialen Medien teilen und auf Veranstaltungen und Konferenzen präsentieren.
> Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c).
> Wir suchen Mitwirkende, die dabei helfen, Dify in weitere Sprachen zu übersetzen außer Mandarin oder Englisch. Wenn Sie Interesse an einer Mitarbeit haben, lesen Sie bitte die [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) für weitere Informationen und hinterlassen Sie einen Kommentar im `global-users`-Kanal unseres [Discord Community Servers](https://discord.gg/8Tpq4AcN9c).
## Gemeinschaft & Kontakt

View File

@ -203,6 +203,7 @@ Si desea configurar una configuración de alta disponibilidad, la comunidad prop
- [Gráfico Helm por @magicsong](https://github.com/magicsong/ai-charts)
- [Ficheros YAML por @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [Ficheros YAML por @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 ¡NUEVO! Archivos YAML (compatible con Dify v1.6.0) por @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Uso de Terraform para el despliegue
@ -219,7 +220,21 @@ Despliega Dify en una plataforma en la nube con un solo clic utilizando [terrafo
Despliegue Dify en AWS usando [CDK](https://aws.amazon.com/cdk/)
##### AWS
- [AWS CDK por @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK por @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK por @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
Despliega Dify en Alibaba Cloud con un solo clic con [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### Uso de Azure Devops Pipeline para implementar en AKS
Implementa Dify en AKS con un clic usando [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
## Contribuir
@ -227,7 +242,7 @@ Para aquellos que deseen contribuir con código, consulten nuestra [Guía de con
Al mismo tiempo, considera apoyar a Dify compartiéndolo en redes sociales y en eventos y conferencias.
> Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c).
> Estamos buscando colaboradores para ayudar con la traducción de Dify a idiomas que no sean el mandarín o el inglés. Si estás interesado en ayudar, consulta el [README de i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para obtener más información y déjanos un comentario en el canal `global-users` de nuestro [Servidor de Comunidad en Discord](https://discord.gg/8Tpq4AcN9c).
**Contribuidores**

View File

@ -201,6 +201,7 @@ Si vous souhaitez configurer une configuration haute disponibilité, la communau
- [Helm Chart par @magicsong](https://github.com/magicsong/ai-charts)
- [Fichier YAML par @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [Fichier YAML par @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NOUVEAU ! Fichiers YAML (compatible avec Dify v1.6.0) par @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Utilisation de Terraform pour le déploiement
@ -217,7 +218,21 @@ Déployez Dify sur une plateforme cloud en un clic en utilisant [terraform](http
Déployez Dify sur AWS en utilisant [CDK](https://aws.amazon.com/cdk/)
##### AWS
- [AWS CDK par @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK par @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK par @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
Déployez Dify en un clic sur Alibaba Cloud avec [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### Utilisation d'Azure Devops Pipeline pour déployer sur AKS
Déployez Dify sur AKS en un clic en utilisant [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
## Contribuer
@ -225,7 +240,7 @@ Pour ceux qui souhaitent contribuer du code, consultez notre [Guide de contribut
Dans le même temps, veuillez envisager de soutenir Dify en le partageant sur les réseaux sociaux et lors d'événements et de conférences.
> Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c).
> Nous recherchons des contributeurs pour aider à traduire Dify dans des langues autres que le mandarin ou l'anglais. Si vous êtes intéressé à aider, veuillez consulter le [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) pour plus d'informations, et laissez-nous un commentaire dans le canal `global-users` de notre [Serveur communautaire Discord](https://discord.gg/8Tpq4AcN9c).
**Contributeurs**

View File

@ -155,7 +155,7 @@ DifyはオープンソースのLLMアプリケーション開発プラットフ
[こちら](https://dify.ai)のDify Cloudサービスを利用して、セットアップ不要で試すことができます。サンドボックスプランには、200回のGPT-4呼び出しが無料で含まれています。
- **Dify Community Editionのセルフホスティング</br>**
この[スタートガイド](#quick-start)を使用して、ローカル環境でDifyを簡単に実行できます。
この[スタートガイド](#クイックスタート)を使用して、ローカル環境でDifyを簡単に実行できます。
詳しくは[ドキュメント](https://docs.dify.ai)をご覧ください。
- **企業/組織向けのDify</br>**
@ -202,6 +202,7 @@ docker compose up -d
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 新着!YAML ファイル(Dify v1.6.0 対応)by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Terraformを使用したデプロイ
@ -218,7 +219,19 @@ docker compose up -d
[CDK](https://aws.amazon.com/cdk/) を使用して、DifyをAWSにデプロイします
##### AWS
- [@KevinZhaoによるAWS CDK](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [@KevinZhaoによるAWS CDK (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [@tmokmssによるAWS CDK (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) を利用して、DifyをAlibaba Cloudへワンクリックでデプロイできます
#### AKSへのデプロイにAzure Devops Pipelineを使用
[Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)を使用してDifyをAKSにワンクリックでデプロイ
## 貢献
@ -226,7 +239,7 @@ docker compose up -d
同時に、DifyをSNSやイベント、カンファレンスで共有してサポートしていただけると幸いです。
> Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。
> Difyを英語または中国語以外の言語に翻訳してくれる貢献者を募集しています。興味がある場合は、詳細については[i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)を参照してください。また、[Discordコミュニティサーバー](https://discord.gg/8Tpq4AcN9c)の`global-users`チャンネルにコメントを残してください。
**貢献者**

View File

@ -201,6 +201,7 @@ If you'd like to configure a highly-available setup, there are community-contrib
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Terraform atorlugu pilersitsineq
@ -217,7 +218,21 @@ wa'logh nIqHom neH ghun deployment toy'wI' [terraform](https://www.terraform.io/
wa'logh nIqHom neH ghun deployment toy'wI' [CDK](https://aws.amazon.com/cdk/) lo'laH.
##### AWS
- [AWS CDK qachlot @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK qachlot @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK qachlot @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### AKS 'e' Deploy je Azure Devops Pipeline lo'laH
[Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) lo'laH Dify AKS 'e' wa'DIch click 'e' Deploy
## Contributing
@ -225,7 +240,7 @@ For those who'd like to contribute code, see our [Contribution Guide](https://gi
At the same time, please consider supporting Dify by sharing it on social media and at events and conferences.
> We are looking for contributors to help with translating Dify to languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
> We are looking for contributors to help with translating Dify to languages other than Mandarin or English. If you are interested in helping, please see the [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) for more information, and leave us a comment in the `global-users` channel of our [Discord Community Server](https://discord.gg/8Tpq4AcN9c).
**Contributors**

View File

@ -195,6 +195,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
- [Helm Chart by @magicsong](https://github.com/magicsong/ai-charts)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Terraform을 사용한 배포
@ -211,7 +212,21 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
[CDK](https://aws.amazon.com/cdk/)를 사용하여 AWS에 Dify 배포
##### AWS
- [KevinZhao의 AWS CDK](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [KevinZhao의 AWS CDK (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [tmokmss의 AWS CDK (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)를 통해 원클릭으로 Dify를 Alibaba Cloud에 배포할 수 있습니다
#### AKS에 배포하기 위해 Azure Devops Pipeline 사용
[Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)을 사용하여 Dify를 AKS에 원클릭으로 배포
## 기여
@ -219,7 +234,7 @@ Dify를 Kubernetes에 배포하고 프리미엄 스케일링 설정을 구성했
동시에 Dify를 소셜 미디어와 행사 및 컨퍼런스에 공유하여 지원하는 것을 고려해 주시기 바랍니다.
> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요.
> 우리는 Dify를 중국어나 영어 이외의 언어로 번역하는 데 도움을 줄 수 있는 기여자를 찾고 있습니다. 도움을 주고 싶으시다면 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md)에서 더 많은 정보를 확인하시고 [Discord 커뮤니티 서버](https://discord.gg/8Tpq4AcN9c)의 `global-users` 채널에 댓글을 남겨주세요.
**기여자**

View File

@ -200,6 +200,7 @@ Se deseja configurar uma instalação de alta disponibilidade, há [Helm Charts]
- [Helm Chart de @magicsong](https://github.com/magicsong/ai-charts)
- [Arquivo YAML por @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [Arquivo YAML por @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NOVO! Arquivos YAML (Compatível com Dify v1.6.0) por @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Usando o Terraform para Implantação
@ -216,14 +217,28 @@ Implante o Dify na Plataforma Cloud com um único clique usando [terraform](http
Implante o Dify na AWS usando [CDK](https://aws.amazon.com/cdk/)
##### AWS
- [AWS CDK por @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK por @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK por @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
Implante o Dify na Alibaba Cloud com um clique usando o [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### Usando Azure Devops Pipeline para Implantar no AKS
Implante o Dify no AKS com um clique usando [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
## Contribuindo
Para aqueles que desejam contribuir com código, veja nosso [Guia de Contribuição](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md).
Ao mesmo tempo, considere apoiar o Dify compartilhando-o nas redes sociais e em eventos e conferências.
> Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c).
> Estamos buscando contribuidores para ajudar na tradução do Dify para idiomas além de Mandarim e Inglês. Se você tiver interesse em ajudar, consulte o [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) para mais informações e deixe-nos um comentário no canal `global-users` em nosso [Servidor da Comunidade no Discord](https://discord.gg/8Tpq4AcN9c).
**Contribuidores**

View File

@ -201,6 +201,7 @@ Star Dify on GitHub and be instantly notified of new releases.
- [Helm Chart by @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [YAML file by @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [YAML file by @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML files (Supports Dify v1.6.0) by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Uporaba Terraform za uvajanje
@ -217,7 +218,21 @@ namestite Dify v Cloud Platform z enim klikom z uporabo [terraform](https://www.
Uvedite Dify v AWS z uporabo [CDK](https://aws.amazon.com/cdk/)
##### AWS
- [AWS CDK by @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK by @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
Z enim klikom namestite Dify na Alibaba Cloud z [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### Uporaba Azure Devops Pipeline za uvajanje v AKS
Z enim klikom namestite Dify v AKS z uporabo [Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
## Prispevam

View File

@ -194,6 +194,7 @@ Yüksek kullanılabilirliğe sahip bir kurulum yapılandırmak isterseniz, Dify'
- [@BorisPolonsky tarafından Helm Chart](https://github.com/BorisPolonsky/dify-helm)
- [@Winson-030 tarafından YAML dosyası](https://github.com/Winson-030/dify-kubernetes)
- [@wyy-holding tarafından YAML dosyası](https://github.com/wyy-holding/dify-k8s)
- [🚀 YENİ! YAML dosyaları (Dify v1.6.0 destekli) @Zhoneym tarafından](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Dağıtım için Terraform Kullanımı
@ -210,14 +211,28 @@ Dify'ı bulut platformuna tek tıklamayla dağıtın [terraform](https://www.ter
[CDK](https://aws.amazon.com/cdk/) kullanarak Dify'ı AWS'ye dağıtın
##### AWS
- [AWS CDK tarafından @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK tarafından @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK tarafından @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
[Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/) kullanarak Dify'ı tek tıkla Alibaba Cloud'a dağıtın
#### AKS'ye Dağıtım için Azure Devops Pipeline Kullanımı
[Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) kullanarak Dify'ı tek tıkla AKS'ye dağıtın
## Katkıda Bulunma
Kod katkısında bulunmak isteyenler için [Katkı Kılavuzumuza](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md) bakabilirsiniz.
Aynı zamanda, lütfen Dify'ı sosyal medyada, etkinliklerde ve konferanslarda paylaşarak desteklemeyi düşünün.
> Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın.
> Dify'ı Mandarin veya İngilizce dışındaki dillere çevirmemize yardımcı olacak katkıda bulunanlara ihtiyacımız var. Yardımcı olmakla ilgileniyorsanız, lütfen daha fazla bilgi için [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) dosyasına bakın ve [Discord Topluluk Sunucumuzdaki](https://discord.gg/8Tpq4AcN9c) `global-users` kanalında bize bir yorum bırakın.
**Katkıda Bulunanlar**

View File

@ -197,12 +197,13 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify
如果您需要自定義配置,請參考我們的 [.env.example](docker/.env.example) 文件中的註釋,並在您的 `.env` 文件中更新相應的值。此外,根據您特定的部署環境和需求,您可能需要調整 `docker-compose.yaml` 文件本身,例如更改映像版本、端口映射或卷掛載。進行任何更改後,請重新運行 `docker-compose up -d`。您可以在[這裡](https://docs.dify.ai/getting-started/install-self-hosted/environments)找到可用環境變數的完整列表。
如果您想配置高可用性設置,社區貢獻的 [Helm Charts](https://helm.sh/) 和 YAML 文件允許在 Kubernetes 上部署 Dify。
如果您想配置高可用性設置,社區貢獻的 [Helm Charts](https://helm.sh/) 和 Kubernetes 資源清單(YAML)允許在 Kubernetes 上部署 Dify。
- [由 @LeoQuote 提供的 Helm Chart](https://github.com/douban/charts/tree/master/charts/dify)
- [由 @BorisPolonsky 提供的 Helm Chart](https://github.com/BorisPolonsky/dify-helm)
- [由 @Winson-030 提供的 YAML 文件](https://github.com/Winson-030/dify-kubernetes)
- [由 @wyy-holding 提供的 YAML 文件](https://github.com/wyy-holding/dify-k8s)
- [🚀 NEW! YAML 檔案(支援 Dify v1.6.0)by @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
### 使用 Terraform 進行部署
@ -222,14 +223,28 @@ Dify 的所有功能都提供相應的 API,因此您可以輕鬆地將 Dify
### AWS
- [由 @KevinZhao 提供的 AWS CDK](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [由 @KevinZhao 提供的 AWS CDK (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [由 @tmokmss 提供的 AWS CDK (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### 使用 阿里云计算巢進行部署
[阿里云](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### 使用 阿里雲數據管理(DMS) 進行部署
透過 [阿里雲數據管理(DMS)](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/),一鍵將 Dify 部署至阿里雲
#### 使用 Azure Devops Pipeline 部署到AKS
使用[Azure Devops Pipeline Helm Chart by @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS) 將 Dify 一鍵部署到 AKS
## 貢獻
對於想要貢獻程式碼的開發者,請參閱我們的[貢獻指南](https://github.com/langgenius/dify/blob/main/CONTRIBUTING.md)。
同時,也請考慮透過在社群媒體和各種活動與會議上分享 Dify 來支持我們。
> 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。
> 我們正在尋找貢獻者協助將 Dify 翻譯成中文和英文以外的語言。如果您有興趣幫忙,請查看 [i18n README](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) 獲取更多資訊,並在我們的 [Discord 社群伺服器](https://discord.gg/8Tpq4AcN9c) 的 `global-users` 頻道留言給我們。
## 社群與聯絡方式

View File

@ -196,6 +196,7 @@ Nếu bạn muốn cấu hình một cài đặt có độ sẵn sàng cao, có
- [Helm Chart bởi @BorisPolonsky](https://github.com/BorisPolonsky/dify-helm)
- [Tệp YAML bởi @Winson-030](https://github.com/Winson-030/dify-kubernetes)
- [Tệp YAML bởi @wyy-holding](https://github.com/wyy-holding/dify-k8s)
- [🚀 MỚI! Tệp YAML (Hỗ trợ Dify v1.6.0) bởi @Zhoneym](https://github.com/Zhoneym/DifyAI-Kubernetes)
#### Sử dụng Terraform để Triển khai
@ -212,7 +213,22 @@ Triển khai Dify lên nền tảng đám mây với một cú nhấp chuột b
Triển khai Dify trên AWS bằng [CDK](https://aws.amazon.com/cdk/)
##### AWS
- [AWS CDK bởi @KevinZhao](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK bởi @KevinZhao (EKS based)](https://github.com/aws-samples/solution-for-deploying-dify-on-aws)
- [AWS CDK bởi @tmokmss (ECS based)](https://github.com/aws-samples/dify-self-hosted-on-aws)
#### Alibaba Cloud
[Alibaba Cloud Computing Nest](https://computenest.console.aliyun.com/service/instance/create/default?type=user&ServiceName=Dify%E7%A4%BE%E5%8C%BA%E7%89%88)
#### Alibaba Cloud Data Management
Triển khai Dify lên Alibaba Cloud chỉ với một cú nhấp chuột bằng [Alibaba Cloud Data Management](https://www.alibabacloud.com/help/en/dms/dify-in-invitational-preview/)
#### Sử dụng Azure Devops Pipeline để Triển khai lên AKS
Triển khai Dify lên AKS chỉ với một cú nhấp chuột bằng [Azure Devops Pipeline Helm Chart bởi @LeoZhang](https://github.com/Ruiruiz30/Dify-helm-chart-AKS)
## Đóng góp
@ -220,7 +236,7 @@ Triển khai Dify trên AWS bằng [CDK](https://aws.amazon.com/cdk/)
Đồng thời, vui lòng xem xét hỗ trợ Dify bằng cách chia sẻ nó trên mạng xã hội và tại các sự kiện và hội nghị.
> Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi.
> Chúng tôi đang tìm kiếm người đóng góp để giúp dịch Dify sang các ngôn ngữ khác ngoài tiếng Trung hoặc tiếng Anh. Nếu bạn quan tâm đến việc giúp đỡ, vui lòng xem [README i18n](https://github.com/langgenius/dify/blob/main/web/i18n-config/README.md) để biết thêm thông tin và để lại bình luận cho chúng tôi trong kênh `global-users` của [Máy chủ Cộng đồng Discord](https://discord.gg/8Tpq4AcN9c) của chúng tôi.
**Người đóng góp**

View File

@ -4,18 +4,28 @@
# Alternatively you can set it with `SECRET_KEY` environment variable.
SECRET_KEY=
# Ensure UTF-8 encoding
LANG=en_US.UTF-8
LC_ALL=en_US.UTF-8
PYTHONIOENCODING=utf-8
# Console API base URL
CONSOLE_API_URL=http://127.0.0.1:5001
CONSOLE_WEB_URL=http://127.0.0.1:3000
CONSOLE_API_URL=http://localhost:5001
CONSOLE_WEB_URL=http://localhost:3000
# Service API base URL
SERVICE_API_URL=http://127.0.0.1:5001
SERVICE_API_URL=http://localhost:5001
# Web APP base URL
APP_WEB_URL=http://127.0.0.1:3000
APP_WEB_URL=http://localhost:3000
# Files URL
FILES_URL=http://127.0.0.1:5001
FILES_URL=http://localhost:5001
# INTERNAL_FILES_URL is used for plugin daemon communication within Docker network.
# Set this to the internal Docker service URL for proper plugin file access.
# Example: INTERNAL_FILES_URL=http://api:5001
INTERNAL_FILES_URL=http://127.0.0.1:5001
# The time in seconds after the signature is rejected
FILES_ACCESS_TIMEOUT=300
@ -32,6 +42,15 @@ REDIS_PORT=6379
REDIS_USERNAME=
REDIS_PASSWORD=difyai123456
REDIS_USE_SSL=false
# SSL configuration for Redis (when REDIS_USE_SSL=true)
REDIS_SSL_CERT_REQS=CERT_NONE
# Options: CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
REDIS_SSL_CA_CERTS=
# Path to CA certificate file for SSL verification
REDIS_SSL_CERTFILE=
# Path to client certificate file for SSL authentication
REDIS_SSL_KEYFILE=
# Path to client private key file for SSL authentication
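A sketch of how these SSL settings typically map onto a redis-py connection (assuming the redis-py client; the mapping and wiring here are illustrative, not Dify's actual connection code):

```python
import os
import redis

# redis-py expects "none" / "optional" / "required" for ssl_cert_reqs
_CERT_REQS = {"CERT_NONE": "none", "CERT_OPTIONAL": "optional", "CERT_REQUIRED": "required"}

client = redis.Redis(
    host=os.getenv("REDIS_HOST", "localhost"),
    port=int(os.getenv("REDIS_PORT", "6379")),
    password=os.getenv("REDIS_PASSWORD") or None,
    ssl=os.getenv("REDIS_USE_SSL", "false").lower() == "true",
    ssl_cert_reqs=_CERT_REQS.get(os.getenv("REDIS_SSL_CERT_REQS", "CERT_NONE"), "none"),
    ssl_ca_certs=os.getenv("REDIS_SSL_CA_CERTS") or None,
    ssl_certfile=os.getenv("REDIS_SSL_CERTFILE") or None,
    ssl_keyfile=os.getenv("REDIS_SSL_KEYFILE") or None,
)
```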
REDIS_DB=0
# redis Sentinel configuration.
@ -49,7 +68,7 @@ REDIS_CLUSTERS_PASSWORD=
# celery configuration
CELERY_BROKER_URL=redis://:difyai123456@localhost:${REDIS_PORT}/1
CELERY_BACKEND=redis
# PostgreSQL database configuration
DB_USERNAME=postgres
DB_PASSWORD=difyai123456
@ -133,12 +152,14 @@ SUPABASE_API_KEY=your-access-key
SUPABASE_URL=your-server-url
# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
WEB_API_CORS_ALLOW_ORIGINS=http://localhost:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://localhost:3000,*
# Vector database configuration
# support: weaviate, qdrant, milvus, myscale, relyt, pgvecto_rs, pgvector, pgvector, chroma, opensearch, tidb_vector, couchbase, vikingdb, upstash, lindorm, oceanbase, opengauss, tablestore
# Supported values are `weaviate`, `qdrant`, `milvus`, `myscale`, `relyt`, `pgvector`, `pgvecto-rs`, `chroma`, `opensearch`, `oracle`, `tencent`, `elasticsearch`, `elasticsearch-ja`, `analyticdb`, `couchbase`, `vikingdb`, `oceanbase`, `opengauss`, `tablestore`,`vastbase`,`tidb`,`tidb_on_qdrant`,`baidu`,`lindorm`,`huawei_cloud`,`upstash`, `matrixone`.
VECTOR_STORE=weaviate
# Prefix used to create collection name in vector database
VECTOR_INDEX_NAME_PREFIX=Vector_index
# Weaviate configuration
WEAVIATE_ENDPOINT=http://localhost:8080
@ -220,6 +241,7 @@ TABLESTORE_ENDPOINT=https://instance-name.cn-hangzhou.ots.aliyuncs.com
TABLESTORE_INSTANCE_NAME=instance-name
TABLESTORE_ACCESS_KEY_ID=xxx
TABLESTORE_ACCESS_KEY_SECRET=xxx
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE=false
# Tidb Vector configuration
TIDB_VECTOR_HOST=xxx.eu-central-1.xxx.aws.tidbcloud.com
@ -294,6 +316,13 @@ VIKINGDB_SCHEMA=http
VIKINGDB_CONNECTION_TIMEOUT=30
VIKINGDB_SOCKET_TIMEOUT=30
# Matrixone configuration
MATRIXONE_HOST=127.0.0.1
MATRIXONE_PORT=6001
MATRIXONE_USER=dump
MATRIXONE_PASSWORD=111
MATRIXONE_DATABASE=dify
# Lindorm configuration
LINDORM_URL=http://ld-*******************-proxy-search-pub.lindorm.aliyuncs.com:30070
LINDORM_USERNAME=admin
@ -332,9 +361,11 @@ PROMPT_GENERATION_MAX_TOKENS=512
CODE_GENERATION_MAX_TOKENS=1024
PLUGIN_BASED_TOKEN_COUNTING_ENABLED=false
# Mail configuration, support: resend, smtp
# Mail configuration, support: resend, smtp, sendgrid
MAIL_TYPE=
# If using SendGrid, use the 'from' field for authentication if necessary.
MAIL_DEFAULT_SEND_FROM=no-reply <no-reply@dify.ai>
# resend configuration
RESEND_API_KEY=
RESEND_API_URL=https://api.resend.com
# smtp configuration
@ -344,7 +375,8 @@ SMTP_USERNAME=123
SMTP_PASSWORD=abc
SMTP_USE_TLS=true
SMTP_OPPORTUNISTIC_TLS=false
# SendGrid configuration
SENDGRID_API_KEY=
# Sentry configuration
SENTRY_DSN=
@ -434,6 +466,26 @@ MAX_VARIABLE_SIZE=204800
# hybrid: Save new data to object storage, read from both object storage and RDBMS
WORKFLOW_NODE_EXECUTION_STORAGE=rdbms
# Repository configuration
# Core workflow execution repository implementation
CORE_WORKFLOW_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository
# Core workflow node execution repository implementation
CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY=core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository
# API workflow node execution repository implementation
API_WORKFLOW_NODE_EXECUTION_REPOSITORY=repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository
# API workflow run repository implementation
API_WORKFLOW_RUN_REPOSITORY=repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository
# Workflow log cleanup configuration
# Enable automatic cleanup of workflow run logs to manage database size
WORKFLOW_LOG_CLEANUP_ENABLED=true
# Number of days to retain workflow run logs (default: 30 days)
WORKFLOW_LOG_RETENTION_DAYS=30
# Batch size for workflow log cleanup operations (default: 100)
WORKFLOW_LOG_CLEANUP_BATCH_SIZE=100
# App configuration
APP_MAX_EXECUTION_TIME=1200
APP_MAX_ACTIVE_REQUESTS=0
@ -441,6 +493,16 @@ APP_MAX_ACTIVE_REQUESTS=0
# Celery beat configuration
CELERY_BEAT_SCHEDULER_TIME=1
# Celery schedule tasks configuration
ENABLE_CLEAN_EMBEDDING_CACHE_TASK=false
ENABLE_CLEAN_UNUSED_DATASETS_TASK=false
ENABLE_CREATE_TIDB_SERVERLESS_TASK=false
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK=false
ENABLE_CLEAN_MESSAGES=false
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK=false
ENABLE_DATASETS_QUEUE_MONITOR=false
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK=true
# Position configuration
POSITION_TOOL_PINS=
POSITION_TOOL_INCLUDES=
@ -467,6 +529,8 @@ ENDPOINT_URL_TEMPLATE=http://localhost:5002/e/{hook_id}
# Reset password token expiry minutes
RESET_PASSWORD_TOKEN_EXPIRY_MINUTES=5
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES=5
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES=5
CREATE_TIDB_SERVICE_JOB_ENABLED=false
@ -477,6 +541,8 @@ LOGIN_LOCKOUT_DURATION=86400
# Enable OpenTelemetry
ENABLE_OTEL=false
OTLP_TRACE_ENDPOINT=
OTLP_METRIC_ENDPOINT=
OTLP_BASE_ENDPOINT=http://localhost:4318
OTLP_API_KEY=
OTEL_EXPORTER_OTLP_PROTOCOL=

View File

@ -1,6 +1,4 @@
exclude = [
"migrations/*",
]
exclude = ["migrations/*"]
line-length = 120
[format]
@ -9,14 +7,14 @@ quote-style = "double"
[lint]
preview = false
select = [
"B", # flake8-bugbear rules
"C4", # flake8-comprehensions
"E", # pycodestyle E rules
"F", # pyflakes rules
"FURB", # refurb rules
"I", # isort rules
"N", # pep8-naming
"PT", # flake8-pytest-style rules
"B", # flake8-bugbear rules
"C4", # flake8-comprehensions
"E", # pycodestyle E rules
"F", # pyflakes rules
"FURB", # refurb rules
"I", # isort rules
"N", # pep8-naming
"PT", # flake8-pytest-style rules
"PLC0208", # iteration-over-set
"PLC0414", # useless-import-alias
"PLE0604", # invalid-all-object
@ -24,19 +22,19 @@ select = [
"PLR0402", # manual-from-import
"PLR1711", # useless-return
"PLR1714", # repeated-equality-comparison
"RUF013", # implicit-optional
"RUF019", # unnecessary-key-check
"RUF100", # unused-noqa
"RUF101", # redirected-noqa
"RUF200", # invalid-pyproject-toml
"RUF022", # unsorted-dunder-all
"S506", # unsafe-yaml-load
"SIM", # flake8-simplify rules
"TRY400", # error-instead-of-exception
"TRY401", # verbose-log-message
"UP", # pyupgrade rules
"W191", # tab-indentation
"W605", # invalid-escape-sequence
"RUF013", # implicit-optional
"RUF019", # unnecessary-key-check
"RUF100", # unused-noqa
"RUF101", # redirected-noqa
"RUF200", # invalid-pyproject-toml
"RUF022", # unsorted-dunder-all
"S506", # unsafe-yaml-load
"SIM", # flake8-simplify rules
"TRY400", # error-instead-of-exception
"TRY401", # verbose-log-message
"UP", # pyupgrade rules
"W191", # tab-indentation
"W605", # invalid-escape-sequence
# security related linting rules
# RCE protection (sort of)
"S102", # exec-builtin, disallow use of `exec`
@ -44,39 +42,42 @@ select = [
"S301", # suspicious-pickle-usage, disallow use of `pickle` and its wrappers.
"S302", # suspicious-marshal-usage, disallow use of `marshal` module
"S311", # suspicious-non-cryptographic-random-usage
"G001", # don't use str format to logging messages
"G004", # don't use f-strings to format logging messages
]
ignore = [
"E402", # module-import-not-at-top-of-file
"E711", # none-comparison
"E712", # true-false-comparison
"E721", # type-comparison
"E722", # bare-except
"F821", # undefined-name
"F841", # unused-variable
"E402", # module-import-not-at-top-of-file
"E711", # none-comparison
"E712", # true-false-comparison
"E721", # type-comparison
"E722", # bare-except
"F821", # undefined-name
"F841", # unused-variable
"FURB113", # repeated-append
"FURB152", # math-constant
"UP007", # non-pep604-annotation
"UP032", # f-string
"UP045", # non-pep604-annotation-optional
"B005", # strip-with-multi-characters
"B006", # mutable-argument-default
"B007", # unused-loop-control-variable
"B026", # star-arg-unpacking-after-keyword-arg
"B903", # class-as-data-structure
"B904", # raise-without-from-inside-except
"B905", # zip-without-explicit-strict
"N806", # non-lowercase-variable-in-function
"N815", # mixed-case-variable-in-class-scope
"PT011", # pytest-raises-too-broad
"SIM102", # collapsible-if
"SIM103", # needless-bool
"SIM105", # suppressible-exception
"SIM107", # return-in-try-except-finally
"SIM108", # if-else-block-instead-of-if-exp
"SIM113", # enumerate-for-loop
"SIM117", # multiple-with-statements
"SIM210", # if-expr-with-true-false
"UP007", # non-pep604-annotation
"UP032", # f-string
"UP045", # non-pep604-annotation-optional
"B005", # strip-with-multi-characters
"B006", # mutable-argument-default
"B007", # unused-loop-control-variable
"B026", # star-arg-unpacking-after-keyword-arg
"B903", # class-as-data-structure
"B904", # raise-without-from-inside-except
"B905", # zip-without-explicit-strict
"N806", # non-lowercase-variable-in-function
"N815", # mixed-case-variable-in-class-scope
"PT011", # pytest-raises-too-broad
"SIM102", # collapsible-if
"SIM103", # needless-bool
"SIM105", # suppressible-exception
"SIM107", # return-in-try-except-finally
"SIM108", # if-else-block-instead-of-if-exp
"SIM113", # enumerate-for-loop
"SIM117", # multiple-with-statements
"SIM210", # if-expr-with-true-false
"UP038", # deprecated and not recommended by Ruff, https://docs.astral.sh/ruff/rules/non-pep604-isinstance/
]
[lint.per-file-ignores]
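With the rule set above, a targeted `# noqa` comment is the usual way to silence a single finding without disabling the rule globally. A small illustration (the justification is hypothetical):

```python
import random

token = random.random()  # flagged by S311 (non-cryptographic random)
jitter = random.random()  # noqa: S311
```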

View File

@ -4,7 +4,7 @@ FROM python:3.12-slim-bookworm AS base
WORKDIR /app/api
# Install uv
ENV UV_VERSION=0.7.11
ENV UV_VERSION=0.8.9
RUN pip install --no-cache-dir uv==${UV_VERSION}
@ -19,7 +19,7 @@ RUN apt-get update \
# Install Python dependencies
COPY pyproject.toml uv.lock ./
RUN uv sync --locked
RUN uv sync --locked --no-dev
# production stage
FROM base AS production
@ -37,6 +37,11 @@ EXPOSE 5001
# set timezone
ENV TZ=UTC
# Set UTF-8 locale
ENV LANG=en_US.UTF-8
ENV LC_ALL=en_US.UTF-8
ENV PYTHONIOENCODING=utf-8
WORKDIR /app/api
RUN \
@ -47,6 +52,8 @@ RUN \
curl nodejs libgmp-dev libmpfr-dev libmpc-dev \
# For Security
expat libldap-2.5-0 perl libsqlite3-0 zlib1g \
# install fonts to support the use of tools like pypdfium2
fonts-noto-cjk \
# install a package to improve the accuracy of guessing mime type and file extension
media-types \
# install libmagic to support MIME type guessing via python-magic

View File

@ -74,7 +74,12 @@
10. If you need to handle and debug the async tasks (e.g. dataset importing and document indexing), please start the worker service.
```bash
uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion
uv run celery -A app.celery worker -P gevent -c 1 --loglevel INFO -Q dataset,generation,mail,ops_trace,app_deletion,plugin,workflow_storage
```
In addition, if you want to debug the Celery scheduled tasks, run the following command in another terminal:
```bash
uv run celery -A app.celery beat
```
## Testing

View File

@ -32,7 +32,7 @@ def create_app() -> DifyApp:
initialize_extensions(app)
end_time = time.perf_counter()
if dify_config.DEBUG:
logging.info(f"Finished create_app ({round((end_time - start_time) * 1000, 2)} ms)")
logging.info("Finished create_app (%s ms)", round((end_time - start_time) * 1000, 2))
return app
@ -51,6 +51,7 @@ def initialize_extensions(app: DifyApp):
ext_login,
ext_mail,
ext_migrate,
ext_orjson,
ext_otel,
ext_proxy_fix,
ext_redis,
@ -67,6 +68,7 @@ def initialize_extensions(app: DifyApp):
ext_logging,
ext_warnings,
ext_import_modules,
ext_orjson,
ext_set_secretkey,
ext_compress,
ext_code_based_extension,
@ -91,14 +93,14 @@ def initialize_extensions(app: DifyApp):
is_enabled = ext.is_enabled() if hasattr(ext, "is_enabled") else True
if not is_enabled:
if dify_config.DEBUG:
logging.info(f"Skipped {short_name}")
logging.info("Skipped %s", short_name)
continue
start_time = time.perf_counter()
ext.init_app(app)
end_time = time.perf_counter()
if dify_config.DEBUG:
logging.info(f"Loaded {short_name} ({round((end_time - start_time) * 1000, 2)} ms)")
logging.info("Loaded %s (%s ms)", short_name, round((end_time - start_time) * 1000, 2))
def create_migrations_app():
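The f-string-to-%-style conversions in this factory follow the G001/G004 rules selected in ruff.toml above. A short sketch of why lazy formatting is preferred:

```python
import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

payload = {"rows": list(range(1000))}

# Eager: the f-string is rendered even though INFO records are filtered out.
logger.info(f"payload={payload}")  # noqa: G004

# Lazy: arguments are stored and only formatted if a handler emits the record.
logger.info("payload=%s", payload)
```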

View File

@ -2,19 +2,23 @@ import base64
import json
import logging
import secrets
from typing import Optional
from typing import Any, Optional
import click
import sqlalchemy as sa
from flask import current_app
from pydantic import TypeAdapter
from sqlalchemy import select
from werkzeug.exceptions import NotFound
from sqlalchemy.exc import SQLAlchemyError
from configs import dify_config
from constants.languages import languages
from core.plugin.entities.plugin import ToolProviderID
from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.datasource.vdb.vector_type import VectorType
from core.rag.index_processor.constant.built_in_field import BuiltInField
from core.rag.models.document import Document
from core.tools.utils.system_oauth_encryption import encrypt_system_oauth_params
from events.app_event import app_was_created
from extensions.ext_database import db
from extensions.ext_redis import redis_client
@ -27,10 +31,12 @@ from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, D
from models.dataset import Document as DatasetDocument
from models.model import Account, App, AppAnnotationSetting, AppMode, Conversation, MessageAnnotation
from models.provider import Provider, ProviderModel
from models.tools import ToolOAuthSystemClient
from services.account_service import AccountService, RegisterService, TenantService
from services.clear_free_plan_tenant_expired_logs import ClearFreePlanTenantExpiredLogs
from services.plugin.data_migration import PluginDataMigration
from services.plugin.plugin_migration import PluginMigration
from tasks.remove_app_and_related_data_task import delete_draft_variables_batch
@click.command("reset-password", help="Reset the account password.")
@ -46,16 +52,16 @@ def reset_password(email, new_password, password_confirm):
click.echo(click.style("Passwords do not match.", fg="red"))
return
account = db.session.query(Account).filter(Account.email == email).one_or_none()
account = db.session.query(Account).where(Account.email == email).one_or_none()
if not account:
click.echo(click.style("Account not found for email: {}".format(email), fg="red"))
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return
try:
valid_password(new_password)
except:
click.echo(click.style("Invalid password. Must match {}".format(password_pattern), fg="red"))
click.echo(click.style(f"Invalid password. Must match {password_pattern}", fg="red"))
return
# generate password salt
@ -85,16 +91,16 @@ def reset_email(email, new_email, email_confirm):
click.echo(click.style("New emails do not match.", fg="red"))
return
account = db.session.query(Account).filter(Account.email == email).one_or_none()
account = db.session.query(Account).where(Account.email == email).one_or_none()
if not account:
click.echo(click.style("Account not found for email: {}".format(email), fg="red"))
click.echo(click.style(f"Account not found for email: {email}", fg="red"))
return
try:
email_validate(new_email)
except:
click.echo(click.style("Invalid email: {}".format(new_email), fg="red"))
click.echo(click.style(f"Invalid email: {new_email}", fg="red"))
return
account.email = new_email
@ -132,13 +138,13 @@ def reset_encrypt_key_pair():
tenant.encrypt_public_key = generate_key_pair(tenant.id)
db.session.query(Provider).filter(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
db.session.query(ProviderModel).filter(ProviderModel.tenant_id == tenant.id).delete()
db.session.query(Provider).where(Provider.provider_type == "custom", Provider.tenant_id == tenant.id).delete()
db.session.query(ProviderModel).where(ProviderModel.tenant_id == tenant.id).delete()
db.session.commit()
click.echo(
click.style(
"Congratulations! The asymmetric key pair of workspace {} has been reset.".format(tenant.id),
f"Congratulations! The asymmetric key pair of workspace {tenant.id} has been reset.",
fg="green",
)
)
@ -168,7 +174,7 @@ def migrate_annotation_vector_database():
per_page = 50
apps = (
db.session.query(App)
.filter(App.status == "normal")
.where(App.status == "normal")
.order_by(App.created_at.desc())
.limit(per_page)
.offset((page - 1) * per_page)
@ -176,8 +182,8 @@ def migrate_annotation_vector_database():
)
if not apps:
break
except NotFound:
break
except SQLAlchemyError:
raise
page += 1
for app in apps:
@ -186,25 +192,25 @@ def migrate_annotation_vector_database():
f"Processing the {total_count} app {app.id}. " + f"{create_count} created, {skipped_count} skipped."
)
try:
click.echo("Creating app annotation index: {}".format(app.id))
click.echo(f"Creating app annotation index: {app.id}")
app_annotation_setting = (
db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app.id).first()
db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == app.id).first()
)
if not app_annotation_setting:
skipped_count = skipped_count + 1
click.echo("App annotation setting disabled: {}".format(app.id))
click.echo(f"App annotation setting disabled: {app.id}")
continue
# get dataset_collection_binding info
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
.filter(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.where(DatasetCollectionBinding.id == app_annotation_setting.collection_binding_id)
.first()
)
if not dataset_collection_binding:
click.echo("App annotation collection binding not found: {}".format(app.id))
click.echo(f"App annotation collection binding not found: {app.id}")
continue
annotations = db.session.query(MessageAnnotation).filter(MessageAnnotation.app_id == app.id).all()
annotations = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app.id).all()
dataset = Dataset(
id=app.id,
tenant_id=app.tenant_id,
@ -248,9 +254,7 @@ def migrate_annotation_vector_database():
create_count += 1
except Exception as e:
click.echo(
click.style(
"Error creating app annotation index: {} {}".format(e.__class__.__name__, str(e)), fg="red"
)
click.style(f"Error creating app annotation index: {e.__class__.__name__} {str(e)}", fg="red")
)
continue
@ -281,6 +285,7 @@ def migrate_knowledge_vector_database():
VectorType.ELASTICSEARCH,
VectorType.OPENGAUSS,
VectorType.TABLESTORE,
VectorType.MATRIXONE,
}
lower_collection_vector_types = {
VectorType.ANALYTICDB,
@ -300,12 +305,12 @@ def migrate_knowledge_vector_database():
while True:
try:
stmt = (
select(Dataset).filter(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc())
select(Dataset).where(Dataset.indexing_technique == "high_quality").order_by(Dataset.created_at.desc())
)
datasets = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
except NotFound:
break
except SQLAlchemyError:
raise
page += 1
for dataset in datasets:
@ -314,7 +319,7 @@ def migrate_knowledge_vector_database():
f"Processing the {total_count} dataset {dataset.id}. {create_count} created, {skipped_count} skipped."
)
try:
click.echo("Creating dataset vector database index: {}".format(dataset.id))
click.echo(f"Creating dataset vector database index: {dataset.id}")
if dataset.index_struct_dict:
if dataset.index_struct_dict["type"] == vector_type:
skipped_count = skipped_count + 1
@ -327,7 +332,7 @@ def migrate_knowledge_vector_database():
if dataset.collection_binding_id:
dataset_collection_binding = (
db.session.query(DatasetCollectionBinding)
.filter(DatasetCollectionBinding.id == dataset.collection_binding_id)
.where(DatasetCollectionBinding.id == dataset.collection_binding_id)
.one_or_none()
)
if dataset_collection_binding:
@ -362,7 +367,7 @@ def migrate_knowledge_vector_database():
dataset_documents = (
db.session.query(DatasetDocument)
.filter(
.where(
DatasetDocument.dataset_id == dataset.id,
DatasetDocument.indexing_status == "completed",
DatasetDocument.enabled == True,
@ -376,7 +381,7 @@ def migrate_knowledge_vector_database():
for dataset_document in dataset_documents:
segments = (
db.session.query(DocumentSegment)
.filter(
.where(
DocumentSegment.document_id == dataset_document.id,
DocumentSegment.status == "completed",
DocumentSegment.enabled == True,
@ -418,9 +423,7 @@ def migrate_knowledge_vector_database():
create_count += 1
except Exception as e:
db.session.rollback()
click.echo(
click.style("Error creating dataset index: {} {}".format(e.__class__.__name__, str(e)), fg="red")
)
click.echo(click.style(f"Error creating dataset index: {e.__class__.__name__} {str(e)}", fg="red"))
continue
click.echo(
@ -456,14 +459,14 @@ def convert_to_agent_apps():
"""
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query))
rs = conn.execute(sa.text(sql_query))
apps = []
for i in rs:
app_id = str(i.id)
if app_id not in proceeded_app_ids:
proceeded_app_ids.append(app_id)
app = db.session.query(App).filter(App.id == app_id).first()
app = db.session.query(App).where(App.id == app_id).first()
if app is not None:
apps.append(app)
@ -471,23 +474,23 @@ def convert_to_agent_apps():
break
for app in apps:
click.echo("Converting app: {}".format(app.id))
click.echo(f"Converting app: {app.id}")
try:
app.mode = AppMode.AGENT_CHAT.value
db.session.commit()
# update conversation mode to agent
db.session.query(Conversation).filter(Conversation.app_id == app.id).update(
db.session.query(Conversation).where(Conversation.app_id == app.id).update(
{Conversation.mode: AppMode.AGENT_CHAT.value}
)
db.session.commit()
click.echo(click.style("Converted app: {}".format(app.id), fg="green"))
click.echo(click.style(f"Converted app: {app.id}", fg="green"))
except Exception as e:
click.echo(click.style("Convert app error: {} {}".format(e.__class__.__name__, str(e)), fg="red"))
click.echo(click.style(f"Convert app error: {e.__class__.__name__} {str(e)}", fg="red"))
click.echo(click.style("Conversion complete. Converted {} agent apps.".format(len(proceeded_app_ids)), fg="green"))
click.echo(click.style(f"Conversion complete. Converted {len(proceeded_app_ids)} agent apps.", fg="green"))
@click.command("add-qdrant-index", help="Add Qdrant index.")
@ -555,12 +558,12 @@ def old_metadata_migration():
try:
stmt = (
select(DatasetDocument)
.filter(DatasetDocument.doc_metadata.is_not(None))
.where(DatasetDocument.doc_metadata.is_not(None))
.order_by(DatasetDocument.created_at.desc())
)
documents = db.paginate(select=stmt, page=page, per_page=50, max_per_page=50, error_out=False)
except NotFound:
break
except SQLAlchemyError:
raise
if not documents:
break
for document in documents:
@ -573,7 +576,7 @@ def old_metadata_migration():
else:
dataset_metadata = (
db.session.query(DatasetMetadata)
.filter(DatasetMetadata.dataset_id == document.dataset_id, DatasetMetadata.name == key)
.where(DatasetMetadata.dataset_id == document.dataset_id, DatasetMetadata.name == key)
.first()
)
if not dataset_metadata:
@ -597,7 +600,7 @@ def old_metadata_migration():
else:
dataset_metadata_binding = (
db.session.query(DatasetMetadataBinding) # type: ignore
.filter(
.where(
DatasetMetadataBinding.dataset_id == document.dataset_id,
DatasetMetadataBinding.document_id == document.id,
DatasetMetadataBinding.metadata_id == dataset_metadata.id,
@ -660,7 +663,7 @@ def create_tenant(email: str, language: Optional[str] = None, name: Optional[str
click.echo(
click.style(
"Account and tenant created.\nAccount: {}\nPassword: {}".format(email, new_password),
f"Account and tenant created.\nAccount: {email}\nPassword: {new_password}",
fg="green",
)
)
@ -701,7 +704,7 @@ def fix_app_site_missing():
sql = """select apps.id as id from apps left join sites on sites.app_id=apps.id
where sites.id is null limit 1000"""
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql))
rs = conn.execute(sa.text(sql))
processed_count = 0
for i in rs:
@ -712,7 +715,7 @@ where sites.id is null limit 1000"""
continue
try:
app = db.session.query(App).filter(App.id == app_id).first()
app = db.session.query(App).where(App.id == app_id).first()
if not app:
print(f"App {app_id} not found")
continue
@ -721,16 +724,16 @@ where sites.id is null limit 1000"""
if tenant:
accounts = tenant.get_accounts()
if not accounts:
print("Fix failed for app {}".format(app.id))
print(f"Fix failed for app {app.id}")
continue
account = accounts[0]
print("Fixing missing site for app {}".format(app.id))
print(f"Fixing missing site for app {app.id}")
app_was_created.send(app, account=account)
except Exception:
failed_app_ids.append(app_id)
click.echo(click.style("Failed to fix missing site for app {}".format(app_id), fg="red"))
logging.exception(f"Failed to fix app related site missing issue, app_id: {app_id}")
click.echo(click.style(f"Failed to fix missing site for app {app_id}", fg="red"))
logging.exception("Failed to fix app related site missing issue, app_id: %s", app_id)
continue
if not processed_count:
@ -915,7 +918,7 @@ def clear_orphaned_file_records(force: bool):
)
orphaned_message_files = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
rs = conn.execute(sa.text(query))
for i in rs:
orphaned_message_files.append({"id": str(i[0]), "message_id": str(i[1])})
@ -936,7 +939,7 @@ def clear_orphaned_file_records(force: bool):
click.echo(click.style("- Deleting orphaned message_files records", fg="white"))
query = "DELETE FROM message_files WHERE id IN :ids"
with db.engine.begin() as conn:
conn.execute(db.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
conn.execute(sa.text(query), {"ids": tuple([record["id"] for record in orphaned_message_files])})
click.echo(
click.style(f"Removed {len(orphaned_message_files)} orphaned message_files records.", fg="green")
)
@ -953,7 +956,7 @@ def clear_orphaned_file_records(force: bool):
click.echo(click.style(f"- Listing file records in table {files_table['table']}", fg="white"))
query = f"SELECT {files_table['id_column']}, {files_table['key_column']} FROM {files_table['table']}"
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
rs = conn.execute(sa.text(query))
for i in rs:
all_files_in_tables.append({"table": files_table["table"], "id": str(i[0]), "key": i[1]})
click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
@ -973,7 +976,7 @@ def clear_orphaned_file_records(force: bool):
f"SELECT {ids_table['column']} FROM {ids_table['table']} WHERE {ids_table['column']} IS NOT NULL"
)
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
rs = conn.execute(sa.text(query))
for i in rs:
all_ids_in_tables.append({"table": ids_table["table"], "id": str(i[0])})
elif ids_table["type"] == "text":
@ -988,7 +991,7 @@ def clear_orphaned_file_records(force: bool):
f"FROM {ids_table['table']}"
)
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
rs = conn.execute(sa.text(query))
for i in rs:
for j in i[0]:
all_ids_in_tables.append({"table": ids_table["table"], "id": j})
@ -1007,7 +1010,7 @@ def clear_orphaned_file_records(force: bool):
f"FROM {ids_table['table']}"
)
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
rs = conn.execute(sa.text(query))
for i in rs:
for j in i[0]:
all_ids_in_tables.append({"table": ids_table["table"], "id": j})
@ -1036,7 +1039,7 @@ def clear_orphaned_file_records(force: bool):
click.echo(click.style(f"- Deleting orphaned file records in table {files_table['table']}", fg="white"))
query = f"DELETE FROM {files_table['table']} WHERE {files_table['id_column']} IN :ids"
with db.engine.begin() as conn:
conn.execute(db.text(query), {"ids": tuple(orphaned_files)})
conn.execute(sa.text(query), {"ids": tuple(orphaned_files)})
except Exception as e:
click.echo(click.style(f"Error deleting orphaned file records: {str(e)}", fg="red"))
return
@ -1106,7 +1109,7 @@ def remove_orphaned_files_on_storage(force: bool):
click.echo(click.style(f"- Listing files from table {files_table['table']}", fg="white"))
query = f"SELECT {files_table['key_column']} FROM {files_table['table']}"
with db.engine.begin() as conn:
rs = conn.execute(db.text(query))
rs = conn.execute(sa.text(query))
for i in rs:
all_files_in_tables.append(str(i[0]))
click.echo(click.style(f"Found {len(all_files_in_tables)} files in tables.", fg="white"))
@ -1154,3 +1157,184 @@ def remove_orphaned_files_on_storage(force: bool):
click.echo(click.style(f"Removed {removed_files} orphaned files without errors.", fg="green"))
else:
click.echo(click.style(f"Removed {removed_files} orphaned files, with {error_files} errors.", fg="yellow"))
@click.command("setup-system-tool-oauth-client", help="Setup system tool oauth client.")
@click.option("--provider", prompt=True, help="Provider name")
@click.option("--client-params", prompt=True, help="Client Params")
def setup_system_tool_oauth_client(provider, client_params):
"""
Setup system tool oauth client
"""
provider_id = ToolProviderID(provider)
provider_name = provider_id.provider_name
plugin_id = provider_id.plugin_id
try:
# json validate
click.echo(click.style(f"Validating client params: {client_params}", fg="yellow"))
client_params_dict = TypeAdapter(dict[str, Any]).validate_json(client_params)
click.echo(click.style("Client params validated successfully.", fg="green"))
click.echo(click.style(f"Encrypting client params: {client_params}", fg="yellow"))
click.echo(click.style(f"Using SECRET_KEY: `{dify_config.SECRET_KEY}`", fg="yellow"))
oauth_client_params = encrypt_system_oauth_params(client_params_dict)
click.echo(click.style("Client params encrypted successfully.", fg="green"))
except Exception as e:
click.echo(click.style(f"Error parsing client params: {str(e)}", fg="red"))
return
deleted_count = (
db.session.query(ToolOAuthSystemClient)
.filter_by(
provider=provider_name,
plugin_id=plugin_id,
)
.delete()
)
if deleted_count > 0:
click.echo(click.style(f"Deleted {deleted_count} existing oauth client params.", fg="yellow"))
oauth_client = ToolOAuthSystemClient(
provider=provider_name,
plugin_id=plugin_id,
encrypted_oauth_params=oauth_client_params,
)
db.session.add(oauth_client)
db.session.commit()
click.echo(click.style(f"OAuth client params setup successfully. id: {oauth_client.id}", fg="green"))
def _find_orphaned_draft_variables(batch_size: int = 1000) -> list[str]:
"""
Find draft variables that reference non-existent apps.
Args:
batch_size: Maximum number of orphaned app IDs to return
Returns:
List of app IDs that have draft variables but don't exist in the apps table
"""
query = """
SELECT DISTINCT wdv.app_id
FROM workflow_draft_variables AS wdv
WHERE NOT EXISTS(
SELECT 1 FROM apps WHERE apps.id = wdv.app_id
)
LIMIT :batch_size
"""
with db.engine.connect() as conn:
result = conn.execute(sa.text(query), {"batch_size": batch_size})
return [row[0] for row in result]
def _count_orphaned_draft_variables() -> dict[str, Any]:
"""
Count orphaned draft variables by app.
Returns:
Dictionary with statistics about orphaned variables
"""
query = """
SELECT
wdv.app_id,
COUNT(*) as variable_count
FROM workflow_draft_variables AS wdv
WHERE NOT EXISTS(
SELECT 1 FROM apps WHERE apps.id = wdv.app_id
)
GROUP BY wdv.app_id
ORDER BY variable_count DESC
"""
with db.engine.connect() as conn:
result = conn.execute(sa.text(query))
orphaned_by_app = {row[0]: row[1] for row in result}
total_orphaned = sum(orphaned_by_app.values())
app_count = len(orphaned_by_app)
return {
"total_orphaned_variables": total_orphaned,
"orphaned_app_count": app_count,
"orphaned_by_app": orphaned_by_app,
}
@click.command()
@click.option("--dry-run", is_flag=True, help="Show what would be deleted without actually deleting")
@click.option("--batch-size", default=1000, help="Number of records to process per batch (default 1000)")
@click.option("--max-apps", default=None, type=int, help="Maximum number of apps to process (default: no limit)")
@click.option("-f", "--force", is_flag=True, help="Skip user confirmation and force the command to execute.")
def cleanup_orphaned_draft_variables(
dry_run: bool,
batch_size: int,
max_apps: int | None,
force: bool = False,
):
"""
Clean up orphaned draft variables from the database.
This script finds and removes draft variables that belong to apps
that no longer exist in the database.
"""
logger = logging.getLogger(__name__)
# Get statistics
stats = _count_orphaned_draft_variables()
logger.info("Found %s orphaned draft variables", stats["total_orphaned_variables"])
logger.info("Across %s non-existent apps", stats["orphaned_app_count"])
if stats["total_orphaned_variables"] == 0:
logger.info("No orphaned draft variables found. Exiting.")
return
if dry_run:
logger.info("DRY RUN: Would delete the following:")
for app_id, count in sorted(stats["orphaned_by_app"].items(), key=lambda x: x[1], reverse=True)[
:10
]: # Show top 10
logger.info(" App %s: %s variables", app_id, count)
if len(stats["orphaned_by_app"]) > 10:
logger.info(" ... and %s more apps", len(stats["orphaned_by_app"]) - 10)
return
# Confirm deletion
if not force:
click.confirm(
f"Are you sure you want to delete {stats['total_orphaned_variables']} "
f"orphaned draft variables from {stats['orphaned_app_count']} apps?",
abort=True,
)
total_deleted = 0
processed_apps = 0
while True:
if max_apps and processed_apps >= max_apps:
logger.info("Reached maximum app limit (%s). Stopping.", max_apps)
break
orphaned_app_ids = _find_orphaned_draft_variables(batch_size=10)
if not orphaned_app_ids:
logger.info("No more orphaned draft variables found.")
break
for app_id in orphaned_app_ids:
if max_apps and processed_apps >= max_apps:
break
try:
deleted_count = delete_draft_variables_batch(app_id, batch_size)
total_deleted += deleted_count
processed_apps += 1
logger.info("Deleted %s variables for app %s", deleted_count, app_id)
except Exception:
logger.exception("Error processing app %s", app_id)
continue
logger.info("Cleanup completed. Total deleted: %s variables across %s apps", total_deleted, processed_apps)

View File

@ -1,8 +1,11 @@
import logging
from pathlib import Path
from typing import Any
from pydantic.fields import FieldInfo
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict
from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict, TomlConfigSettingsSource
from libs.file_utils import search_file_upwards
from .deploy import DeploymentConfig
from .enterprise import EnterpriseFeatureConfig
@ -38,7 +41,7 @@ class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
case RemoteSettingsSourceName.NACOS:
remote_source = NacosSettingsSource(current_state)
case _:
logger.warning(f"Unsupported remote source: {remote_source_name}")
logger.warning("Unsupported remote source: %s", remote_source_name)
return {}
d: dict[str, Any] = {}
@ -99,4 +102,12 @@ class DifyConfig(
RemoteSettingsSourceFactory(settings_cls),
dotenv_settings,
file_secret_settings,
TomlConfigSettingsSource(
settings_cls=settings_cls,
toml_file=search_file_upwards(
base_dir_path=Path(__file__).parent,
target_file_name="pyproject.toml",
max_search_parent_depth=2,
),
),
)

View File

@ -31,6 +31,15 @@ class SecurityConfig(BaseSettings):
description="Duration in minutes for which a password reset token remains valid",
default=5,
)
CHANGE_EMAIL_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
description="Duration in minutes for which a change email token remains valid",
default=5,
)
OWNER_TRANSFER_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
description="Duration in minutes for which a owner transfer token remains valid",
default=5,
)
LOGIN_DISABLED: bool = Field(
description="Whether to disable login checks",
@ -237,6 +246,13 @@ class FileAccessConfig(BaseSettings):
default="",
)
INTERNAL_FILES_URL: str = Field(
description="Internal base URL for file access within Docker network,"
" used for plugin daemon and internal service communication."
" Falls back to FILES_URL if not specified.",
default="",
)
FILES_ACCESS_TIMEOUT: int = Field(
description="Expiration time in seconds for file access URLs",
default=300,
@ -314,17 +330,17 @@ class HttpConfig(BaseSettings):
def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[
PositiveInt, Field(ge=10, description="Maximum connection timeout in seconds for HTTP requests")
] = 10
HTTP_REQUEST_MAX_CONNECT_TIMEOUT: int = Field(
ge=1, description="Maximum connection timeout in seconds for HTTP requests", default=10
)
HTTP_REQUEST_MAX_READ_TIMEOUT: Annotated[
PositiveInt, Field(ge=60, description="Maximum read timeout in seconds for HTTP requests")
] = 60
HTTP_REQUEST_MAX_READ_TIMEOUT: int = Field(
ge=1, description="Maximum read timeout in seconds for HTTP requests", default=60
)
HTTP_REQUEST_MAX_WRITE_TIMEOUT: Annotated[
PositiveInt, Field(ge=10, description="Maximum write timeout in seconds for HTTP requests")
] = 20
HTTP_REQUEST_MAX_WRITE_TIMEOUT: int = Field(
ge=1, description="Maximum write timeout in seconds for HTTP requests", default=20
)
HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
description="Maximum allowed size in bytes for binary data in HTTP requests",
@ -530,6 +546,39 @@ class WorkflowNodeExecutionConfig(BaseSettings):
)
class RepositoryConfig(BaseSettings):
"""
Configuration for repository implementations
"""
CORE_WORKFLOW_EXECUTION_REPOSITORY: str = Field(
description="Repository implementation for WorkflowExecution. Options: "
"'core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository' (default), "
"'core.repositories.celery_workflow_execution_repository.CeleryWorkflowExecutionRepository'",
default="core.repositories.sqlalchemy_workflow_execution_repository.SQLAlchemyWorkflowExecutionRepository",
)
CORE_WORKFLOW_NODE_EXECUTION_REPOSITORY: str = Field(
description="Repository implementation for WorkflowNodeExecution. Options: "
"'core.repositories.sqlalchemy_workflow_node_execution_repository."
"SQLAlchemyWorkflowNodeExecutionRepository' (default), "
"'core.repositories.celery_workflow_node_execution_repository."
"CeleryWorkflowNodeExecutionRepository'",
default="core.repositories.sqlalchemy_workflow_node_execution_repository.SQLAlchemyWorkflowNodeExecutionRepository",
)
API_WORKFLOW_NODE_EXECUTION_REPOSITORY: str = Field(
description="Service-layer repository implementation for WorkflowNodeExecutionModel operations. "
"Specify as a module path",
default="repositories.sqlalchemy_api_workflow_node_execution_repository.DifyAPISQLAlchemyWorkflowNodeExecutionRepository",
)
API_WORKFLOW_RUN_REPOSITORY: str = Field(
description="Service-layer repository implementation for WorkflowRun operations. Specify as a module path",
default="repositories.sqlalchemy_api_workflow_run_repository.DifyAPISQLAlchemyWorkflowRunRepository",
)
class AuthConfig(BaseSettings):
"""
Configuration for authentication and OAuth
@ -580,6 +629,16 @@ class AuthConfig(BaseSettings):
default=86400,
)
CHANGE_EMAIL_LOCKOUT_DURATION: PositiveInt = Field(
description="Time (in seconds) a user must wait before retrying change email after exceeding the rate limit.",
default=86400,
)
OWNER_TRANSFER_LOCKOUT_DURATION: PositiveInt = Field(
description="Time (in seconds) a user must wait before retrying owner transfer after exceeding the rate limit.",
default=86400,
)
class ModerationConfig(BaseSettings):
"""
@ -609,7 +668,7 @@ class MailConfig(BaseSettings):
"""
MAIL_TYPE: Optional[str] = Field(
description="Email service provider type ('smtp' or 'resend'), default to None.",
description="Email service provider type ('smtp' or 'resend' or 'sendGrid), default to None.",
default=None,
)
@ -663,6 +722,11 @@ class MailConfig(BaseSettings):
default=50,
)
SENDGRID_API_KEY: Optional[str] = Field(
description="API key for SendGrid service",
default=None,
)
class RagEtlConfig(BaseSettings):
"""
@ -774,6 +838,41 @@ class CeleryBeatConfig(BaseSettings):
)
class CeleryScheduleTasksConfig(BaseSettings):
ENABLE_CLEAN_EMBEDDING_CACHE_TASK: bool = Field(
description="Enable clean embedding cache task",
default=False,
)
ENABLE_CLEAN_UNUSED_DATASETS_TASK: bool = Field(
description="Enable clean unused datasets task",
default=False,
)
ENABLE_CREATE_TIDB_SERVERLESS_TASK: bool = Field(
description="Enable create tidb service job task",
default=False,
)
ENABLE_UPDATE_TIDB_SERVERLESS_STATUS_TASK: bool = Field(
description="Enable update tidb service job status task",
default=False,
)
ENABLE_CLEAN_MESSAGES: bool = Field(
description="Enable clean messages task",
default=False,
)
ENABLE_MAIL_CLEAN_DOCUMENT_NOTIFY_TASK: bool = Field(
description="Enable mail clean document notify task",
default=False,
)
ENABLE_DATASETS_QUEUE_MONITOR: bool = Field(
description="Enable queue monitor task",
default=False,
)
ENABLE_CHECK_UPGRADABLE_PLUGIN_TASK: bool = Field(
description="Enable check upgradable plugin task",
default=True,
)
class PositionConfig(BaseSettings):
POSITION_PROVIDER_PINS: str = Field(
description="Comma-separated list of pinned model providers",
@ -869,6 +968,14 @@ class AccountConfig(BaseSettings):
)
class WorkflowLogConfig(BaseSettings):
WORKFLOW_LOG_CLEANUP_ENABLED: bool = Field(default=True, description="Enable workflow run log cleanup")
WORKFLOW_LOG_RETENTION_DAYS: int = Field(default=30, description="Retention days for workflow run logs")
WORKFLOW_LOG_CLEANUP_BATCH_SIZE: int = Field(
default=100, description="Batch size for workflow run log cleanup operations"
)
class FeatureConfig(
# place the configs in alphabet order
AppExecutionConfig,
@ -891,6 +998,7 @@ class FeatureConfig(
MultiModalTransferConfig,
PositionConfig,
RagEtlConfig,
RepositoryConfig,
SecurityConfig,
ToolConfig,
UpdateConfig,
@ -902,5 +1010,7 @@ class FeatureConfig(
# hosted services config
HostedServiceConfig,
CeleryBeatConfig,
CeleryScheduleTasksConfig,
WorkflowLogConfig,
):
pass
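Each flag in `CeleryScheduleTasksConfig` above maps one-to-one onto the ENABLE_* variables shown in the .env diff earlier; pydantic-settings reads them straight from the environment. A reduced sketch of that mechanism:

```python
import os

from pydantic import Field
from pydantic_settings import BaseSettings

class ScheduleFlags(BaseSettings):
    ENABLE_CLEAN_MESSAGES: bool = Field(default=False)

os.environ["ENABLE_CLEAN_MESSAGES"] = "true"
print(ScheduleFlags().ENABLE_CLEAN_MESSAGES)  # True -- parsed from the env var
```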

View File

@ -10,6 +10,7 @@ from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
from .storage.amazon_s3_storage_config import S3StorageConfig
from .storage.azure_blob_storage_config import AzureBlobStorageConfig
from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
from .storage.clickzetta_volume_storage_config import ClickZettaVolumeStorageConfig
from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
from .storage.oci_storage_config import OCIStorageConfig
@ -20,10 +21,12 @@ from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
from .vdb.analyticdb_config import AnalyticdbConfig
from .vdb.baidu_vector_config import BaiduVectorDBConfig
from .vdb.chroma_config import ChromaConfig
from .vdb.clickzetta_config import ClickzettaConfig
from .vdb.couchbase_config import CouchbaseConfig
from .vdb.elasticsearch_config import ElasticsearchConfig
from .vdb.huawei_cloud_config import HuaweiCloudConfig
from .vdb.lindorm_config import LindormConfig
from .vdb.matrixone_config import MatrixoneConfig
from .vdb.milvus_config import MilvusConfig
from .vdb.myscale_config import MyScaleConfig
from .vdb.oceanbase_config import OceanBaseVectorConfig
@ -51,6 +54,7 @@ class StorageConfig(BaseSettings):
"aliyun-oss",
"azure-blob",
"baidu-obs",
"clickzetta-volume",
"google-storage",
"huawei-obs",
"oci-storage",
@ -60,8 +64,9 @@ class StorageConfig(BaseSettings):
"local",
] = Field(
description="Type of storage to use."
" Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', "
"'huawei-obs', 'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'opendal'.",
" Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', "
"'clickzetta-volume', 'google-storage', 'huawei-obs', 'oci-storage', 'tencent-cos', "
"'volcengine-tos', 'supabase'. Default is 'opendal'.",
default="opendal",
)
@ -84,6 +89,11 @@ class VectorStoreConfig(BaseSettings):
default=False,
)
VECTOR_INDEX_NAME_PREFIX: Optional[str] = Field(
description="Prefix used to create collection name in vector database",
default="Vector_index",
)
class KeywordStoreConfig(BaseSettings):
KEYWORD_STORE: str = Field(
@ -134,7 +144,8 @@ class DatabaseConfig(BaseSettings):
default="postgresql",
)
@computed_field
@computed_field # type: ignore[misc]
@property
def SQLALCHEMY_DATABASE_URI(self) -> str:
db_extras = (
f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
@ -161,6 +172,11 @@ class DatabaseConfig(BaseSettings):
default=3600,
)
SQLALCHEMY_POOL_USE_LIFO: bool = Field(
description="If True, SQLAlchemy will use last-in-first-out way to retrieve connections from pool.",
default=False,
)
SQLALCHEMY_POOL_PRE_PING: bool = Field(
description="If True, enables connection pool pre-ping feature to check connections.",
default=False,
@ -198,13 +214,14 @@ class DatabaseConfig(BaseSettings):
"pool_recycle": self.SQLALCHEMY_POOL_RECYCLE,
"pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
"connect_args": connect_args,
"pool_use_lifo": self.SQLALCHEMY_POOL_USE_LIFO,
}
class CeleryConfig(DatabaseConfig):
CELERY_BACKEND: str = Field(
description="Backend for Celery task results. Options: 'database', 'redis'.",
default="database",
description="Backend for Celery task results. Options: 'database', 'redis', 'rabbitmq'.",
default="redis",
)
CELERY_BROKER_URL: Optional[str] = Field(
@ -222,6 +239,10 @@ class CeleryConfig(DatabaseConfig):
default=None,
)
CELERY_SENTINEL_PASSWORD: Optional[str] = Field(
description="Password of the Redis Sentinel master.",
default=None,
)
CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
description="Timeout for Redis Sentinel socket operations in seconds.",
default=0.1,
@ -229,11 +250,12 @@ class CeleryConfig(DatabaseConfig):
@computed_field
def CELERY_RESULT_BACKEND(self) -> str | None:
return (
"db+{}".format(self.SQLALCHEMY_DATABASE_URI)
if self.CELERY_BACKEND == "database"
else self.CELERY_BROKER_URL
)
if self.CELERY_BACKEND in ("database", "rabbitmq"):
return f"db+{self.SQLALCHEMY_DATABASE_URI}"
elif self.CELERY_BACKEND == "redis":
return self.CELERY_BROKER_URL
else:
return None
@property
def BROKER_USE_SSL(self) -> bool:
@ -286,6 +308,7 @@ class MiddlewareConfig(
AliyunOSSStorageConfig,
AzureBlobStorageConfig,
BaiduOBSStorageConfig,
ClickZettaVolumeStorageConfig,
GoogleCloudStorageConfig,
HuaweiCloudOBSStorageConfig,
OCIStorageConfig,
@ -298,6 +321,7 @@ class MiddlewareConfig(
VectorStoreConfig,
AnalyticdbConfig,
ChromaConfig,
ClickzettaConfig,
HuaweiCloudConfig,
MilvusConfig,
MyScaleConfig,
@ -323,5 +347,6 @@ class MiddlewareConfig(
OpenGaussConfig,
TableStoreConfig,
DatasetQueueMonitorConfig,
MatrixoneConfig,
):
pass
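The new `CELERY_RESULT_BACKEND` branch above routes 'database' and 'rabbitmq' to a SQLAlchemy result backend and lets 'redis' reuse the broker URL; the logic reduces to a small pure function:

```python
def celery_result_backend(backend: str, database_uri: str, broker_url: str | None) -> str | None:
    if backend in ("database", "rabbitmq"):
        return f"db+{database_uri}"
    elif backend == "redis":
        return broker_url
    return None

assert celery_result_backend("database", "postgresql://dify", None) == "db+postgresql://dify"
assert celery_result_backend("redis", "postgresql://dify", "redis://cache:6379/1") == "redis://cache:6379/1"
assert celery_result_backend("unknown", "postgresql://dify", None) is None
```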

View File

@ -39,6 +39,26 @@ class RedisConfig(BaseSettings):
default=False,
)
REDIS_SSL_CERT_REQS: str = Field(
description="SSL certificate requirements (CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED)",
default="CERT_NONE",
)
REDIS_SSL_CA_CERTS: Optional[str] = Field(
description="Path to the CA certificate file for SSL verification",
default=None,
)
REDIS_SSL_CERTFILE: Optional[str] = Field(
description="Path to the client certificate file for SSL authentication",
default=None,
)
REDIS_SSL_KEYFILE: Optional[str] = Field(
description="Path to the client private key file for SSL authentication",
default=None,
)
REDIS_USE_SENTINEL: Optional[bool] = Field(
description="Enable Redis Sentinel mode for high availability",
default=False,

View File

@ -0,0 +1,65 @@
"""ClickZetta Volume Storage Configuration"""
from typing import Optional
from pydantic import Field
from pydantic_settings import BaseSettings
class ClickZettaVolumeStorageConfig(BaseSettings):
"""Configuration for ClickZetta Volume storage."""
CLICKZETTA_VOLUME_USERNAME: Optional[str] = Field(
description="Username for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_PASSWORD: Optional[str] = Field(
description="Password for ClickZetta Volume authentication",
default=None,
)
CLICKZETTA_VOLUME_INSTANCE: Optional[str] = Field(
description="ClickZetta instance identifier",
default=None,
)
CLICKZETTA_VOLUME_SERVICE: str = Field(
description="ClickZetta service endpoint",
default="api.clickzetta.com",
)
CLICKZETTA_VOLUME_WORKSPACE: str = Field(
description="ClickZetta workspace name",
default="quick_start",
)
CLICKZETTA_VOLUME_VCLUSTER: str = Field(
description="ClickZetta virtual cluster name",
default="default_ap",
)
CLICKZETTA_VOLUME_SCHEMA: str = Field(
description="ClickZetta schema name",
default="dify",
)
CLICKZETTA_VOLUME_TYPE: str = Field(
description="ClickZetta volume type (table|user|external)",
default="user",
)
CLICKZETTA_VOLUME_NAME: Optional[str] = Field(
description="ClickZetta volume name for external volumes",
default=None,
)
CLICKZETTA_VOLUME_TABLE_PREFIX: str = Field(
description="Prefix for ClickZetta volume table names",
default="dataset_",
)
CLICKZETTA_VOLUME_DIFY_PREFIX: str = Field(
description="Directory prefix for User Volume to organize Dify files",
default="dify_km",
)

View File

@ -0,0 +1,69 @@
from typing import Optional
from pydantic import BaseModel, Field
class ClickzettaConfig(BaseModel):
"""
Clickzetta Lakehouse vector database configuration
"""
CLICKZETTA_USERNAME: Optional[str] = Field(
description="Username for authenticating with Clickzetta Lakehouse",
default=None,
)
CLICKZETTA_PASSWORD: Optional[str] = Field(
description="Password for authenticating with Clickzetta Lakehouse",
default=None,
)
CLICKZETTA_INSTANCE: Optional[str] = Field(
description="Clickzetta Lakehouse instance ID",
default=None,
)
CLICKZETTA_SERVICE: Optional[str] = Field(
description="Clickzetta API service endpoint (e.g., 'api.clickzetta.com')",
default="api.clickzetta.com",
)
CLICKZETTA_WORKSPACE: Optional[str] = Field(
description="Clickzetta workspace name",
default="default",
)
CLICKZETTA_VCLUSTER: Optional[str] = Field(
description="Clickzetta virtual cluster name",
default="default_ap",
)
CLICKZETTA_SCHEMA: Optional[str] = Field(
description="Database schema name in Clickzetta",
default="public",
)
CLICKZETTA_BATCH_SIZE: Optional[int] = Field(
description="Batch size for bulk insert operations",
default=100,
)
CLICKZETTA_ENABLE_INVERTED_INDEX: Optional[bool] = Field(
description="Enable inverted index for full-text search capabilities",
default=True,
)
CLICKZETTA_ANALYZER_TYPE: Optional[str] = Field(
description="Analyzer type for full-text search: keyword, english, chinese, unicode",
default="chinese",
)
CLICKZETTA_ANALYZER_MODE: Optional[str] = Field(
description="Analyzer mode for tokenization: max_word (fine-grained) or smart (intelligent)",
default="smart",
)
CLICKZETTA_VECTOR_DISTANCE_FUNCTION: Optional[str] = Field(
description="Distance function for vector similarity: l2_distance or cosine_distance",
default="cosine_distance",
)

View File

@ -1,12 +1,13 @@
from typing import Optional
from pydantic import Field, PositiveInt
from pydantic import Field, PositiveInt, model_validator
from pydantic_settings import BaseSettings
class ElasticsearchConfig(BaseSettings):
"""
Configuration settings for Elasticsearch
Configuration settings for both self-managed and Elastic Cloud deployments.
Can load from environment variables or .env files.
"""
ELASTICSEARCH_HOST: Optional[str] = Field(
@ -28,3 +29,50 @@ class ElasticsearchConfig(BaseSettings):
description="Password for authenticating with Elasticsearch (default is 'elastic')",
default="elastic",
)
# Elastic Cloud (optional)
ELASTICSEARCH_USE_CLOUD: Optional[bool] = Field(
description="Set to True to use Elastic Cloud instead of self-hosted Elasticsearch", default=False
)
ELASTICSEARCH_CLOUD_URL: Optional[str] = Field(
description="Full URL for Elastic Cloud deployment (e.g., 'https://example.es.region.aws.found.io:443')",
default=None,
)
ELASTICSEARCH_API_KEY: Optional[str] = Field(
description="API key for authenticating with Elastic Cloud", default=None
)
# Common options
ELASTICSEARCH_CA_CERTS: Optional[str] = Field(
description="Path to CA certificate file for SSL verification", default=None
)
ELASTICSEARCH_VERIFY_CERTS: bool = Field(
description="Whether to verify SSL certificates (default is False)", default=False
)
ELASTICSEARCH_REQUEST_TIMEOUT: int = Field(
description="Request timeout in milliseconds (default is 100000)", default=100000
)
ELASTICSEARCH_RETRY_ON_TIMEOUT: bool = Field(
description="Whether to retry requests on timeout (default is True)", default=True
)
ELASTICSEARCH_MAX_RETRIES: int = Field(
description="Maximum number of retry attempts (default is 10000)", default=10000
)
@model_validator(mode="after")
def validate_elasticsearch_config(self):
"""Validate Elasticsearch configuration based on deployment type."""
if self.ELASTICSEARCH_USE_CLOUD:
if not self.ELASTICSEARCH_CLOUD_URL:
raise ValueError("ELASTICSEARCH_CLOUD_URL is required when using Elastic Cloud")
if not self.ELASTICSEARCH_API_KEY:
raise ValueError("ELASTICSEARCH_API_KEY is required when using Elastic Cloud")
else:
if not self.ELASTICSEARCH_HOST:
raise ValueError("ELASTICSEARCH_HOST is required for self-hosted Elasticsearch")
if not self.ELASTICSEARCH_USERNAME:
raise ValueError("ELASTICSEARCH_USERNAME is required for self-hosted Elasticsearch")
if not self.ELASTICSEARCH_PASSWORD:
raise ValueError("ELASTICSEARCH_PASSWORD is required for self-hosted Elasticsearch")
return self
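The `model_validator(mode="after")` hook above runs once all fields are populated, so cross-field rules like "cloud mode needs a URL and an API key" can fail fast at startup. A stripped-down sketch of the same pattern:

```python
from typing import Optional

from pydantic import Field, ValidationError, model_validator
from pydantic_settings import BaseSettings

class CloudConfig(BaseSettings):
    USE_CLOUD: bool = Field(default=False)
    CLOUD_URL: Optional[str] = Field(default=None)

    @model_validator(mode="after")
    def check_cloud_fields(self):
        if self.USE_CLOUD and not self.CLOUD_URL:
            raise ValueError("CLOUD_URL is required when USE_CLOUD is enabled")
        return self

try:
    CloudConfig(USE_CLOUD=True)
except ValidationError as e:
    print(e.errors()[0]["msg"])  # Value error, CLOUD_URL is required ...
```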

View File

@ -0,0 +1,14 @@
from pydantic import BaseModel, Field
class MatrixoneConfig(BaseModel):
"""Matrixone vector database configuration."""
MATRIXONE_HOST: str = Field(default="localhost", description="Host address of the Matrixone server")
MATRIXONE_PORT: int = Field(default=6001, description="Port number of the Matrixone server")
MATRIXONE_USER: str = Field(default="dump", description="Username for authenticating with Matrixone")
MATRIXONE_PASSWORD: str = Field(default="111", description="Password for authenticating with Matrixone")
MATRIXONE_DATABASE: str = Field(default="dify", description="Name of the Matrixone database to connect to")
MATRIXONE_METRIC: str = Field(
default="l2", description="Distance metric type for vector similarity search (cosine or l2)"
)

View File

@ -28,3 +28,8 @@ class TableStoreConfig(BaseSettings):
description="AccessKey secret for the instance name",
default=None,
)
TABLESTORE_NORMALIZE_FULLTEXT_BM25_SCORE: bool = Field(
description="Whether to normalize full-text search scores to [0, 1]",
default=False,
)

View File

@ -12,6 +12,16 @@ class OTelConfig(BaseSettings):
default=False,
)
OTLP_TRACE_ENDPOINT: str = Field(
description="OTLP trace endpoint",
default="",
)
OTLP_METRIC_ENDPOINT: str = Field(
description="OTLP metric endpoint",
default="",
)
OTLP_BASE_ENDPOINT: str = Field(
description="OTLP base endpoint",
default="http://localhost:4318",

View File

@ -1,17 +1,13 @@
from pydantic import Field
from pydantic_settings import BaseSettings
from configs.packaging.pyproject import PyProjectConfig, PyProjectTomlConfig
class PackagingInfo(BaseSettings):
class PackagingInfo(PyProjectTomlConfig):
"""
Packaging build information
"""
CURRENT_VERSION: str = Field(
description="Dify version",
default="1.4.3",
)
COMMIT_SHA: str = Field(
description="SHA-1 checksum of the git commit used to build the app",
default="",

View File

@ -0,0 +1,17 @@
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings
class PyProjectConfig(BaseModel):
version: str = Field(description="Dify version", default="")
class PyProjectTomlConfig(BaseSettings):
"""
configs in api/pyproject.toml
"""
project: PyProjectConfig = Field(
description="configs in the project section of pyproject.toml",
default=PyProjectConfig(),
)
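The new `PyProjectTomlConfig` above is populated through the `TomlConfigSettingsSource` wired into `DifyConfig.settings_customise_sources` earlier. A standalone sketch of the mechanism (the file path is an assumption):

```python
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings, SettingsConfigDict, TomlConfigSettingsSource

class ProjectSection(BaseModel):
    version: str = Field(default="")

class TomlSettings(BaseSettings):
    model_config = SettingsConfigDict(toml_file="pyproject.toml")
    project: ProjectSection = ProjectSection()

    @classmethod
    def settings_customise_sources(cls, settings_cls, init_settings, env_settings,
                                   dotenv_settings, file_secret_settings):
        # Only read the TOML file; ignore env vars for this sketch.
        return (TomlConfigSettingsSource(settings_cls),)

print(TomlSettings().project.version)  # [project].version from pyproject.toml
```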

View File

@ -76,7 +76,7 @@ class ApolloClient:
code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
if code == 200:
if not body:
logger.error(f"get_json_from_net load configs failed, body is {body}")
logger.error("get_json_from_net load configs failed, body is %s", body)
return None
data = json.loads(body)
data = data["configurations"]
@ -207,7 +207,7 @@ class ApolloClient:
# if there are no notifications, return directly
if len(notifications) == 0:
return
url = "{}/notifications/v2".format(self.config_url)
url = f"{self.config_url}/notifications/v2"
params = {
"appId": self.app_id,
"cluster": self.cluster,
@ -222,7 +222,7 @@ class ApolloClient:
return
if http_code == 200:
if not body:
logger.error(f"_long_poll load configs failed,body is {body}")
logger.error("_long_poll load configs failed,body is %s", body)
return
data = json.loads(body)
for entry in data:
@ -273,12 +273,12 @@ class ApolloClient:
time.sleep(60 * 10) # 10 minutes
def _do_heart_beat(self, namespace):
url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip)
url = f"{self.config_url}/configs/{self.app_id}/{self.cluster}/{namespace}?ip={self.ip}"
try:
code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
if code == 200:
if not body:
logger.error(f"_do_heart_beat load configs failed,body is {body}")
logger.error("_do_heart_beat load configs failed,body is %s", body)
return None
data = json.loads(body)
if self.last_release_key == data["releaseKey"]:

View File

@ -24,7 +24,7 @@ def url_encode_wrapper(params):
def no_key_cache_key(namespace, key):
return "{}{}{}".format(namespace, len(namespace), key)
return f"{namespace}{len(namespace)}{key}"
# Returns the value if it can be obtained, or None if it cannot

View File

@ -1,6 +1,7 @@
from configs import dify_config
HIDDEN_VALUE = "[__HIDDEN__]"
UNKNOWN_VALUE = "[__UNKNOWN__]"
UUID_NIL = "00000000-0000-0000-0000-000000000000"
DEFAULT_FILE_NUMBER_LIMITS = 3
@ -8,10 +9,10 @@ DEFAULT_FILE_NUMBER_LIMITS = 3
IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "mpga"]
VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "webm"]
VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS])
AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "webm", "amr"]
AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "amr", "mpga"]
AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])

View File

@ -28,5 +28,5 @@ def supported_language(lang):
if lang in languages:
return lang
error = "{lang} is not a valid language.".format(lang=lang)
error = f"{lang} is not a valid language."
raise ValueError(error)

View File

@ -1,5 +1,7 @@
from werkzeug.exceptions import HTTPException
from libs.exception import BaseHTTPException
class FilenameNotExistsError(HTTPException):
code = 400
@ -9,3 +11,27 @@ class FilenameNotExistsError(HTTPException):
class RemoteFileUploadError(HTTPException):
code = 400
description = "Error uploading remote file."
class FileTooLargeError(BaseHTTPException):
error_code = "file_too_large"
description = "File size exceeded. {message}"
code = 413
class UnsupportedFileTypeError(BaseHTTPException):
error_code = "unsupported_file_type"
description = "File type not allowed."
code = 415
class TooManyFilesError(BaseHTTPException):
error_code = "too_many_files"
description = "Only one file is allowed."
code = 400
class NoFileUploadedError(BaseHTTPException):
error_code = "no_file_uploaded"
description = "Please upload your file."
code = 400

View File

@ -1,3 +1,4 @@
import contextlib
import mimetypes
import os
import platform
@ -65,10 +66,8 @@ def guess_file_info_from_response(response: httpx.Response):
# Use python-magic to guess MIME type if still unknown or generic
if mimetype == "application/octet-stream" and magic is not None:
try:
with contextlib.suppress(magic.MagicException):
mimetype = magic.from_buffer(response.content[:1024], mime=True)
except magic.MagicException:
pass
extension = os.path.splitext(filename)[1]
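`contextlib.suppress` is the one-liner ruff's SIM105 suggests for try/except/pass blocks like the one replaced above; the two forms are equivalent:

```python
import contextlib

cache: dict[str, str] = {}

# Verbose form the diff removes:
try:
    cache.pop("missing")
except KeyError:
    pass

# Idiomatic replacement:
with contextlib.suppress(KeyError):
    cache.pop("missing")
```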

View File

@ -56,6 +56,7 @@ from .app import (
conversation,
conversation_variables,
generator,
mcp_server,
message,
model_config,
ops_trace,
@ -63,6 +64,7 @@ from .app import (
statistic,
workflow,
workflow_app_log,
workflow_draft_variable,
workflow_run,
workflow_statistic,
)
@ -82,6 +84,7 @@ from .datasets import (
external,
hit_testing,
metadata,
upload_file,
website,
)

View File

@ -56,8 +56,7 @@ class InsertExploreAppListApi(Resource):
parser.add_argument("position", type=int, required=True, nullable=False, location="json")
args = parser.parse_args()
with Session(db.engine) as session:
app = session.execute(select(App).filter(App.id == args["app_id"])).scalar_one_or_none()
app = db.session.execute(select(App).where(App.id == args["app_id"])).scalar_one_or_none()
if not app:
raise NotFound(f"App '{args['app_id']}' is not found")
@ -75,41 +74,41 @@ class InsertExploreAppListApi(Resource):
with Session(db.engine) as session:
recommended_app = session.execute(
select(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"])
select(RecommendedApp).where(RecommendedApp.app_id == args["app_id"])
).scalar_one_or_none()
if not recommended_app:
recommended_app = RecommendedApp(
app_id=app.id,
description=desc,
copyright=copy_right,
privacy_policy=privacy_policy,
custom_disclaimer=custom_disclaimer,
language=args["language"],
category=args["category"],
position=args["position"],
)
if not recommended_app:
recommended_app = RecommendedApp(
app_id=app.id,
description=desc,
copyright=copy_right,
privacy_policy=privacy_policy,
custom_disclaimer=custom_disclaimer,
language=args["language"],
category=args["category"],
position=args["position"],
)
db.session.add(recommended_app)
db.session.add(recommended_app)
app.is_public = True
db.session.commit()
app.is_public = True
db.session.commit()
return {"result": "success"}, 201
else:
recommended_app.description = desc
recommended_app.copyright = copy_right
recommended_app.privacy_policy = privacy_policy
recommended_app.custom_disclaimer = custom_disclaimer
recommended_app.language = args["language"]
recommended_app.category = args["category"]
recommended_app.position = args["position"]
return {"result": "success"}, 201
else:
recommended_app.description = desc
recommended_app.copyright = copy_right
recommended_app.privacy_policy = privacy_policy
recommended_app.custom_disclaimer = custom_disclaimer
recommended_app.language = args["language"]
recommended_app.category = args["category"]
recommended_app.position = args["position"]
app.is_public = True
app.is_public = True
db.session.commit()
db.session.commit()
return {"result": "success"}, 200
return {"result": "success"}, 200
class InsertExploreAppApi(Resource):
@ -118,21 +117,21 @@ class InsertExploreAppApi(Resource):
def delete(self, app_id):
with Session(db.engine) as session:
recommended_app = session.execute(
select(RecommendedApp).filter(RecommendedApp.app_id == str(app_id))
select(RecommendedApp).where(RecommendedApp.app_id == str(app_id))
).scalar_one_or_none()
if not recommended_app:
return {"result": "success"}, 204
with Session(db.engine) as session:
app = session.execute(select(App).filter(App.id == recommended_app.app_id)).scalar_one_or_none()
app = session.execute(select(App).where(App.id == recommended_app.app_id)).scalar_one_or_none()
if app:
app.is_public = False
with Session(db.engine) as session:
installed_apps = session.execute(
select(InstalledApp).filter(
select(InstalledApp).where(
InstalledApp.app_id == recommended_app.app_id,
InstalledApp.tenant_id != InstalledApp.app_owner_tenant_id,
)

View File

@ -61,7 +61,7 @@ class BaseApiKeyListResource(Resource):
_get_resource(resource_id, current_user.current_tenant_id, self.resource_model)
keys = (
db.session.query(ApiToken)
.filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
.where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
.all()
)
return {"items": keys}
@ -76,7 +76,7 @@ class BaseApiKeyListResource(Resource):
current_key_count = (
db.session.query(ApiToken)
.filter(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
.where(ApiToken.type == self.resource_type, getattr(ApiToken, self.resource_id_field) == resource_id)
.count()
)
@ -117,7 +117,7 @@ class BaseApiKeyResource(Resource):
key = (
db.session.query(ApiToken)
.filter(
.where(
getattr(ApiToken, self.resource_id_field) == resource_id,
ApiToken.type == self.resource_type,
ApiToken.id == api_key_id,
@ -128,7 +128,7 @@ class BaseApiKeyResource(Resource):
if key is None:
flask_restful.abort(404, message="API key not found")
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()
return {"result": "success"}, 204

View File

@ -1,11 +1,12 @@
from typing import Literal
from flask import request
from flask_login import current_user
from flask_restful import Resource, marshal, marshal_with, reqparse
from werkzeug.exceptions import Forbidden
from controllers.common.errors import NoFileUploadedError, TooManyFilesError
from controllers.console import api
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
@ -25,7 +26,7 @@ class AnnotationReplyActionApi(Resource):
@login_required
@account_initialization_required
@cloud_edition_billing_resource_check("annotation")
def post(self, app_id, action: Literal["enable", "disable"]):
if not current_user.is_editor:
raise Forbidden()
@ -39,8 +40,6 @@ class AnnotationReplyActionApi(Resource):
result = AppAnnotationService.enable_app_annotation(args, app_id)
elif action == "disable":
result = AppAnnotationService.disable_app_annotation(app_id)
return result, 200
@ -86,7 +85,7 @@ class AnnotationReplyActionStatusApi(Resource):
raise Forbidden()
job_id = str(job_id)
app_annotation_job_key = "{}_app_annotation_job_{}".format(action, str(job_id))
app_annotation_job_key = f"{action}_app_annotation_job_{str(job_id)}"
cache_result = redis_client.get(app_annotation_job_key)
if cache_result is None:
raise ValueError("The job does not exist.")
@ -94,13 +93,13 @@ class AnnotationReplyActionStatusApi(Resource):
job_status = cache_result.decode()
error_msg = ""
if job_status == "error":
app_annotation_error_key = "{}_app_annotation_error_{}".format(action, str(job_id))
app_annotation_error_key = f"{action}_app_annotation_error_{str(job_id)}"
error_msg = redis_client.get(app_annotation_error_key).decode()
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
class AnnotationApi(Resource):
@setup_required
@login_required
@account_initialization_required
@ -123,22 +122,6 @@ class AnnotationListApi(Resource):
}
return response, 200
class AnnotationCreateApi(Resource):
@setup_required
@login_required
@account_initialization_required
@ -156,6 +139,48 @@ class AnnotationCreateApi(Resource):
annotation = AppAnnotationService.insert_app_annotation_directly(args, app_id)
return annotation
@setup_required
@login_required
@account_initialization_required
def delete(self, app_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
# Use request.args.getlist to get annotation_ids array directly
annotation_ids = request.args.getlist("annotation_id")
# If annotation_ids are provided, handle batch deletion
if annotation_ids:
# Check if any annotation_ids contain empty strings or invalid values
if not all(annotation_id.strip() for annotation_id in annotation_ids if annotation_id):
return {
"code": "bad_request",
"message": "annotation_ids are required if the parameter is provided.",
}, 400
result = AppAnnotationService.delete_app_annotations_in_batch(app_id, annotation_ids)
return result, 204
# If no annotation_ids are provided, handle clearing all annotations
else:
AppAnnotationService.clear_all_annotations(app_id)
return {"result": "success"}, 204
class AnnotationExportApi(Resource):
@setup_required
@login_required
@account_initialization_required
def get(self, app_id):
if not current_user.is_editor:
raise Forbidden()
app_id = str(app_id)
annotation_list = AppAnnotationService.export_annotation_list_by_app_id(app_id)
response = {"data": marshal(annotation_list, annotation_fields)}
return response, 200
class AnnotationUpdateDeleteApi(Resource):
@setup_required
@ -199,16 +224,17 @@ class AnnotationBatchImportApi(Resource):
raise Forbidden()
app_id = str(app_id)
# check file
if "file" not in request.files:
    raise NoFileUploadedError()
if len(request.files) > 1:
    raise TooManyFilesError()
# get file from request
file = request.files["file"]
# check file type
if not file.filename or not file.filename.lower().endswith(".csv"):
raise ValueError("Invalid file type. Only CSV files are allowed")
return AppAnnotationService.batch_import_app_annotations(app_id, file)
@ -223,14 +249,14 @@ class AnnotationBatchImportStatusApi(Resource):
raise Forbidden()
job_id = str(job_id)
indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))
indexing_cache_key = f"app_annotation_batch_import_{str(job_id)}"
cache_result = redis_client.get(indexing_cache_key)
if cache_result is None:
raise ValueError("The job does not exist.")
job_status = cache_result.decode()
error_msg = ""
if job_status == "error":
indexing_error_msg_key = "app_annotation_batch_import_error_msg_{}".format(str(job_id))
indexing_error_msg_key = f"app_annotation_batch_import_error_msg_{str(job_id)}"
error_msg = redis_client.get(indexing_error_msg_key).decode()
return {"job_id": job_id, "job_status": job_status, "error_msg": error_msg}, 200
@ -265,7 +291,7 @@ api.add_resource(AnnotationReplyActionApi, "/apps/<uuid:app_id>/annotation-reply
api.add_resource(
AnnotationReplyActionStatusApi, "/apps/<uuid:app_id>/annotation-reply/<string:action>/status/<uuid:job_id>"
)
api.add_resource(AnnotationApi, "/apps/<uuid:app_id>/annotations")
api.add_resource(AnnotationExportApi, "/apps/<uuid:app_id>/annotations/export")
api.add_resource(AnnotationUpdateDeleteApi, "/apps/<uuid:app_id>/annotations/<uuid:annotation_id>")
api.add_resource(AnnotationBatchImportApi, "/apps/<uuid:app_id>/annotations/batch-import")
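With the action parameter typed as Literal["enable", "disable"], the defensive else branch that raised ValueError can be dropped: a type checker now rejects any other value at the call site. A self-contained illustration:

```python
from typing import Literal

def apply_action(action: Literal["enable", "disable"]) -> bool:
    # mypy/pyright narrow `action` to exactly these two strings;
    # note the constraint is static only, not enforced at runtime.
    if action == "enable":
        return True
    return False

apply_action("enable")    # accepted
# apply_action("pause")   # flagged by a type checker
```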

View File

@ -28,6 +28,12 @@ from services.feature_service import FeatureService
ALLOW_CREATE_APP_MODES = ["chat", "agent-chat", "advanced-chat", "workflow", "completion"]
def _validate_description_length(description):
if description and len(description) > 400:
raise ValueError("Description cannot exceed 400 characters.")
return description
class AppListApi(Resource):
@setup_required
@login_required
@ -94,7 +100,7 @@ class AppListApi(Resource):
"""Create app"""
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, location="json")
parser.add_argument("description", type=str, location="json")
parser.add_argument("description", type=_validate_description_length, location="json")
parser.add_argument("mode", type=str, choices=ALLOW_CREATE_APP_MODES, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
@ -146,11 +152,12 @@ class AppApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
parser.add_argument("description", type=str, location="json")
parser.add_argument("description", type=_validate_description_length, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")
parser.add_argument("use_icon_as_answer_icon", type=bool, location="json")
parser.add_argument("max_active_requests", type=int, location="json")
args = parser.parse_args()
app_service = AppService()
@ -188,7 +195,7 @@ class AppCopyApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, location="json")
parser.add_argument("description", type=str, location="json")
parser.add_argument("description", type=_validate_description_length, location="json")
parser.add_argument("icon_type", type=str, location="json")
parser.add_argument("icon", type=str, location="json")
parser.add_argument("icon_background", type=str, location="json")

View File

@ -17,6 +17,8 @@ from libs.login import login_required
from models import Account
from models.model import App
from services.app_dsl_service import AppDslService, ImportStatus
from services.enterprise.enterprise_service import EnterpriseService
from services.feature_service import FeatureService
class AppImportApi(Resource):
@ -60,7 +62,9 @@ class AppImportApi(Resource):
app_id=args.get("app_id"),
)
session.commit()
if result.app_id and FeatureService.get_system_features().webapp_auth.enabled:
# update web app setting as private
EnterpriseService.WebAppAuth.update_app_access_mode(result.app_id, "private")
# Return appropriate status code based on result
status = result.status
if status == ImportStatus.FAILED.value:

View File

@ -90,23 +90,11 @@ class ChatMessageTextApi(Resource):
message_id = args.get("message_id", None)
text = args.get("text", None)
voice = args.get("voice", None)
response = AudioService.transcript_tts(
app_model=app_model, text=text, voice=voice, message_id=message_id, is_draft=True
)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")

View File

@ -1,6 +1,7 @@
import logging
import flask_login
from flask import request
from flask_restful import Resource, reqparse
from werkzeug.exceptions import InternalServerError, NotFound
@ -24,6 +25,7 @@ from core.errors.error import (
ProviderTokenNotInitError,
QuotaExceededError,
)
from core.helper.trace_id_helper import get_external_trace_id
from core.model_runtime.errors.invoke import InvokeError
from libs import helper
from libs.helper import uuid_value
@ -115,6 +117,10 @@ class ChatMessageApi(Resource):
streaming = args["response_mode"] != "blocking"
args["auto_generate_name"] = False
external_trace_id = get_external_trace_id(request)
if external_trace_id:
args["external_trace_id"] = external_trace_id
account = flask_login.current_user
try:

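Both the chat and workflow run endpoints in this compare now lift an external trace id off the incoming request and thread it through args. A hedged sketch of the idea; the header name and helper body here are assumptions, the real logic lives in core.helper.trace_id_helper:

```python
from flask import request

def get_external_trace_id_sketch(req) -> str | None:
    # Assumed header name, for illustration only.
    return req.headers.get("X-Trace-Id")

def enrich_args(args: dict) -> dict:
    # Mirrors the pattern used at the call sites above.
    external_trace_id = get_external_trace_id_sketch(request)
    if external_trace_id:
        args["external_trace_id"] = external_trace_id
    return args
```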
View File

@ -1,4 +1,4 @@
from datetime import datetime
import pytz # pip install pytz
from flask_login import current_user
@ -19,6 +19,7 @@ from fields.conversation_fields import (
conversation_pagination_fields,
conversation_with_summary_pagination_fields,
)
from libs.datetime_utils import naive_utc_now
from libs.helper import DatetimeString
from libs.login import login_required
from models import Conversation, EndUser, Message, MessageAnnotation
@ -48,10 +49,10 @@ class CompletionConversationApi(Resource):
query = db.select(Conversation).where(Conversation.app_id == app_model.id, Conversation.mode == "completion")
if args["keyword"]:
query = query.join(Message, Message.conversation_id == Conversation.id).where(
or_(
Message.query.ilike(f"%{args['keyword']}%"),
Message.answer.ilike(f"%{args['keyword']}%"),
)
)
@ -120,7 +121,7 @@ class CompletionConversationDetailApi(Resource):
conversation = (
db.session.query(Conversation)
.where(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
.first()
)
@ -173,14 +174,14 @@ class ChatConversationApi(Resource):
query = db.select(Conversation).where(Conversation.app_id == app_model.id)
if args["keyword"]:
keyword_filter = "%{}%".format(args["keyword"])
keyword_filter = f"%{args['keyword']}%"
query = (
query.join(
Message,
Message.conversation_id == Conversation.id,
)
.join(subquery, subquery.c.conversation_id == Conversation.id)
.where(
or_(
Message.query.ilike(keyword_filter),
Message.answer.ilike(keyword_filter),
@ -285,7 +286,7 @@ class ChatConversationDetailApi(Resource):
conversation = (
db.session.query(Conversation)
.where(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
.first()
)
@ -307,7 +308,7 @@ api.add_resource(ChatConversationDetailApi, "/apps/<uuid:app_id>/chat-conversati
def _get_conversation(app_model, conversation_id):
conversation = (
db.session.query(Conversation)
.where(Conversation.id == conversation_id, Conversation.app_id == app_model.id)
.first()
)
@ -315,7 +316,7 @@ def _get_conversation(app_model, conversation_id):
raise NotFound("Conversation Not Exists.")
if not conversation.read_at:
conversation.read_at = naive_utc_now()
conversation.read_account_id = current_user.id
db.session.commit()
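Several files here replace hand-rolled datetime.now(UTC).replace(tzinfo=None) expressions with a shared naive_utc_now() helper from libs.datetime_utils. Judging by the call sites it replaces, the helper presumably reduces to:

```python
from datetime import UTC, datetime

def naive_utc_now() -> datetime:
    # Current UTC time with tzinfo stripped, matching columns that store
    # naive UTC timestamps (the same expression the old call sites used).
    return datetime.now(UTC).replace(tzinfo=None)
```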

View File

@ -79,18 +79,6 @@ class ProviderNotSupportSpeechToTextError(BaseHTTPException):
code = 400
class DraftWorkflowNotExist(BaseHTTPException):
error_code = "draft_workflow_not_exist"
description = "Draft workflow need to be initialized."

View File

@ -1,4 +1,4 @@
from collections.abc import Sequence
from flask_login import current_user
from flask_restful import Resource, reqparse
@ -12,6 +12,8 @@ from controllers.console.app.error import (
)
from controllers.console.wraps import account_initialization_required, setup_required
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.helper.code_executor.javascript.javascript_code_provider import JavascriptCodeProvider
from core.helper.code_executor.python3.python3_code_provider import Python3CodeProvider
from core.llm_generator.llm_generator import LLMGenerator
from core.model_runtime.errors.invoke import InvokeError
from libs.login import login_required
@ -29,15 +31,12 @@ class RuleGenerateApi(Resource):
args = parser.parse_args()
account = current_user
try:
rules = LLMGenerator.generate_rule_config(
tenant_id=account.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
no_variable=args["no_variable"],
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@ -64,14 +63,12 @@ class RuleCodeGenerateApi(Resource):
args = parser.parse_args()
account = current_user
try:
code_result = LLMGenerator.generate_code(
tenant_id=account.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
code_language=args["code_language"],
)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
@ -114,6 +111,121 @@ class RuleStructuredOutputGenerateApi(Resource):
return structured_output
class InstructionGenerateApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("flow_id", type=str, required=True, default="", location="json")
parser.add_argument("node_id", type=str, required=False, default="", location="json")
parser.add_argument("current", type=str, required=False, default="", location="json")
parser.add_argument("language", type=str, required=False, default="javascript", location="json")
parser.add_argument("instruction", type=str, required=True, nullable=False, location="json")
parser.add_argument("model_config", type=dict, required=True, nullable=False, location="json")
parser.add_argument("ideal_output", type=str, required=False, default="", location="json")
args = parser.parse_args()
code_template = (
Python3CodeProvider.get_default_code()
if args["language"] == "python"
else (JavascriptCodeProvider.get_default_code())
if args["language"] == "javascript"
else ""
)
try:
# Generate from nothing for a workflow node
if (args["current"] == code_template or args["current"] == "") and args["node_id"] != "":
from models import App, db
from services.workflow_service import WorkflowService
app = db.session.query(App).where(App.id == args["flow_id"]).first()
if not app:
return {"error": f"app {args['flow_id']} not found"}, 400
workflow = WorkflowService().get_draft_workflow(app_model=app)
if not workflow:
return {"error": f"workflow {args['flow_id']} not found"}, 400
nodes: Sequence = workflow.graph_dict["nodes"]
node = [node for node in nodes if node["id"] == args["node_id"]]
if len(node) == 0:
return {"error": f"node {args['node_id']} not found"}, 400
node_type = node[0]["data"]["type"]
match node_type:
case "llm":
return LLMGenerator.generate_rule_config(
current_user.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
no_variable=True,
)
case "agent":
return LLMGenerator.generate_rule_config(
current_user.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
no_variable=True,
)
case "code":
return LLMGenerator.generate_code(
tenant_id=current_user.current_tenant_id,
instruction=args["instruction"],
model_config=args["model_config"],
code_language=args["language"],
)
case _:
return {"error": f"invalid node type: {node_type}"}
if args["node_id"] == "" and args["current"] != "": # For legacy app without a workflow
return LLMGenerator.instruction_modify_legacy(
tenant_id=current_user.current_tenant_id,
flow_id=args["flow_id"],
current=args["current"],
instruction=args["instruction"],
model_config=args["model_config"],
ideal_output=args["ideal_output"],
)
if args["node_id"] != "" and args["current"] != "": # For workflow node
return LLMGenerator.instruction_modify_workflow(
tenant_id=current_user.current_tenant_id,
flow_id=args["flow_id"],
node_id=args["node_id"],
current=args["current"],
instruction=args["instruction"],
model_config=args["model_config"],
ideal_output=args["ideal_output"],
)
return {"error": "incompatible parameters"}, 400
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
raise ProviderQuotaExceededError()
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
except InvokeError as e:
raise CompletionRequestError(e.description)
class InstructionGenerationTemplateApi(Resource):
@setup_required
@login_required
@account_initialization_required
def post(self) -> dict:
parser = reqparse.RequestParser()
parser.add_argument("type", type=str, required=True, default=False, location="json")
args = parser.parse_args()
match args["type"]:
case "prompt":
from core.llm_generator.prompts import INSTRUCTION_GENERATE_TEMPLATE_PROMPT
return {"data": INSTRUCTION_GENERATE_TEMPLATE_PROMPT}
case "code":
from core.llm_generator.prompts import INSTRUCTION_GENERATE_TEMPLATE_CODE
return {"data": INSTRUCTION_GENERATE_TEMPLATE_CODE}
case _:
raise ValueError(f"Invalid type: {args['type']}")
api.add_resource(RuleGenerateApi, "/rule-generate")
api.add_resource(RuleCodeGenerateApi, "/rule-code-generate")
api.add_resource(RuleStructuredOutputGenerateApi, "/rule-structured-output-generate")
api.add_resource(InstructionGenerateApi, "/instruction-generate")
api.add_resource(InstructionGenerationTemplateApi, "/instruction-generate/template")
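For orientation, a hypothetical client call against the new instruction-generate route. The host, URL prefix, token, IDs, and model_config contents are placeholders; only the JSON keys come from the parser above:

```python
import requests

resp = requests.post(
    "http://localhost:5001/console/api/instruction-generate",  # assumed prefix
    headers={"Authorization": "Bearer <console-token>"},
    json={
        "flow_id": "<app-uuid>",
        "node_id": "<node-uuid>",
        "current": "",
        "language": "python",
        "instruction": "Tighten the summary prompt",
        "model_config": {"...": "provider-specific settings"},  # placeholder
        "ideal_output": "",
    },
)
print(resp.status_code, resp.json())
```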

View File

@ -0,0 +1,119 @@
import json
from enum import StrEnum
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import NotFound
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.app_fields import app_server_fields
from libs.login import login_required
from models.model import AppMCPServer
class AppMCPServerStatus(StrEnum):
ACTIVE = "active"
INACTIVE = "inactive"
class AppMCPServerController(Resource):
@setup_required
@login_required
@account_initialization_required
@get_app_model
@marshal_with(app_server_fields)
def get(self, app_model):
server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == app_model.id).first()
return server
@setup_required
@login_required
@account_initialization_required
@get_app_model
@marshal_with(app_server_fields)
def post(self, app_model):
if not current_user.is_editor:
raise NotFound()
parser = reqparse.RequestParser()
parser.add_argument("description", type=str, required=False, location="json")
parser.add_argument("parameters", type=dict, required=True, location="json")
args = parser.parse_args()
description = args.get("description")
if not description:
description = app_model.description or ""
server = AppMCPServer(
name=app_model.name,
description=description,
parameters=json.dumps(args["parameters"], ensure_ascii=False),
status=AppMCPServerStatus.ACTIVE,
app_id=app_model.id,
tenant_id=current_user.current_tenant_id,
server_code=AppMCPServer.generate_server_code(16),
)
db.session.add(server)
db.session.commit()
return server
@setup_required
@login_required
@account_initialization_required
@get_app_model
@marshal_with(app_server_fields)
def put(self, app_model):
if not current_user.is_editor:
raise NotFound()
parser = reqparse.RequestParser()
parser.add_argument("id", type=str, required=True, location="json")
parser.add_argument("description", type=str, required=False, location="json")
parser.add_argument("parameters", type=dict, required=True, location="json")
parser.add_argument("status", type=str, required=False, location="json")
args = parser.parse_args()
server = db.session.query(AppMCPServer).where(AppMCPServer.id == args["id"]).first()
if not server:
raise NotFound()
description = args.get("description")
if description is None:
pass
elif not description:
server.description = app_model.description or ""
else:
server.description = description
server.parameters = json.dumps(args["parameters"], ensure_ascii=False)
if args["status"]:
if args["status"] not in [status.value for status in AppMCPServerStatus]:
raise ValueError("Invalid status")
server.status = args["status"]
db.session.commit()
return server
class AppMCPServerRefreshController(Resource):
@setup_required
@login_required
@account_initialization_required
@marshal_with(app_server_fields)
def get(self, server_id):
if not current_user.is_editor:
raise NotFound()
server = (
db.session.query(AppMCPServer)
.where(AppMCPServer.id == server_id)
.where(AppMCPServer.tenant_id == current_user.current_tenant_id)
.first()
)
if not server:
raise NotFound()
server.server_code = AppMCPServer.generate_server_code(16)
db.session.commit()
return server
api.add_resource(AppMCPServerController, "/apps/<uuid:app_id>/server")
api.add_resource(AppMCPServerRefreshController, "/apps/<uuid:server_id>/server/refresh")
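The status guard above rebuilds the list of enum values on every request; with a StrEnum the same check can be written directly, as this standalone sketch shows (value-based containment on the enum class needs Python 3.12+, the list-comprehension form works everywhere):

```python
from enum import StrEnum

class AppMCPServerStatus(StrEnum):
    ACTIVE = "active"
    INACTIVE = "inactive"

def validate_status(value: str) -> str:
    # Equivalent to: value not in [s.value for s in AppMCPServerStatus]
    if value not in AppMCPServerStatus:  # value containment, Python 3.12+
        raise ValueError("Invalid status")
    return value

print(validate_status("active"))  # -> "active"
```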

View File

@ -55,7 +55,7 @@ class ChatMessageListApi(Resource):
conversation = (
db.session.query(Conversation)
.filter(Conversation.id == args["conversation_id"], Conversation.app_id == app_model.id)
.where(Conversation.id == args["conversation_id"], Conversation.app_id == app_model.id)
.first()
)
@ -65,7 +65,7 @@ class ChatMessageListApi(Resource):
if args["first_id"]:
first_message = (
db.session.query(Message)
.where(Message.conversation_id == conversation.id, Message.id == args["first_id"])
.first()
)
@ -74,7 +74,7 @@ class ChatMessageListApi(Resource):
history_messages = (
db.session.query(Message)
.where(
Message.conversation_id == conversation.id,
Message.created_at < first_message.created_at,
Message.id != first_message.id,
@ -86,7 +86,7 @@ class ChatMessageListApi(Resource):
else:
history_messages = (
db.session.query(Message)
.where(Message.conversation_id == conversation.id)
.order_by(Message.created_at.desc())
.limit(args["limit"])
.all()
@ -97,7 +97,7 @@ class ChatMessageListApi(Resource):
current_page_first_message = history_messages[-1]
rest_count = (
db.session.query(Message)
.where(
Message.conversation_id == conversation.id,
Message.created_at < current_page_first_message.created_at,
Message.id != current_page_first_message.id,
@ -183,7 +183,7 @@ class MessageAnnotationCountApi(Resource):
@account_initialization_required
@get_app_model
def get(self, app_model):
count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_model.id).count()
return {"count": count}
@ -230,7 +230,7 @@ class MessageApi(Resource):
def get(self, app_model, message_id):
message_id = str(message_id)
message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first()
if not message:
raise NotFound("Message Not Exists.")

View File

@ -42,7 +42,7 @@ class ModelConfigResource(Resource):
if app_model.mode == AppMode.AGENT_CHAT.value or app_model.is_agent:
# get original app model config
original_app_model_config = (
db.session.query(AppModelConfig).where(AppModelConfig.id == app_model.app_model_config_id).first()
)
if original_app_model_config is None:
raise ValueError("Original app model config not found")

View File

@ -1,5 +1,3 @@
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import Forbidden, NotFound
@ -10,6 +8,7 @@ from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from extensions.ext_database import db
from fields.app_fields import app_site_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import Site
@ -50,7 +49,7 @@ class AppSite(Resource):
if not current_user.is_editor:
raise Forbidden()
site = db.session.query(Site).where(Site.app_id == app_model.id).first()
if not site:
raise NotFound
@ -77,7 +76,7 @@ class AppSite(Resource):
setattr(site, attr_name, value)
site.updated_by = current_user.id
site.updated_at = naive_utc_now()
db.session.commit()
return site
@ -94,14 +93,14 @@ class AppSiteAccessTokenReset(Resource):
if not current_user.is_admin_or_owner:
raise Forbidden()
site = db.session.query(Site).where(Site.app_id == app_model.id).first()
if not site:
raise NotFound
site.code = Site.generate_code(16)
site.updated_by = current_user.id
site.updated_at = naive_utc_now()
db.session.commit()
return site

View File

@ -2,6 +2,7 @@ from datetime import datetime
from decimal import Decimal
import pytz
import sqlalchemy as sa
from flask import jsonify
from flask_login import current_user
from flask_restful import Resource, reqparse
@ -9,10 +10,11 @@ from flask_restful import Resource, reqparse
from controllers.console import api
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
from extensions.ext_database import db
from libs.helper import DatetimeString
from libs.login import login_required
from models import AppMode, Message
class DailyMessageStatistic(Resource):
@ -65,7 +67,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "message_count": i.message_count})
@ -85,46 +87,41 @@ class DailyConversationStatistic(Resource):
parser.add_argument("end", type=DatetimeString("%Y-%m-%d %H:%M"), location="args")
args = parser.parse_args()
sql_query = """SELECT
DATE(DATE_TRUNC('day', created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz )) AS date,
COUNT(DISTINCT messages.conversation_id) AS conversation_count
FROM
messages
WHERE
app_id = :app_id"""
arg_dict = {"tz": account.timezone, "app_id": app_model.id}
timezone = pytz.timezone(account.timezone)
utc_timezone = pytz.utc
stmt = (
sa.select(
sa.func.date(
sa.func.date_trunc("day", sa.text("created_at AT TIME ZONE 'UTC' AT TIME ZONE :tz"))
).label("date"),
sa.func.count(sa.distinct(Message.conversation_id)).label("conversation_count"),
)
.select_from(Message)
.where(Message.app_id == app_model.id, Message.invoke_from != InvokeFrom.DEBUGGER.value)
)
if args["start"]:
start_datetime = datetime.strptime(args["start"], "%Y-%m-%d %H:%M")
start_datetime = start_datetime.replace(second=0)
start_datetime_timezone = timezone.localize(start_datetime)
start_datetime_utc = start_datetime_timezone.astimezone(utc_timezone)
sql_query += " AND created_at >= :start"
arg_dict["start"] = start_datetime_utc
stmt = stmt.where(Message.created_at >= start_datetime_utc)
if args["end"]:
end_datetime = datetime.strptime(args["end"], "%Y-%m-%d %H:%M")
end_datetime = end_datetime.replace(second=0)
end_datetime_timezone = timezone.localize(end_datetime)
end_datetime_utc = end_datetime_timezone.astimezone(utc_timezone)
stmt = stmt.where(Message.created_at < end_datetime_utc)
sql_query += " AND created_at < :end"
arg_dict["end"] = end_datetime_utc
sql_query += " GROUP BY date ORDER BY date"
stmt = stmt.group_by("date").order_by("date")
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(db.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "conversation_count": i.conversation_count})
rs = conn.execute(stmt, {"tz": account.timezone})
for row in rs:
response_data.append({"date": str(row.date), "conversation_count": row.conversation_count})
return jsonify({"data": response_data})
@ -179,7 +176,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "terminal_count": i.terminal_count})
@ -237,7 +234,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{"date": str(i.date), "token_count": i.token_count, "total_price": i.total_price, "currency": "USD"}
@ -313,7 +310,7 @@ ORDER BY
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{"date": str(i.date), "interactions": float(i.interactions.quantize(Decimal("0.01")))}
@ -376,7 +373,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{
@ -438,7 +435,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "latency": round(i.latency * 1000, 4)})
@ -498,7 +495,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "tps": round(i.tokens_per_second, 4)})

View File

@ -1,5 +1,6 @@
import json
import logging
from collections.abc import Sequence
from typing import cast
from flask import abort, request
@ -18,10 +19,13 @@ from controllers.console.app.error import (
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvokeRateLimitError as InvokeRateLimitHttpError
from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
from core.app.apps.base_app_queue_manager import AppQueueManager
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file.models import File
from core.helper.trace_id_helper import get_external_trace_id
from extensions.ext_database import db
from factories import file_factory, variable_factory
from fields.workflow_fields import workflow_fields, workflow_pagination_fields
from fields.workflow_run_fields import workflow_run_node_execution_fields
from libs import helper
@ -30,6 +34,7 @@ from libs.login import current_user, login_required
from models import App
from models.account import Account
from models.model import AppMode
from models.workflow import Workflow
from services.app_generate_service import AppGenerateService
from services.errors.app import WorkflowHashNotEqualError
from services.errors.llm import InvokeRateLimitError
@ -38,6 +43,24 @@ from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseE
logger = logging.getLogger(__name__)
# TODO(QuantumGhost): Refactor existing node run API to handle file parameter parsing
# at the controller level rather than in the workflow logic. This would improve separation
# of concerns and make the code more maintainable.
def _parse_file(workflow: Workflow, files: list[dict] | None = None) -> Sequence[File]:
files = files or []
file_extra_config = FileUploadConfigManager.convert(workflow.features_dict, is_vision=False)
file_objs: Sequence[File] = []
if file_extra_config is None:
return file_objs
file_objs = file_factory.build_from_mappings(
mappings=files,
tenant_id=workflow.tenant_id,
config=file_extra_config,
)
return file_objs
class DraftWorkflowApi(Resource):
@setup_required
@login_required
@ -163,6 +186,10 @@ class AdvancedChatDraftWorkflowRunApi(Resource):
args = parser.parse_args()
external_trace_id = get_external_trace_id(request)
if external_trace_id:
args["external_trace_id"] = external_trace_id
try:
response = AppGenerateService.generate(
app_model=app_model, user=current_user, args=args, invoke_from=InvokeFrom.DEBUGGER, streaming=True
@ -351,6 +378,10 @@ class DraftWorkflowRunApi(Resource):
parser.add_argument("files", type=list, required=False, location="json")
args = parser.parse_args()
external_trace_id = get_external_trace_id(request)
if external_trace_id:
args["external_trace_id"] = external_trace_id
try:
response = AppGenerateService.generate(
app_model=app_model,
@ -402,15 +433,30 @@ class DraftWorkflowNodeRunApi(Resource):
parser = reqparse.RequestParser()
parser.add_argument("inputs", type=dict, required=True, nullable=False, location="json")
parser.add_argument("query", type=str, required=False, location="json", default="")
parser.add_argument("files", type=list, location="json", default=[])
args = parser.parse_args()
inputs = args.get("inputs")
if inputs == None:
user_inputs = args.get("inputs")
if user_inputs is None:
raise ValueError("missing inputs")
workflow_srv = WorkflowService()
# fetch draft workflow by app_model
draft_workflow = workflow_srv.get_draft_workflow(app_model=app_model)
if not draft_workflow:
raise ValueError("Workflow not initialized")
files = _parse_file(draft_workflow, args.get("files"))
workflow_service = WorkflowService()
workflow_node_execution = workflow_service.run_draft_workflow_node(
app_model=app_model,
draft_workflow=draft_workflow,
node_id=node_id,
user_inputs=user_inputs,
account=current_user,
query=args.get("query", ""),
files=files,
)
return workflow_node_execution
@ -731,6 +777,27 @@ class WorkflowByIdApi(Resource):
return None, 204
class DraftWorkflowNodeLastRunApi(Resource):
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
@marshal_with(workflow_run_node_execution_fields)
def get(self, app_model: App, node_id: str):
srv = WorkflowService()
workflow = srv.get_draft_workflow(app_model)
if not workflow:
raise NotFound("Workflow not found")
node_exec = srv.get_node_last_run(
app_model=app_model,
workflow=workflow,
node_id=node_id,
)
if node_exec is None:
raise NotFound("last run not found")
return node_exec
api.add_resource(
DraftWorkflowApi,
"/apps/<uuid:app_id>/workflows/draft",
@ -795,3 +862,7 @@ api.add_resource(
WorkflowByIdApi,
"/apps/<uuid:app_id>/workflows/<string:workflow_id>",
)
api.add_resource(
DraftWorkflowNodeLastRunApi,
"/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/last-run",
)
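A hypothetical request against the new last-run route registered above; host, prefix, token, and IDs are placeholders:

```python
import requests

url = (
    "http://localhost:5001/console/api"  # assumed console prefix
    "/apps/<app-uuid>/workflows/draft/nodes/<node-id>/last-run"
)
resp = requests.get(url, headers={"Authorization": "Bearer <console-token>"})
if resp.status_code == 404:
    print("no run recorded for this node yet")
else:
    print(resp.json())
```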

View File

@ -34,6 +34,20 @@ class WorkflowAppLogApi(Resource):
parser.add_argument(
"created_at__after", type=str, location="args", help="Filter logs created after this timestamp"
)
parser.add_argument(
"created_by_end_user_session_id",
type=str,
location="args",
required=False,
default=None,
)
parser.add_argument(
"created_by_account",
type=str,
location="args",
required=False,
default=None,
)
parser.add_argument("page", type=int_range(1, 99999), default=1, location="args")
parser.add_argument("limit", type=int_range(1, 100), default=20, location="args")
args = parser.parse_args()
@ -57,6 +71,8 @@ class WorkflowAppLogApi(Resource):
created_at_after=args.created_at__after,
page=args.page,
limit=args.limit,
created_by_end_user_session_id=args.created_by_end_user_session_id,
created_by_account=args.created_by_account,
)
return workflow_app_log_pagination

View File

@ -0,0 +1,426 @@
import logging
from typing import Any, NoReturn
from flask import Response
from flask_restful import Resource, fields, inputs, marshal, marshal_with, reqparse
from sqlalchemy.orm import Session
from werkzeug.exceptions import Forbidden
from controllers.console import api
from controllers.console.app.error import (
DraftWorkflowNotExist,
)
from controllers.console.app.wraps import get_app_model
from controllers.console.wraps import account_initialization_required, setup_required
from controllers.web.error import InvalidArgumentError, NotFoundError
from core.variables.segment_group import SegmentGroup
from core.variables.segments import ArrayFileSegment, FileSegment, Segment
from core.variables.types import SegmentType
from core.workflow.constants import CONVERSATION_VARIABLE_NODE_ID, SYSTEM_VARIABLE_NODE_ID
from factories.file_factory import build_from_mapping, build_from_mappings
from factories.variable_factory import build_segment_with_type
from libs.login import current_user, login_required
from models import App, AppMode, db
from models.workflow import WorkflowDraftVariable
from services.workflow_draft_variable_service import WorkflowDraftVariableList, WorkflowDraftVariableService
from services.workflow_service import WorkflowService
logger = logging.getLogger(__name__)
def _convert_values_to_json_serializable_object(value: Segment) -> Any:
if isinstance(value, FileSegment):
return value.value.model_dump()
elif isinstance(value, ArrayFileSegment):
return [i.model_dump() for i in value.value]
elif isinstance(value, SegmentGroup):
return [_convert_values_to_json_serializable_object(i) for i in value.value]
else:
return value.value
def _serialize_var_value(variable: WorkflowDraftVariable) -> Any:
value = variable.get_value()
# create a copy of the value to avoid affecting the model cache.
value = value.model_copy(deep=True)
# Refresh the url signature before returning it to client.
if isinstance(value, FileSegment):
file = value.value
file.remote_url = file.generate_url()
elif isinstance(value, ArrayFileSegment):
files = value.value
for file in files:
file.remote_url = file.generate_url()
return _convert_values_to_json_serializable_object(value)
def _create_pagination_parser():
parser = reqparse.RequestParser()
parser.add_argument(
"page",
type=inputs.int_range(1, 100_000),
required=False,
default=1,
location="args",
help="the page of data requested",
)
parser.add_argument("limit", type=inputs.int_range(1, 100), required=False, default=20, location="args")
return parser
def _serialize_variable_type(workflow_draft_var: WorkflowDraftVariable) -> str:
value_type = workflow_draft_var.value_type
return value_type.exposed_type().value
_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS = {
"id": fields.String,
"type": fields.String(attribute=lambda model: model.get_variable_type()),
"name": fields.String,
"description": fields.String,
"selector": fields.List(fields.String, attribute=lambda model: model.get_selector()),
"value_type": fields.String(attribute=_serialize_variable_type),
"edited": fields.Boolean(attribute=lambda model: model.edited),
"visible": fields.Boolean,
}
_WORKFLOW_DRAFT_VARIABLE_FIELDS = dict(
_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS,
value=fields.Raw(attribute=_serialize_var_value),
)
_WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS = {
"id": fields.String,
"type": fields.String(attribute=lambda _: "env"),
"name": fields.String,
"description": fields.String,
"selector": fields.List(fields.String, attribute=lambda model: model.get_selector()),
"value_type": fields.String(attribute=_serialize_variable_type),
"edited": fields.Boolean(attribute=lambda model: model.edited),
"visible": fields.Boolean,
}
_WORKFLOW_DRAFT_ENV_VARIABLE_LIST_FIELDS = {
"items": fields.List(fields.Nested(_WORKFLOW_DRAFT_ENV_VARIABLE_FIELDS)),
}
def _get_items(var_list: WorkflowDraftVariableList) -> list[WorkflowDraftVariable]:
return var_list.variables
_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS = {
"items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_WITHOUT_VALUE_FIELDS), attribute=_get_items),
"total": fields.Raw(),
}
_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS = {
"items": fields.List(fields.Nested(_WORKFLOW_DRAFT_VARIABLE_FIELDS), attribute=_get_items),
}
def _api_prerequisite(f):
"""Common prerequisites for all draft workflow variable APIs.
It ensures the following conditions are satisfied:
- Dify has been properly set up.
- The request user has logged in and initialized.
- The requested app is a workflow or a chat flow.
- The request user has the edit permission for the app.
"""
@setup_required
@login_required
@account_initialization_required
@get_app_model(mode=[AppMode.ADVANCED_CHAT, AppMode.WORKFLOW])
def wrapper(*args, **kwargs):
if not current_user.is_editor:
raise Forbidden()
return f(*args, **kwargs)
return wrapper
class WorkflowVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_WITHOUT_VALUE_FIELDS)
def get(self, app_model: App):
"""
Get draft workflow
"""
parser = _create_pagination_parser()
args = parser.parse_args()
# fetch draft workflow by app_model
workflow_service = WorkflowService()
workflow_exist = workflow_service.is_workflow_exist(app_model=app_model)
if not workflow_exist:
raise DraftWorkflowNotExist()
# fetch draft workflow by app_model
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
workflow_vars = draft_var_srv.list_variables_without_values(
app_id=app_model.id,
page=args.page,
limit=args.limit,
)
return workflow_vars
@_api_prerequisite
def delete(self, app_model: App):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
draft_var_srv.delete_workflow_variables(app_model.id)
db.session.commit()
return Response("", 204)
def validate_node_id(node_id: str) -> NoReturn | None:
if node_id in [
CONVERSATION_VARIABLE_NODE_ID,
SYSTEM_VARIABLE_NODE_ID,
]:
# NOTE(QuantumGhost): While we store the system and conversation variables as node variables
# with specific `node_id` values in the database, we still want to keep the APIs separate. By
# disallowing access to system and conversation variables in `WorkflowDraftNodeVariableListApi`,
# we mitigate the risk that users of the API come to depend on its implementation details.
#
# ref: [Hyrum's Law](https://www.hyrumslaw.com/)
raise InvalidArgumentError(
f"invalid node_id, please use correspond api for conversation and system variables, node_id={node_id}",
)
return None
class NodeVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App, node_id: str):
validate_node_id(node_id)
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
node_vars = draft_var_srv.list_node_variables(app_model.id, node_id)
return node_vars
@_api_prerequisite
def delete(self, app_model: App, node_id: str):
validate_node_id(node_id)
srv = WorkflowDraftVariableService(db.session())
srv.delete_node_variables(app_model.id, node_id)
db.session.commit()
return Response("", 204)
class VariableApi(Resource):
_PATCH_NAME_FIELD = "name"
_PATCH_VALUE_FIELD = "value"
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def get(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_model.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
return variable
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_FIELDS)
def patch(self, app_model: App, variable_id: str):
# Request payload for file types:
#
# Local File:
#
# {
# "type": "image",
# "transfer_method": "local_file",
# "url": "",
# "upload_file_id": "daded54f-72c7-4f8e-9d18-9b0abdd9f190"
# }
#
# Remote File:
#
#
# {
# "type": "image",
# "transfer_method": "remote_url",
# "url": "http://127.0.0.1:5001/files/1602650a-4fe4-423c-85a2-af76c083e3c4/file-preview?timestamp=1750041099&nonce=...&sign=...=",
# "upload_file_id": "1602650a-4fe4-423c-85a2-af76c083e3c4"
# }
parser = reqparse.RequestParser()
parser.add_argument(self._PATCH_NAME_FIELD, type=str, required=False, nullable=True, location="json")
# Parse 'value' field as-is to maintain its original data structure
parser.add_argument(self._PATCH_VALUE_FIELD, type=lambda x: x, required=False, nullable=True, location="json")
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
args = parser.parse_args(strict=True)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_model.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
new_name = args.get(self._PATCH_NAME_FIELD, None)
raw_value = args.get(self._PATCH_VALUE_FIELD, None)
if new_name is None and raw_value is None:
return variable
new_value = None
if raw_value is not None:
if variable.value_type == SegmentType.FILE:
if not isinstance(raw_value, dict):
raise InvalidArgumentError(description=f"expected dict for file, got {type(raw_value)}")
raw_value = build_from_mapping(mapping=raw_value, tenant_id=app_model.tenant_id)
elif variable.value_type == SegmentType.ARRAY_FILE:
if not isinstance(raw_value, list):
raise InvalidArgumentError(description=f"expected list for files, got {type(raw_value)}")
if len(raw_value) > 0 and not isinstance(raw_value[0], dict):
raise InvalidArgumentError(description=f"expected dict for files[0], got {type(raw_value)}")
raw_value = build_from_mappings(mappings=raw_value, tenant_id=app_model.tenant_id)
new_value = build_segment_with_type(variable.value_type, raw_value)
draft_var_srv.update_variable(variable, name=new_name, value=new_value)
db.session.commit()
return variable
@_api_prerequisite
def delete(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_model.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
draft_var_srv.delete_variable(variable)
db.session.commit()
return Response("", 204)
class VariableResetApi(Resource):
@_api_prerequisite
def put(self, app_model: App, variable_id: str):
draft_var_srv = WorkflowDraftVariableService(
session=db.session(),
)
workflow_srv = WorkflowService()
draft_workflow = workflow_srv.get_draft_workflow(app_model)
if draft_workflow is None:
raise NotFoundError(
f"Draft workflow not found, app_id={app_model.id}",
)
variable = draft_var_srv.get_variable(variable_id=variable_id)
if variable is None:
raise NotFoundError(description=f"variable not found, id={variable_id}")
if variable.app_id != app_model.id:
raise NotFoundError(description=f"variable not found, id={variable_id}")
resetted = draft_var_srv.reset_variable(draft_workflow, variable)
db.session.commit()
if resetted is None:
return Response("", 204)
else:
return marshal(resetted, _WORKFLOW_DRAFT_VARIABLE_FIELDS)
def _get_variable_list(app_model: App, node_id) -> WorkflowDraftVariableList:
with Session(bind=db.engine, expire_on_commit=False) as session:
draft_var_srv = WorkflowDraftVariableService(
session=session,
)
if node_id == CONVERSATION_VARIABLE_NODE_ID:
draft_vars = draft_var_srv.list_conversation_variables(app_model.id)
elif node_id == SYSTEM_VARIABLE_NODE_ID:
draft_vars = draft_var_srv.list_system_variables(app_model.id)
else:
draft_vars = draft_var_srv.list_node_variables(app_id=app_model.id, node_id=node_id)
return draft_vars
class ConversationVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App):
# NOTE(QuantumGhost): Prefill conversation variables into the draft variables table
# so their IDs can be returned to the caller.
workflow_srv = WorkflowService()
draft_workflow = workflow_srv.get_draft_workflow(app_model)
if draft_workflow is None:
raise NotFoundError(description=f"draft workflow not found, id={app_model.id}")
draft_var_srv = WorkflowDraftVariableService(db.session())
draft_var_srv.prefill_conversation_variable_default_values(draft_workflow)
db.session.commit()
return _get_variable_list(app_model, CONVERSATION_VARIABLE_NODE_ID)
class SystemVariableCollectionApi(Resource):
@_api_prerequisite
@marshal_with(_WORKFLOW_DRAFT_VARIABLE_LIST_FIELDS)
def get(self, app_model: App):
return _get_variable_list(app_model, SYSTEM_VARIABLE_NODE_ID)
class EnvironmentVariableCollectionApi(Resource):
@_api_prerequisite
def get(self, app_model: App):
"""
Get draft workflow
"""
# fetch draft workflow by app_model
workflow_service = WorkflowService()
workflow = workflow_service.get_draft_workflow(app_model=app_model)
if workflow is None:
raise DraftWorkflowNotExist()
env_vars = workflow.environment_variables
env_vars_list = []
for v in env_vars:
env_vars_list.append(
{
"id": v.id,
"type": "env",
"name": v.name,
"description": v.description,
"selector": v.selector,
"value_type": v.value_type.exposed_type().value,
"value": v.value,
# Do not track edited for env vars.
"edited": False,
"visible": True,
"editable": True,
}
)
return {"items": env_vars_list}
api.add_resource(
WorkflowVariableCollectionApi,
"/apps/<uuid:app_id>/workflows/draft/variables",
)
api.add_resource(NodeVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/nodes/<string:node_id>/variables")
api.add_resource(VariableApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>")
api.add_resource(VariableResetApi, "/apps/<uuid:app_id>/workflows/draft/variables/<uuid:variable_id>/reset")
api.add_resource(ConversationVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/conversation-variables")
api.add_resource(SystemVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/system-variables")
api.add_resource(EnvironmentVariableCollectionApi, "/apps/<uuid:app_id>/workflows/draft/environment-variables")
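The comment block in VariableApi.patch documents the accepted file payload shapes; a hypothetical client update built from the Local File example (host, prefix, token, and IDs are placeholders):

```python
import requests

url = (
    "http://localhost:5001/console/api"  # assumed console prefix
    "/apps/<app-uuid>/workflows/draft/variables/<variable-uuid>"
)
payload = {
    "value": {
        "type": "image",
        "transfer_method": "local_file",
        "url": "",
        "upload_file_id": "<upload-file-uuid>",
    }
}
resp = requests.patch(url, json=payload, headers={"Authorization": "Bearer <console-token>"})
print(resp.status_code)
```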

View File

@ -2,6 +2,7 @@ from datetime import datetime
from decimal import Decimal
import pytz
import sqlalchemy as sa
from flask import jsonify
from flask_login import current_user
from flask_restful import Resource, reqparse
@ -71,7 +72,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "runs": i.runs})
@ -133,7 +134,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append({"date": str(i.date), "terminal_count": i.terminal_count})
@ -195,7 +196,7 @@ WHERE
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{
@ -277,7 +278,7 @@ GROUP BY
response_data = []
with db.engine.begin() as conn:
rs = conn.execute(sa.text(sql_query), arg_dict)
for i in rs:
response_data.append(
{"date": str(i.date), "interactions": float(i.interactions.quantize(Decimal("0.01")))}

View File

@ -8,6 +8,15 @@ from libs.login import current_user
from models import App, AppMode
def _load_app_model(app_id: str) -> Optional[App]:
app_model = (
db.session.query(App)
.where(App.id == app_id, App.tenant_id == current_user.current_tenant_id, App.status == "normal")
.first()
)
return app_model
def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[AppMode], None] = None):
def decorator(view_func):
@wraps(view_func)
@ -20,18 +29,12 @@ def get_app_model(view: Optional[Callable] = None, *, mode: Union[AppMode, list[
del kwargs["app_id"]
app_model = _load_app_model(app_id)
if not app_model:
raise AppNotFoundError()
app_mode = AppMode.value_of(app_model.mode)
if app_mode == AppMode.CHANNEL:
raise AppNotFoundError()
if mode is not None:
if isinstance(mode, list):

View File

@ -1,5 +1,3 @@
from flask import request
from flask_restful import Resource, reqparse
@ -7,6 +5,7 @@ from constants.languages import supported_language
from controllers.console import api
from controllers.console.error import AlreadyActivateError
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import StrLen, email, extract_remote_ip, timezone
from models.account import AccountStatus
from services.account_service import AccountService, RegisterService
@ -65,7 +64,7 @@ class ActivateApi(Resource):
account.timezone = args["timezone"]
account.interface_theme = "light"
account.status = AccountStatus.ACTIVE.value
account.initialized_at = naive_utc_now()
db.session.commit()
token_pair = AccountService.login(account, ip_address=extract_remote_ip(request))

View File

@ -41,7 +41,7 @@ class OAuthDataSource(Resource):
if not internal_secret:
return ({"error": "Internal secret is not set"},)
oauth_provider.save_internal_access_token(internal_secret)
return {"data": ""}
return {"data": "internal"}
else:
auth_url = oauth_provider.get_authorization_url()
return {"data": auth_url}, 200
@ -81,7 +81,7 @@ class OAuthDataSourceBinding(Resource):
oauth_provider.get_access_token(code)
except requests.exceptions.HTTPError as e:
logging.exception(
f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}"
"An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text
)
return {"error": "OAuth data source process failed"}, 400
@ -103,7 +103,9 @@ class OAuthDataSourceSync(Resource):
try:
oauth_provider.sync_data_source(binding_id)
except requests.exceptions.HTTPError as e:
logging.exception(f"An error occurred during the OAuthCallback process with {provider}: {e.response.text}")
logging.exception(
"An error occurred during the OAuthCallback process with %s: %s", provider, e.response.text
)
return {"error": "OAuth data source process failed"}, 400
return {"result": "success"}, 200

View File

@ -27,7 +27,19 @@ class InvalidTokenError(BaseHTTPException):
class PasswordResetRateLimitExceededError(BaseHTTPException):
error_code = "password_reset_rate_limit_exceeded"
description = "Too many password reset emails have been sent. Please try again in 1 minutes."
description = "Too many password reset emails have been sent. Please try again in 1 minute."
code = 429
class EmailChangeRateLimitExceededError(BaseHTTPException):
error_code = "email_change_rate_limit_exceeded"
description = "Too many email change emails have been sent. Please try again in 1 minute."
code = 429
class OwnerTransferRateLimitExceededError(BaseHTTPException):
error_code = "owner_transfer_rate_limit_exceeded"
description = "Too many owner transfer emails have been sent. Please try again in 1 minute."
code = 429
@ -65,3 +77,39 @@ class EmailPasswordResetLimitError(BaseHTTPException):
error_code = "email_password_reset_limit"
description = "Too many failed password reset attempts. Please try again in 24 hours."
code = 429
class EmailChangeLimitError(BaseHTTPException):
error_code = "email_change_limit"
description = "Too many failed email change attempts. Please try again in 24 hours."
code = 429
class EmailAlreadyInUseError(BaseHTTPException):
error_code = "email_already_in_use"
description = "A user with this email already exists."
code = 400
class OwnerTransferLimitError(BaseHTTPException):
error_code = "owner_transfer_limit"
description = "Too many failed owner transfer attempts. Please try again in 24 hours."
code = 429
class NotOwnerError(BaseHTTPException):
error_code = "not_owner"
description = "You are not the owner of the workspace."
code = 400
class CannotTransferOwnerToSelfError(BaseHTTPException):
error_code = "cannot_transfer_owner_to_self"
description = "You cannot transfer ownership to yourself."
code = 400
class MemberNotInTenantError(BaseHTTPException):
error_code = "member_not_in_tenant"
description = "The member is not in the workspace."
code = 400

View File

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from typing import Optional
import requests
@ -13,6 +12,7 @@ from configs import dify_config
from constants.languages import languages
from events.tenant_event import tenant_was_created
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from libs.helper import extract_remote_ip
from libs.oauth import GitHubOAuth, GoogleOAuth, OAuthUserInfo
from models import Account
@ -80,7 +80,7 @@ class OAuthCallback(Resource):
user_info = oauth_provider.get_user_info(token)
except requests.exceptions.RequestException as e:
error_text = e.response.text if e.response else str(e)
logging.exception(f"An error occurred during the OAuth process with {provider}: {error_text}")
logging.exception("An error occurred during the OAuth process with %s: %s", provider, error_text)
return {"error": "OAuth process failed"}, 400
if invite_token and RegisterService.is_valid_invite_token(invite_token):
@ -110,7 +110,7 @@ class OAuthCallback(Resource):
if account.status == AccountStatus.PENDING.value:
account.status = AccountStatus.ACTIVE.value
account.initialized_at = datetime.now(UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
try:

View File

@ -1,4 +1,3 @@
import datetime
import json
from flask import request
@ -15,6 +14,7 @@ from core.rag.extractor.entity.extract_setting import ExtractSetting
from core.rag.extractor.notion_extractor import NotionExtractor
from extensions.ext_database import db
from fields.data_source_fields import integrate_list_fields, integrate_notion_info_list_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import DataSourceOauthBinding, Document
from services.dataset_service import DatasetService, DocumentService
@ -30,7 +30,7 @@ class DataSourceApi(Resource):
# get workspace data source integrates
data_source_integrates = (
db.session.query(DataSourceOauthBinding)
.filter(
.where(
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.disabled == False,
)
@ -88,7 +88,7 @@ class DataSourceApi(Resource):
if action == "enable":
if data_source_binding.disabled:
data_source_binding.disabled = False
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.add(data_source_binding)
db.session.commit()
else:
@ -97,7 +97,7 @@ class DataSourceApi(Resource):
if action == "disable":
if not data_source_binding.disabled:
data_source_binding.disabled = True
data_source_binding.updated_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
data_source_binding.updated_at = naive_utc_now()
db.session.add(data_source_binding)
db.session.commit()
else:
@ -171,7 +171,7 @@ class DataSourceNotionApi(Resource):
page_id = str(page_id)
with Session(db.engine) as session:
data_source_binding = session.execute(
select(DataSourceOauthBinding).filter(
select(DataSourceOauthBinding).where(
db.and_(
DataSourceOauthBinding.tenant_id == current_user.current_tenant_id,
DataSourceOauthBinding.provider == "notion",
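From here on, most hunks are the same SQLAlchemy 2.0-style migration: legacy Query.filter(...) becomes .where(...), on both session queries and select() statements. The two are synonyms on Query, but .where is the spelling shared with select(), so the codebase converges on it. A self-contained sketch under SQLAlchemy 2.x:

import sqlalchemy as sa
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class Binding(Base):
    __tablename__ = "bindings"
    id: Mapped[int] = mapped_column(primary_key=True)
    provider: Mapped[str]
    disabled: Mapped[bool] = mapped_column(default=False)

engine = sa.create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Binding(provider="notion"))
    session.commit()
    # 2.0-style: the same .where() works on select() and on session.query().
    stmt = sa.select(Binding).where(Binding.provider == "notion", Binding.disabled == False)
    print(session.execute(stmt).scalars().one().provider)  # -> notion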

View File

@ -41,7 +41,7 @@ def _validate_name(name):
def _validate_description_length(description):
if len(description) > 400:
if description and len(description) > 400:
raise ValueError("Description cannot exceed 400 characters.")
return description
@ -113,7 +113,7 @@ class DatasetListApi(Resource):
)
parser.add_argument(
"description",
type=str,
type=_validate_description_length,
nullable=True,
required=False,
default="",
@ -211,10 +211,6 @@ class DatasetApi(Resource):
else:
data["embedding_available"] = True
if data.get("permission") == "partial_members":
part_users_list = DatasetPermissionService.get_dataset_partial_member_list(dataset_id_str)
data.update({"partial_member_list": part_users_list})
return data, 200
@setup_required
@ -416,7 +412,7 @@ class DatasetIndexingEstimateApi(Resource):
file_ids = args["info_list"]["file_info_list"]["file_ids"]
file_details = (
db.session.query(UploadFile)
.filter(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids))
.where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id.in_(file_ids))
.all()
)
@ -521,14 +517,14 @@ class DatasetIndexingStatusApi(Resource):
dataset_id = str(dataset_id)
documents = (
db.session.query(Document)
.filter(Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id)
.where(Document.dataset_id == dataset_id, Document.tenant_id == current_user.current_tenant_id)
.all()
)
documents_status = []
for document in documents:
completed_segments = (
db.session.query(DocumentSegment)
.filter(
.where(
DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != "re_segment",
@ -537,7 +533,7 @@ class DatasetIndexingStatusApi(Resource):
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.count()
)
# Create a dictionary with document attributes and additional fields
@ -572,7 +568,7 @@ class DatasetApiKeyApi(Resource):
def get(self):
keys = (
db.session.query(ApiToken)
.filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
.where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
.all()
)
return {"items": keys}
@ -588,7 +584,7 @@ class DatasetApiKeyApi(Resource):
current_key_count = (
db.session.query(ApiToken)
.filter(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
.where(ApiToken.type == self.resource_type, ApiToken.tenant_id == current_user.current_tenant_id)
.count()
)
@ -624,7 +620,7 @@ class DatasetApiDeleteApi(Resource):
key = (
db.session.query(ApiToken)
.filter(
.where(
ApiToken.tenant_id == current_user.current_tenant_id,
ApiToken.type == self.resource_type,
ApiToken.id == api_key_id,
@ -635,7 +631,7 @@ class DatasetApiDeleteApi(Resource):
if key is None:
flask_restful.abort(404, message="API key not found")
db.session.query(ApiToken).filter(ApiToken.id == api_key_id).delete()
db.session.query(ApiToken).where(ApiToken.id == api_key_id).delete()
db.session.commit()
return {"result": "success"}, 204
@ -686,6 +682,8 @@ class DatasetRetrievalSettingApi(Resource):
| VectorType.TABLESTORE
| VectorType.HUAWEI_CLOUD
| VectorType.TENCENT
| VectorType.MATRIXONE
| VectorType.CLICKZETTA
):
return {
"retrieval_method": [
@ -733,6 +731,8 @@ class DatasetRetrievalSettingMockApi(Resource):
| VectorType.TABLESTORE
| VectorType.TENCENT
| VectorType.HUAWEI_CLOUD
| VectorType.MATRIXONE
| VectorType.CLICKZETTA
):
return {
"retrieval_method": [

View File

@ -1,11 +1,10 @@
import logging
from argparse import ArgumentTypeError
from datetime import UTC, datetime
from typing import cast
from typing import Literal, cast
from flask import request
from flask_login import current_user
from flask_restful import Resource, fields, marshal, marshal_with, reqparse
from flask_restful import Resource, marshal, marshal_with, reqparse
from sqlalchemy import asc, desc, select
from werkzeug.exceptions import Forbidden, NotFound
@ -43,19 +42,17 @@ from core.model_runtime.errors.invoke import InvokeAuthorizationError
from core.plugin.impl.exc import PluginDaemonClientSideError
from core.rag.extractor.entity.extract_setting import ExtractSetting
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from fields.document_fields import (
dataset_and_document_fields,
document_fields,
document_status_fields,
document_with_segments_fields,
)
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import Dataset, DatasetProcessRule, Document, DocumentSegment, UploadFile
from services.dataset_service import DatasetService, DocumentService
from services.entities.knowledge_entities.knowledge_entities import KnowledgeConfig
from tasks.add_document_to_index_task import add_document_to_index_task
from tasks.remove_document_from_index_task import remove_document_from_index_task
class DocumentResource(Resource):
@ -127,7 +124,7 @@ class GetProcessRuleApi(Resource):
# get the latest process rule
dataset_process_rule = (
db.session.query(DatasetProcessRule)
.filter(DatasetProcessRule.dataset_id == document.dataset_id)
.where(DatasetProcessRule.dataset_id == document.dataset_id)
.order_by(DatasetProcessRule.created_at.desc())
.limit(1)
.one_or_none()
@ -179,7 +176,7 @@ class DatasetDocumentListApi(Resource):
if search:
search = f"%{search}%"
query = query.filter(Document.name.like(search))
query = query.where(Document.name.like(search))
if sort.startswith("-"):
sort_logic = desc
@ -215,7 +212,7 @@ class DatasetDocumentListApi(Resource):
for document in documents:
completed_segments = (
db.session.query(DocumentSegment)
.filter(
.where(
DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != "re_segment",
@ -224,7 +221,7 @@ class DatasetDocumentListApi(Resource):
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.count()
)
document.completed_segments = completed_segments
@ -242,12 +239,10 @@ class DatasetDocumentListApi(Resource):
return response
documents_and_batch_fields = {"documents": fields.List(fields.Nested(document_fields)), "batch": fields.String}
@setup_required
@login_required
@account_initialization_required
@marshal_with(documents_and_batch_fields)
@marshal_with(dataset_and_document_fields)
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def post(self, dataset_id):
@ -293,6 +288,8 @@ class DatasetDocumentListApi(Resource):
try:
documents, batch = DocumentService.save_document_with_dataset_id(dataset, knowledge_config, current_user)
dataset = DatasetService.get_dataset(dataset_id)
except ProviderTokenNotInitError as ex:
raise ProviderNotInitializeError(ex.description)
except QuotaExceededError:
@ -300,7 +297,7 @@ class DatasetDocumentListApi(Resource):
except ModelCurrentlyNotSupportError:
raise ProviderModelCurrentlyNotSupportError()
return {"documents": documents, "batch": batch}
return {"dataset": dataset, "documents": documents, "batch": batch}
@setup_required
@login_required
@ -420,7 +417,7 @@ class DocumentIndexingEstimateApi(DocumentResource):
file = (
db.session.query(UploadFile)
.filter(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id)
.where(UploadFile.tenant_id == document.tenant_id, UploadFile.id == file_id)
.first()
)
@ -495,7 +492,7 @@ class DocumentBatchIndexingEstimateApi(DocumentResource):
file_id = data_source_info["upload_file_id"]
file_detail = (
db.session.query(UploadFile)
.filter(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id)
.where(UploadFile.tenant_id == current_user.current_tenant_id, UploadFile.id == file_id)
.first()
)
@ -571,7 +568,7 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
for document in documents:
completed_segments = (
db.session.query(DocumentSegment)
.filter(
.where(
DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document.id),
DocumentSegment.status != "re_segment",
@ -580,7 +577,7 @@ class DocumentBatchIndexingStatusApi(DocumentResource):
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.where(DocumentSegment.document_id == str(document.id), DocumentSegment.status != "re_segment")
.count()
)
# Create a dictionary with document attributes and additional fields
@ -614,7 +611,7 @@ class DocumentIndexingStatusApi(DocumentResource):
completed_segments = (
db.session.query(DocumentSegment)
.filter(
.where(
DocumentSegment.completed_at.isnot(None),
DocumentSegment.document_id == str(document_id),
DocumentSegment.status != "re_segment",
@ -623,7 +620,7 @@ class DocumentIndexingStatusApi(DocumentResource):
)
total_segments = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment")
.where(DocumentSegment.document_id == str(document_id), DocumentSegment.status != "re_segment")
.count()
)
@ -645,7 +642,7 @@ class DocumentIndexingStatusApi(DocumentResource):
return marshal(document_dict, document_status_fields)
class DocumentDetailApi(DocumentResource):
class DocumentApi(DocumentResource):
METADATA_CHOICES = {"all", "only", "without"}
@setup_required
@ -733,45 +730,6 @@ class DocumentDetailApi(DocumentResource):
return response, 200
class DocumentProcessingApi(DocumentResource):
@setup_required
@login_required
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, document_id, action):
dataset_id = str(dataset_id)
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
if not current_user.is_dataset_editor:
raise Forbidden()
if action == "pause":
if document.indexing_status != "indexing":
raise InvalidActionError("Document not in indexing state.")
document.paused_by = current_user.id
document.paused_at = datetime.now(UTC).replace(tzinfo=None)
document.is_paused = True
db.session.commit()
elif action == "resume":
if document.indexing_status not in {"paused", "error"}:
raise InvalidActionError("Document not in paused or error state.")
document.paused_by = None
document.paused_at = None
document.is_paused = False
db.session.commit()
else:
raise InvalidActionError()
return {"result": "success"}, 200
class DocumentDeleteApi(DocumentResource):
@setup_required
@login_required
@account_initialization_required
@ -795,6 +753,41 @@ class DocumentDeleteApi(DocumentResource):
return {"result": "success"}, 204
class DocumentProcessingApi(DocumentResource):
@setup_required
@login_required
@account_initialization_required
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, document_id, action: Literal["pause", "resume"]):
dataset_id = str(dataset_id)
document_id = str(document_id)
document = self.get_document(dataset_id, document_id)
# The role of the current user in the ta table must be admin, owner, dataset_operator, or editor
if not current_user.is_dataset_editor:
raise Forbidden()
if action == "pause":
if document.indexing_status != "indexing":
raise InvalidActionError("Document not in indexing state.")
document.paused_by = current_user.id
document.paused_at = naive_utc_now()
document.is_paused = True
db.session.commit()
elif action == "resume":
if document.indexing_status not in {"paused", "error"}:
raise InvalidActionError("Document not in paused or error state.")
document.paused_by = None
document.paused_at = None
document.is_paused = False
db.session.commit()
return {"result": "success"}, 200
class DocumentMetadataApi(DocumentResource):
@setup_required
@login_required
@ -833,7 +826,7 @@ class DocumentMetadataApi(DocumentResource):
document.doc_metadata[key] = value
document.doc_type = doc_type
document.updated_at = datetime.now(UTC).replace(tzinfo=None)
document.updated_at = naive_utc_now()
db.session.commit()
return {"result": "success", "message": "Document metadata updated."}, 200
@ -845,7 +838,7 @@ class DocumentStatusApi(DocumentResource):
@account_initialization_required
@cloud_edition_billing_resource_check("vector_space")
@cloud_edition_billing_rate_limit_check("knowledge")
def patch(self, dataset_id, action):
def patch(self, dataset_id, action: Literal["enable", "disable", "archive", "un_archive"]):
dataset_id = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id)
if dataset is None:
@ -862,77 +855,16 @@ class DocumentStatusApi(DocumentResource):
DatasetService.check_dataset_permission(dataset, current_user)
document_ids = request.args.getlist("document_id")
for document_id in document_ids:
document = self.get_document(dataset_id, document_id)
indexing_cache_key = "document_{}_indexing".format(document.id)
cache_result = redis_client.get(indexing_cache_key)
if cache_result is not None:
raise InvalidActionError(f"Document:{document.name} is being indexed, please try again later")
try:
DocumentService.batch_update_document_status(dataset, document_ids, action, current_user)
except services.errors.document.DocumentIndexingError as e:
raise InvalidActionError(str(e))
except ValueError as e:
raise InvalidActionError(str(e))
except NotFound as e:
raise NotFound(str(e))
if action == "enable":
if document.enabled:
continue
document.enabled = True
document.disabled_at = None
document.disabled_by = None
document.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
# Set cache to prevent indexing the same document multiple times
redis_client.setex(indexing_cache_key, 600, 1)
add_document_to_index_task.delay(document_id)
elif action == "disable":
if not document.completed_at or document.indexing_status != "completed":
raise InvalidActionError(f"Document: {document.name} is not completed.")
if not document.enabled:
continue
document.enabled = False
document.disabled_at = datetime.now(UTC).replace(tzinfo=None)
document.disabled_by = current_user.id
document.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
# Set cache to prevent indexing the same document multiple times
redis_client.setex(indexing_cache_key, 600, 1)
remove_document_from_index_task.delay(document_id)
elif action == "archive":
if document.archived:
continue
document.archived = True
document.archived_at = datetime.now(UTC).replace(tzinfo=None)
document.archived_by = current_user.id
document.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
if document.enabled:
# Set cache to prevent indexing the same document multiple times
redis_client.setex(indexing_cache_key, 600, 1)
remove_document_from_index_task.delay(document_id)
elif action == "un_archive":
if not document.archived:
continue
document.archived = False
document.archived_at = None
document.archived_by = None
document.updated_at = datetime.now(UTC).replace(tzinfo=None)
db.session.commit()
# Set cache to prevent indexing the same document multiple times
redis_client.setex(indexing_cache_key, 600, 1)
add_document_to_index_task.delay(document_id)
else:
raise InvalidActionError()
return {"result": "success"}, 200
@ -1034,7 +966,7 @@ class DocumentRetryApi(DocumentResource):
raise DocumentAlreadyFinishedError()
retry_documents.append(document)
except Exception:
logging.exception(f"Failed to retry document, document id: {document_id}")
logging.exception("Failed to retry document, document id: %s", document_id)
continue
# retry document
DocumentService.retry_document(dataset_id, retry_documents)
@ -1101,11 +1033,10 @@ api.add_resource(
api.add_resource(DocumentBatchIndexingEstimateApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-estimate")
api.add_resource(DocumentBatchIndexingStatusApi, "/datasets/<uuid:dataset_id>/batch/<string:batch>/indexing-status")
api.add_resource(DocumentIndexingStatusApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/indexing-status")
api.add_resource(DocumentDetailApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
api.add_resource(DocumentApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
api.add_resource(
DocumentProcessingApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/<string:action>"
)
api.add_resource(DocumentDeleteApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>")
api.add_resource(DocumentMetadataApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/metadata")
api.add_resource(DocumentStatusApi, "/datasets/<uuid:dataset_id>/documents/status/<string:action>/batch")
api.add_resource(DocumentPauseApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/processing/pause")
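Beyond the .filter/.where sweep, two things happen in this file: route actions gain typing.Literal annotations (pause/resume and enable/disable/archive/un_archive), and the long per-document enable/disable/archive loop moves behind DocumentService.batch_update_document_status, with the controller reduced to translating service errors into InvalidActionError/NotFound. Note that Literal only constrains type checkers; a sketch of why a runtime guard can still matter:

from typing import Literal

DocumentAction = Literal["pause", "resume"]

def patch_processing(action: DocumentAction) -> str:
    # Literal narrows what mypy/pyright accept, but at runtime the value is a
    # plain str from the URL, so invalid actions still need an explicit branch.
    if action == "pause":
        return "paused"
    if action == "resume":
        return "resumed"
    raise ValueError(f"invalid action: {action}")

print(patch_processing("pause"))  # -> paused
try:
    patch_processing("oops")
except ValueError as e:
    print(e)  # raised at runtime, despite the annotation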

View File

@ -1,6 +1,5 @@
import uuid
import pandas as pd
from flask import request
from flask_login import current_user
from flask_restful import Resource, marshal, reqparse
@ -14,8 +13,6 @@ from controllers.console.datasets.error import (
ChildChunkDeleteIndexError,
ChildChunkIndexingError,
InvalidActionError,
NoFileUploadedError,
TooManyFilesError,
)
from controllers.console.wraps import (
account_initialization_required,
@ -32,6 +29,7 @@ from extensions.ext_redis import redis_client
from fields.segment_fields import child_chunk_fields, segment_fields
from libs.login import login_required
from models.dataset import ChildChunk, DocumentSegment
from models.model import UploadFile
from services.dataset_service import DatasetService, DocumentService, SegmentService
from services.entities.knowledge_entities.knowledge_entities import ChildChunkUpdateArgs, SegmentUpdateArgs
from services.errors.chunk import ChildChunkDeleteIndexError as ChildChunkDeleteIndexServiceError
@ -78,7 +76,7 @@ class DatasetDocumentSegmentListApi(Resource):
query = (
select(DocumentSegment)
.filter(
.where(
DocumentSegment.document_id == str(document_id),
DocumentSegment.tenant_id == current_user.current_tenant_id,
)
@ -86,19 +84,19 @@ class DatasetDocumentSegmentListApi(Resource):
)
if status_list:
query = query.filter(DocumentSegment.status.in_(status_list))
query = query.where(DocumentSegment.status.in_(status_list))
if hit_count_gte is not None:
query = query.filter(DocumentSegment.hit_count >= hit_count_gte)
query = query.where(DocumentSegment.hit_count >= hit_count_gte)
if keyword:
query = query.where(DocumentSegment.content.ilike(f"%{keyword}%"))
if args["enabled"].lower() != "all":
if args["enabled"].lower() == "true":
query = query.filter(DocumentSegment.enabled == True)
query = query.where(DocumentSegment.enabled == True)
elif args["enabled"].lower() == "false":
query = query.filter(DocumentSegment.enabled == False)
query = query.where(DocumentSegment.enabled == False)
segments = db.paginate(select=query, page=page, per_page=limit, max_per_page=100, error_out=False)
@ -184,7 +182,7 @@ class DatasetDocumentSegmentApi(Resource):
raise ProviderNotInitializeError(ex.description)
segment_ids = request.args.getlist("segment_id")
document_indexing_cache_key = "document_{}_indexing".format(document.id)
document_indexing_cache_key = f"document_{document.id}_indexing"
cache_result = redis_client.get(document_indexing_cache_key)
if cache_result is not None:
raise InvalidActionError("Document is being indexed, please try again later")
@ -285,7 +283,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@ -331,7 +329,7 @@ class DatasetDocumentSegmentUpdateApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@ -365,37 +363,28 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
document = DocumentService.get_document(dataset_id, document_id)
if not document:
raise NotFound("Document not found.")
# get file from request
file = request.files["file"]
# check file
if "file" not in request.files:
raise NoFileUploadedError()
if len(request.files) > 1:
raise TooManyFilesError()
parser = reqparse.RequestParser()
parser.add_argument("upload_file_id", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
upload_file_id = args["upload_file_id"]
upload_file = db.session.query(UploadFile).where(UploadFile.id == upload_file_id).first()
if not upload_file:
raise NotFound("UploadFile not found.")
# check file type
if not file.filename or not file.filename.endswith(".csv"):
if not upload_file.name or not upload_file.name.lower().endswith(".csv"):
raise ValueError("Invalid file type. Only CSV files are allowed")
try:
# Skip the first row
df = pd.read_csv(file)
result = []
for index, row in df.iterrows():
if document.doc_form == "qa_model":
data = {"content": row.iloc[0], "answer": row.iloc[1]}
else:
data = {"content": row.iloc[0]}
result.append(data)
if len(result) == 0:
raise ValueError("The CSV file is empty.")
# async job
job_id = str(uuid.uuid4())
indexing_cache_key = "segment_batch_import_{}".format(str(job_id))
indexing_cache_key = f"segment_batch_import_{str(job_id)}"
# send batch add segments task
redis_client.setnx(indexing_cache_key, "waiting")
batch_create_segment_to_index_task.delay(
str(job_id), result, dataset_id, document_id, current_user.current_tenant_id, current_user.id
str(job_id), upload_file_id, dataset_id, document_id, current_user.current_tenant_id, current_user.id
)
except Exception as e:
return {"error": str(e)}, 500
@ -406,7 +395,7 @@ class DatasetDocumentSegmentBatchImportApi(Resource):
@account_initialization_required
def get(self, job_id):
job_id = str(job_id)
indexing_cache_key = "segment_batch_import_{}".format(job_id)
indexing_cache_key = f"segment_batch_import_{job_id}"
cache_result = redis_client.get(indexing_cache_key)
if cache_result is None:
raise ValueError("The job does not exist.")
@ -436,7 +425,7 @@ class ChildChunkAddApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@ -493,7 +482,7 @@ class ChildChunkAddApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@ -540,7 +529,7 @@ class ChildChunkAddApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@ -586,7 +575,7 @@ class ChildChunkUpdateApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@ -595,7 +584,7 @@ class ChildChunkUpdateApi(Resource):
child_chunk_id = str(child_chunk_id)
child_chunk = (
db.session.query(ChildChunk)
.filter(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
.where(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
.first()
)
if not child_chunk:
@ -635,7 +624,7 @@ class ChildChunkUpdateApi(Resource):
segment_id = str(segment_id)
segment = (
db.session.query(DocumentSegment)
.filter(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.where(DocumentSegment.id == str(segment_id), DocumentSegment.tenant_id == current_user.current_tenant_id)
.first()
)
if not segment:
@ -644,7 +633,7 @@ class ChildChunkUpdateApi(Resource):
child_chunk_id = str(child_chunk_id)
child_chunk = (
db.session.query(ChildChunk)
.filter(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
.where(ChildChunk.id == str(child_chunk_id), ChildChunk.tenant_id == current_user.current_tenant_id)
.first()
)
if not child_chunk:
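The batch-import endpoint above stops accepting a direct multipart upload and instead takes an upload_file_id pointing at a previously uploaded file, passing that id through to the async task; the extension check also becomes case-insensitive via .lower(). The check in isolation:

def is_csv(filename):
    # Case-insensitive: "Segments.CSV" now passes, matching the .lower() guard.
    return bool(filename) and filename.lower().endswith(".csv")

print(is_csv("Segments.CSV"), is_csv("notes.txt"), is_csv(None))  # True False False

Moving CSV parsing out of the request handler and into batch_create_segment_to_index_task also means the web worker no longer reads the whole file synchronously.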

View File

@ -1,36 +1,6 @@
from libs.exception import BaseHTTPException
class NoFileUploadedError(BaseHTTPException):
error_code = "no_file_uploaded"
description = "Please upload your file."
code = 400
class TooManyFilesError(BaseHTTPException):
error_code = "too_many_files"
description = "Only one file is allowed."
code = 400
class FileTooLargeError(BaseHTTPException):
error_code = "file_too_large"
description = "File size exceeded. {message}"
code = 413
class UnsupportedFileTypeError(BaseHTTPException):
error_code = "unsupported_file_type"
description = "File type not allowed."
code = 415
class HighQualityDatasetOnlyError(BaseHTTPException):
error_code = "high_quality_dataset_only"
description = "Current operation only supports 'high-quality' datasets."
code = 400
class DatasetNotInitializedError(BaseHTTPException):
error_code = "dataset_not_initialized"
description = "The dataset is still being initialized or indexing. Please wait a moment."

View File

@ -1,3 +1,5 @@
from typing import Literal
from flask_login import current_user
from flask_restful import Resource, marshal_with, reqparse
from werkzeug.exceptions import NotFound
@ -22,8 +24,8 @@ class DatasetMetadataCreateApi(Resource):
@marshal_with(dataset_metadata_fields)
def post(self, dataset_id):
parser = reqparse.RequestParser()
parser.add_argument("type", type=str, required=True, nullable=True, location="json")
parser.add_argument("name", type=str, required=True, nullable=True, location="json")
parser.add_argument("type", type=str, required=True, nullable=False, location="json")
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
metadata_args = MetadataArgs(**args)
@ -56,7 +58,7 @@ class DatasetMetadataApi(Resource):
@marshal_with(dataset_metadata_fields)
def patch(self, dataset_id, metadata_id):
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=True, location="json")
parser.add_argument("name", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
dataset_id_str = str(dataset_id)
@ -100,7 +102,7 @@ class DatasetMetadataBuiltInFieldActionApi(Resource):
@login_required
@account_initialization_required
@enterprise_license_required
def post(self, dataset_id, action):
def post(self, dataset_id, action: Literal["enable", "disable"]):
dataset_id_str = str(dataset_id)
dataset = DatasetService.get_dataset(dataset_id_str)
if dataset is None:
@ -127,7 +129,7 @@ class DocumentMetadataEditApi(Resource):
DatasetService.check_dataset_permission(dataset, current_user)
parser = reqparse.RequestParser()
parser.add_argument("operation_data", type=list, required=True, nullable=True, location="json")
parser.add_argument("operation_data", type=list, required=True, nullable=False, location="json")
args = parser.parse_args()
metadata_args = MetadataOperationData(**args)
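The metadata parsers flip nullable=True to nullable=False on required arguments. In flask_restful, required=True only rejects a missing key; nullable=False additionally rejects an explicit JSON null, which would otherwise reach MetadataArgs as None. A sketch:

from flask import Flask
from flask_restful import reqparse
from werkzeug.exceptions import BadRequest

app = Flask(__name__)
parser = reqparse.RequestParser()
parser.add_argument("name", type=str, required=True, nullable=False, location="json")

with app.test_request_context(json={"name": None}):
    try:
        parser.parse_args()
    except BadRequest:
        print("explicit null rejected with 400")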

View File

@ -0,0 +1,62 @@
from flask_login import current_user
from flask_restful import Resource
from werkzeug.exceptions import NotFound
from controllers.console import api
from controllers.console.wraps import (
account_initialization_required,
setup_required,
)
from core.file import helpers as file_helpers
from extensions.ext_database import db
from models.dataset import Dataset
from models.model import UploadFile
from services.dataset_service import DocumentService
class UploadFileApi(Resource):
@setup_required
@account_initialization_required
def get(self, dataset_id, document_id):
"""Get upload file."""
# check dataset
dataset_id = str(dataset_id)
dataset = (
db.session.query(Dataset)
.filter(Dataset.tenant_id == current_user.current_tenant_id, Dataset.id == dataset_id)
.first()
)
if not dataset:
raise NotFound("Dataset not found.")
# check document
document_id = str(document_id)
document = DocumentService.get_document(dataset.id, document_id)
if not document:
raise NotFound("Document not found.")
# check upload file
if document.data_source_type != "upload_file":
raise ValueError(f"Document data source type ({document.data_source_type}) is not upload_file.")
data_source_info = document.data_source_info_dict
if data_source_info and "upload_file_id" in data_source_info:
file_id = data_source_info["upload_file_id"]
upload_file = db.session.query(UploadFile).where(UploadFile.id == file_id).first()
if not upload_file:
raise NotFound("UploadFile not found.")
else:
raise ValueError("Upload file id not found in document data source info.")
url = file_helpers.get_signed_file_url(upload_file_id=upload_file.id)
return {
"id": upload_file.id,
"name": upload_file.name,
"size": upload_file.size,
"extension": upload_file.extension,
"url": url,
"download_url": f"{url}&as_attachment=true",
"mime_type": upload_file.mime_type,
"created_by": upload_file.created_by,
"created_at": upload_file.created_at.timestamp(),
}, 200
api.add_resource(UploadFileApi, "/datasets/<uuid:dataset_id>/documents/<uuid:document_id>/upload-file")

View File

@ -4,7 +4,7 @@ from controllers.console import api
from controllers.console.datasets.error import WebsiteCrawlError
from controllers.console.wraps import account_initialization_required, setup_required
from libs.login import login_required
from services.website_service import WebsiteService
from services.website_service import WebsiteCrawlApiRequest, WebsiteCrawlStatusApiRequest, WebsiteService
class WebsiteCrawlApi(Resource):
@ -24,10 +24,16 @@ class WebsiteCrawlApi(Resource):
parser.add_argument("url", type=str, required=True, nullable=True, location="json")
parser.add_argument("options", type=dict, required=True, nullable=True, location="json")
args = parser.parse_args()
WebsiteService.document_create_args_validate(args)
# crawl url
# Create typed request and validate
try:
result = WebsiteService.crawl_url(args)
api_request = WebsiteCrawlApiRequest.from_args(args)
except ValueError as e:
raise WebsiteCrawlError(str(e))
# Crawl URL using typed request
try:
result = WebsiteService.crawl_url(api_request)
except Exception as e:
raise WebsiteCrawlError(str(e))
return result, 200
@ -43,9 +49,16 @@ class WebsiteCrawlStatusApi(Resource):
"provider", type=str, choices=["firecrawl", "watercrawl", "jinareader"], required=True, location="args"
)
args = parser.parse_args()
# get crawl status
# Create typed request and validate
try:
result = WebsiteService.get_crawl_status(job_id, args["provider"])
api_request = WebsiteCrawlStatusApiRequest.from_args(args, job_id)
except ValueError as e:
raise WebsiteCrawlError(str(e))
# Get crawl status using typed request
try:
result = WebsiteService.get_crawl_status_typed(api_request)
except Exception as e:
raise WebsiteCrawlError(str(e))
return result, 200
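The crawl endpoints now build typed request objects (WebsiteCrawlApiRequest / WebsiteCrawlStatusApiRequest) from the parsed args before calling the service, so validation failures surface as ValueError and map cleanly to WebsiteCrawlError, separate from failures of the crawl itself. The internals of those classes aren't shown in this diff; a plausible shape of the pattern, with hypothetical field names:

from dataclasses import dataclass

@dataclass
class CrawlApiRequest:
    provider: str
    url: str

    @classmethod
    def from_args(cls, args: dict) -> "CrawlApiRequest":
        # Centralize validation so controllers only translate ValueError -> 400.
        if args.get("provider") not in {"firecrawl", "watercrawl", "jinareader"}:
            raise ValueError(f"unsupported provider: {args.get('provider')}")
        if not args.get("url"):
            raise ValueError("url is required")
        return cls(provider=args["provider"], url=args["url"])

print(CrawlApiRequest.from_args({"provider": "firecrawl", "url": "https://example.com"}))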

View File

@ -76,30 +76,6 @@ class EmailSendIpLimitError(BaseHTTPException):
code = 429
class FileTooLargeError(BaseHTTPException):
error_code = "file_too_large"
description = "File size exceeded. {message}"
code = 413
class UnsupportedFileTypeError(BaseHTTPException):
error_code = "unsupported_file_type"
description = "File type not allowed."
code = 415
class TooManyFilesError(BaseHTTPException):
error_code = "too_many_files"
description = "Only one file is allowed."
code = 400
class NoFileUploadedError(BaseHTTPException):
error_code = "no_file_uploaded"
description = "Please upload your file."
code = 400
class UnauthorizedAndForceLogout(BaseHTTPException):
error_code = "unauthorized_and_force_logout"
description = "Unauthorized and force logout."
@ -127,7 +103,7 @@ class EducationActivateLimitError(BaseHTTPException):
code = 429
class CompilanceRateLimitError(BaseHTTPException):
error_code = "compilance_rate_limit"
class ComplianceRateLimitError(BaseHTTPException):
error_code = "compliance_rate_limit"
description = "Rate limit exceeded for downloading compliance report."
code = 429

View File

@ -18,7 +18,6 @@ from controllers.console.app.error import (
from controllers.console.explore.wraps import InstalledAppResource
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from core.model_runtime.errors.invoke import InvokeError
from models.model import AppMode
from services.audio_service import AudioService
from services.errors.audio import (
AudioTooLargeServiceError,
@ -79,19 +78,9 @@ class ChatTextApi(InstalledAppResource):
message_id = args.get("message_id", None)
text = args.get("text", None)
if (
app_model.mode in {AppMode.ADVANCED_CHAT.value, AppMode.WORKFLOW.value}
and app_model.workflow
and app_model.workflow.features_dict
):
text_to_speech = app_model.workflow.features_dict.get("text_to_speech")
voice = args.get("voice") or text_to_speech.get("voice")
else:
try:
voice = args.get("voice") or app_model.app_model_config.text_to_speech_dict.get("voice")
except Exception:
voice = None
response = AudioService.transcript_tts(app_model=app_model, message_id=message_id, voice=voice, text=text)
voice = args.get("voice", None)
response = AudioService.transcript_tts(app_model=app_model, text=text, voice=voice, message_id=message_id)
return response
except services.errors.app_model_config.AppModelConfigBrokenError:
logging.exception("App model config broken.")

View File

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from flask_login import current_user
from flask_restful import reqparse
@ -27,6 +26,7 @@ from core.errors.error import (
from core.model_runtime.errors.invoke import InvokeError
from extensions.ext_database import db
from libs import helper
from libs.datetime_utils import naive_utc_now
from libs.helper import uuid_value
from models.model import AppMode
from services.app_generate_service import AppGenerateService
@ -51,7 +51,7 @@ class CompletionApi(InstalledAppResource):
streaming = args["response_mode"] == "streaming"
args["auto_generate_name"] = False
installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
installed_app.last_used_at = naive_utc_now()
db.session.commit()
try:
@ -111,7 +111,7 @@ class ChatApi(InstalledAppResource):
args["auto_generate_name"] = False
installed_app.last_used_at = datetime.now(UTC).replace(tzinfo=None)
installed_app.last_used_at = naive_utc_now()
db.session.commit()
try:

View File

@ -1,5 +1,4 @@
import logging
from datetime import UTC, datetime
from typing import Any
from flask import request
@ -13,6 +12,7 @@ from controllers.console.explore.wraps import InstalledAppResource
from controllers.console.wraps import account_initialization_required, cloud_edition_billing_resource_check
from extensions.ext_database import db
from fields.installed_app_fields import installed_app_list_fields
from libs.datetime_utils import naive_utc_now
from libs.login import login_required
from models import App, InstalledApp, RecommendedApp
from services.account_service import TenantService
@ -34,11 +34,11 @@ class InstalledAppsListApi(Resource):
if app_id:
installed_apps = (
db.session.query(InstalledApp)
.filter(and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == app_id))
.where(and_(InstalledApp.tenant_id == current_tenant_id, InstalledApp.app_id == app_id))
.all()
)
else:
installed_apps = db.session.query(InstalledApp).filter(InstalledApp.tenant_id == current_tenant_id).all()
installed_apps = db.session.query(InstalledApp).where(InstalledApp.tenant_id == current_tenant_id).all()
current_user.role = TenantService.get_user_role(current_user, current_user.current_tenant)
installed_app_list: list[dict[str, Any]] = [
@ -58,23 +58,40 @@ class InstalledAppsListApi(Resource):
# filter out apps that user doesn't have access to
if FeatureService.get_system_features().webapp_auth.enabled:
user_id = current_user.id
res = []
app_ids = [installed_app["app"].id for installed_app in installed_app_list]
webapp_settings = EnterpriseService.WebAppAuth.batch_get_app_access_mode_by_id(app_ids)
# Pre-filter out apps without setting or with sso_verified
filtered_installed_apps = []
app_id_to_app_code = {}
for installed_app in installed_app_list:
webapp_setting = webapp_settings.get(installed_app["app"].id)
if not webapp_setting:
app_id = installed_app["app"].id
webapp_setting = webapp_settings.get(app_id)
if not webapp_setting or webapp_setting.access_mode == "sso_verified":
continue
if webapp_setting.access_mode == "sso_verified":
continue
app_code = AppService.get_app_code_by_id(str(installed_app["app"].id))
if EnterpriseService.WebAppAuth.is_user_allowed_to_access_webapp(
user_id=user_id,
app_code=app_code,
):
app_code = AppService.get_app_code_by_id(str(app_id))
app_id_to_app_code[app_id] = app_code
filtered_installed_apps.append(installed_app)
app_codes = list(app_id_to_app_code.values())
# Batch permission check
permissions = EnterpriseService.WebAppAuth.batch_is_user_allowed_to_access_webapps(
user_id=user_id,
app_codes=app_codes,
)
# Keep only allowed apps
res = []
for installed_app in filtered_installed_apps:
app_id = installed_app["app"].id
app_code = app_id_to_app_code[app_id]
if permissions.get(app_code):
res.append(installed_app)
installed_app_list = res
logger.debug(f"installed_app_list: {installed_app_list}, user_id: {user_id}")
logger.debug("installed_app_list: %s, user_id: %s", installed_app_list, user_id)
installed_app_list.sort(
key=lambda app: (
@ -94,12 +111,12 @@ class InstalledAppsListApi(Resource):
parser.add_argument("app_id", type=str, required=True, help="Invalid app_id")
args = parser.parse_args()
recommended_app = db.session.query(RecommendedApp).filter(RecommendedApp.app_id == args["app_id"]).first()
recommended_app = db.session.query(RecommendedApp).where(RecommendedApp.app_id == args["app_id"]).first()
if recommended_app is None:
raise NotFound("App not found")
current_tenant_id = current_user.current_tenant_id
app = db.session.query(App).filter(App.id == args["app_id"]).first()
app = db.session.query(App).where(App.id == args["app_id"]).first()
if app is None:
raise NotFound("App not found")
@ -109,7 +126,7 @@ class InstalledAppsListApi(Resource):
installed_app = (
db.session.query(InstalledApp)
.filter(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id))
.where(and_(InstalledApp.app_id == args["app_id"], InstalledApp.tenant_id == current_tenant_id))
.first()
)
@ -122,7 +139,7 @@ class InstalledAppsListApi(Resource):
tenant_id=current_tenant_id,
app_owner_tenant_id=app.tenant_id,
is_pinned=False,
last_used_at=datetime.now(UTC).replace(tzinfo=None),
last_used_at=naive_utc_now(),
)
db.session.add(new_installed_app)
db.session.commit()
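The interesting change in this file is the permission filter: instead of calling is_user_allowed_to_access_webapp once per installed app, the list is pre-filtered (dropping apps without settings or with sso_verified access), app codes are collected, and a single batch_is_user_allowed_to_access_webapps call returns a code-to-bool map. A toy sketch of the batching shape (the permission backend here is faked):

def batch_is_allowed(user_id, app_codes):
    allowed = {"code-a"}  # stand-in for one round-trip to the auth service
    return {code: code in allowed for code in app_codes}

installed_app_list = [
    {"app_id": "1", "app_code": "code-a"},
    {"app_id": "2", "app_code": "code-b"},
]
permissions = batch_is_allowed("user-1", [a["app_code"] for a in installed_app_list])
res = [a for a in installed_app_list if permissions.get(a["app_code"])]
print([a["app_id"] for a in res])  # -> ['1']

One round-trip instead of N is the point; the sso_verified pre-filter just shrinks the batch further.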

View File

@ -5,7 +5,6 @@ from flask_restful import marshal_with, reqparse
from flask_restful.inputs import int_range
from werkzeug.exceptions import InternalServerError, NotFound
import services
from controllers.console.app.error import (
AppMoreLikeThisDisabledError,
CompletionRequestError,
@ -29,7 +28,11 @@ from models.model import AppMode
from services.app_generate_service import AppGenerateService
from services.errors.app import MoreLikeThisDisabledError
from services.errors.conversation import ConversationNotExistsError
from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError
from services.errors.message import (
FirstMessageNotExistsError,
MessageNotExistsError,
SuggestedQuestionsAfterAnswerDisabledError,
)
from services.message_service import MessageService
@ -52,9 +55,9 @@ class MessageListApi(InstalledAppResource):
return MessageService.pagination_by_first_id(
app_model, current_user, args["conversation_id"], args["first_id"], args["limit"]
)
except services.errors.conversation.ConversationNotExistsError:
except ConversationNotExistsError:
raise NotFound("Conversation Not Exists.")
except services.errors.message.FirstMessageNotExistsError:
except FirstMessageNotExistsError:
raise NotFound("First Message Not Exists.")
@ -77,7 +80,7 @@ class MessageFeedbackApi(InstalledAppResource):
rating=args.get("rating"),
content=args.get("content"),
)
except services.errors.message.MessageNotExistsError:
except MessageNotExistsError:
raise NotFound("Message Not Exists.")
return {"result": "success"}

View File

@ -28,7 +28,7 @@ def installed_app_required(view=None):
installed_app = (
db.session.query(InstalledApp)
.filter(
.where(
InstalledApp.id == str(installed_app_id), InstalledApp.tenant_id == current_user.current_tenant_id
)
.first()

View File

@ -8,7 +8,13 @@ from werkzeug.exceptions import Forbidden
import services
from configs import dify_config
from constants import DOCUMENT_EXTENSIONS
from controllers.common.errors import FilenameNotExistsError
from controllers.common.errors import (
FilenameNotExistsError,
FileTooLargeError,
NoFileUploadedError,
TooManyFilesError,
UnsupportedFileTypeError,
)
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_resource_check,
@ -18,13 +24,6 @@ from fields.file_fields import file_fields, upload_config_fields
from libs.login import login_required
from services.file_service import FileService
from .error import (
FileTooLargeError,
NoFileUploadedError,
TooManyFilesError,
UnsupportedFileTypeError,
)
PREVIEW_WORDS_LIMIT = 3000
@ -49,7 +48,6 @@ class FileApi(Resource):
@marshal_with(file_fields)
@cloud_edition_billing_resource_check("documents")
def post(self):
file = request.files["file"]
source_str = request.form.get("source")
source: Literal["datasets"] | None = "datasets" if source_str == "datasets" else None
@ -58,6 +56,7 @@ class FileApi(Resource):
if len(request.files) > 1:
raise TooManyFilesError()
file = request.files["file"]
if not file.filename:
raise FilenameNotExistsError
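A subtle ordering fix here: request.files["file"] used to be read before the no-file and too-many-files checks, so a request without a file raised a raw BadRequestKeyError instead of the intended NoFileUploadedError. The corrected order, simulated with a bare MultiDict:

from werkzeug.datastructures import MultiDict

files = MultiDict()  # simulates request.files for a request with no upload

# Check presence and count before indexing; files["file"] on an empty
# MultiDict raises BadRequestKeyError, which is what the old order hit.
if "file" not in files:
    print("no_file_uploaded -> 400")
elif len(files) > 1:
    print("too_many_files -> 400")
else:
    file = files["file"]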

View File

@ -7,18 +7,17 @@ from flask_restful import Resource, marshal_with, reqparse
import services
from controllers.common import helpers
from controllers.common.errors import RemoteFileUploadError
from controllers.common.errors import (
FileTooLargeError,
RemoteFileUploadError,
UnsupportedFileTypeError,
)
from core.file import helpers as file_helpers
from core.helper import ssrf_proxy
from fields.file_fields import file_fields_with_signed_url, remote_file_info_fields
from models.account import Account
from services.file_service import FileService
from .error import (
FileTooLargeError,
UnsupportedFileTypeError,
)
class RemoteFileInfoApi(Resource):
@marshal_with(remote_file_info_fields)

View File

@ -18,7 +18,7 @@ class VersionApi(Resource):
check_update_url = dify_config.CHECK_UPDATE_URL
result = {
"version": dify_config.CURRENT_VERSION,
"version": dify_config.project.version,
"release_date": "",
"release_notes": "",
"can_auto_update": False,
@ -32,9 +32,9 @@ class VersionApi(Resource):
return result
try:
response = requests.get(check_update_url, {"current_version": args.get("current_version")})
response = requests.get(check_update_url, {"current_version": args.get("current_version")}, timeout=(3, 10))
except Exception as error:
logging.warning("Check update version error: {}.".format(str(error)))
logging.warning("Check update version error: %s.", str(error))
result["version"] = args.get("current_version")
return result
@ -55,7 +55,7 @@ def _has_new_version(*, latest_version: str, current_version: str) -> bool:
# Compare versions
return latest > current
except version.InvalidVersion:
logging.warning(f"Invalid version format: latest={latest_version}, current={current_version}")
logging.warning("Invalid version format: latest=%s, current=%s", latest_version, current_version)
return False
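Two hardening tweaks above: the update check gains an explicit timeout=(3, 10) — a 3-second connect limit and a 10-second read limit — so a stalled update server can't hang the request, and the log lines switch to lazy %-formatting. The _has_new_version comparison already relies on packaging.version; in isolation it behaves like this (the network call is left commented out, with the URL omitted as in the source):

from packaging import version

def has_new_version(latest_version: str, current_version: str) -> bool:
    try:
        return version.parse(latest_version) > version.parse(current_version)
    except version.InvalidVersion:
        return False

# requests.get(check_update_url, {"current_version": "1.7.0"}, timeout=(3, 10))
# would cap connect/read waits at 3 s and 10 s respectively.
print(has_new_version("1.7.1", "1.7.0"), has_new_version("not-a-version", "1.7.0"))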

View File

@ -21,7 +21,7 @@ def plugin_permission_required(
with Session(db.engine) as session:
permission = (
session.query(TenantPluginPermission)
.filter(
.where(
TenantPluginPermission.tenant_id == tenant_id,
)
.first()

View File

@ -1,13 +1,21 @@
import datetime
import pytz
from flask import request
from flask_login import current_user
from flask_restful import Resource, fields, marshal_with, reqparse
from sqlalchemy import select
from sqlalchemy.orm import Session
from configs import dify_config
from constants.languages import supported_language
from controllers.console import api
from controllers.console.auth.error import (
EmailAlreadyInUseError,
EmailChangeLimitError,
EmailCodeError,
InvalidEmailError,
InvalidTokenError,
)
from controllers.console.error import AccountInFreezeError, AccountNotFound, EmailSendIpLimitError
from controllers.console.workspace.error import (
AccountAlreadyInitedError,
CurrentPasswordIncorrectError,
@ -18,15 +26,18 @@ from controllers.console.workspace.error import (
from controllers.console.wraps import (
account_initialization_required,
cloud_edition_billing_enabled,
enable_change_email,
enterprise_license_required,
only_edition_cloud,
setup_required,
)
from extensions.ext_database import db
from fields.member_fields import account_fields
from libs.helper import TimestampField, timezone
from libs.datetime_utils import naive_utc_now
from libs.helper import TimestampField, email, extract_remote_ip, timezone
from libs.login import login_required
from models import AccountIntegrate, InvitationCode
from models.account import Account
from services.account_service import AccountService
from services.billing_service import BillingService
from services.errors.account import CurrentPasswordIncorrectError as ServiceCurrentPasswordIncorrectError
@ -57,7 +68,7 @@ class AccountInitApi(Resource):
# check invitation code
invitation_code = (
db.session.query(InvitationCode)
.filter(
.where(
InvitationCode.code == args["invitation_code"],
InvitationCode.status == "unused",
)
@ -68,7 +79,7 @@ class AccountInitApi(Resource):
raise InvalidInvitationCodeError()
invitation_code.status = "used"
invitation_code.used_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
invitation_code.used_at = naive_utc_now()
invitation_code.used_by_tenant_id = account.current_tenant_id
invitation_code.used_by_account_id = account.id
@ -76,7 +87,7 @@ class AccountInitApi(Resource):
account.timezone = args["timezone"]
account.interface_theme = "light"
account.status = "active"
account.initialized_at = datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
account.initialized_at = naive_utc_now()
db.session.commit()
return {"result": "success"}
@ -217,7 +228,7 @@ class AccountIntegrateApi(Resource):
def get(self):
account = current_user
account_integrates = db.session.query(AccountIntegrate).filter(AccountIntegrate.account_id == account.id).all()
account_integrates = db.session.query(AccountIntegrate).where(AccountIntegrate.account_id == account.id).all()
base_url = request.url_root.rstrip("/")
oauth_base_path = "/console/api/oauth/login"
@ -369,6 +380,143 @@ class EducationAutoCompleteApi(Resource):
return BillingService.EducationIdentity.autocomplete(args["keywords"], args["page"], args["limit"])
class ChangeEmailSendEmailApi(Resource):
@enable_change_email
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
parser.add_argument("language", type=str, required=False, location="json")
parser.add_argument("phase", type=str, required=False, location="json")
parser.add_argument("token", type=str, required=False, location="json")
args = parser.parse_args()
ip_address = extract_remote_ip(request)
if AccountService.is_email_send_ip_limit(ip_address):
raise EmailSendIpLimitError()
if args["language"] is not None and args["language"] == "zh-Hans":
language = "zh-Hans"
else:
language = "en-US"
account = None
user_email = args["email"]
if args["phase"] is not None and args["phase"] == "new_email":
if args["token"] is None:
raise InvalidTokenError()
reset_data = AccountService.get_change_email_data(args["token"])
if reset_data is None:
raise InvalidTokenError()
user_email = reset_data.get("email", "")
if user_email != current_user.email:
raise InvalidEmailError()
else:
with Session(db.engine) as session:
account = session.execute(select(Account).filter_by(email=args["email"])).scalar_one_or_none()
if account is None:
raise AccountNotFound()
token = AccountService.send_change_email_email(
account=account, email=args["email"], old_email=user_email, language=language, phase=args["phase"]
)
return {"result": "success", "data": token}
class ChangeEmailCheckApi(Resource):
@enable_change_email
@setup_required
@login_required
@account_initialization_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
parser.add_argument("code", type=str, required=True, location="json")
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
user_email = args["email"]
is_change_email_error_rate_limit = AccountService.is_change_email_error_rate_limit(args["email"])
if is_change_email_error_rate_limit:
raise EmailChangeLimitError()
token_data = AccountService.get_change_email_data(args["token"])
if token_data is None:
raise InvalidTokenError()
if user_email != token_data.get("email"):
raise InvalidEmailError()
if args["code"] != token_data.get("code"):
AccountService.add_change_email_error_rate_limit(args["email"])
raise EmailCodeError()
# Verified, revoke the first token
AccountService.revoke_change_email_token(args["token"])
# Refresh token data by generating a new token
_, new_token = AccountService.generate_change_email_token(
user_email, code=args["code"], old_email=token_data.get("old_email"), additional_data={}
)
AccountService.reset_change_email_error_rate_limit(args["email"])
return {"is_valid": True, "email": token_data.get("email"), "token": new_token}
class ChangeEmailResetApi(Resource):
@enable_change_email
@setup_required
@login_required
@account_initialization_required
@marshal_with(account_fields)
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("new_email", type=email, required=True, location="json")
parser.add_argument("token", type=str, required=True, nullable=False, location="json")
args = parser.parse_args()
if AccountService.is_account_in_freeze(args["new_email"]):
raise AccountInFreezeError()
if not AccountService.check_email_unique(args["new_email"]):
raise EmailAlreadyInUseError()
reset_data = AccountService.get_change_email_data(args["token"])
if not reset_data:
raise InvalidTokenError()
AccountService.revoke_change_email_token(args["token"])
old_email = reset_data.get("old_email", "")
if current_user.email != old_email:
raise AccountNotFound()
updated_account = AccountService.update_account_email(current_user, email=args["new_email"])
AccountService.send_change_email_completed_notify_email(
email=args["new_email"],
)
return updated_account
class CheckEmailUnique(Resource):
@setup_required
def post(self):
parser = reqparse.RequestParser()
parser.add_argument("email", type=email, required=True, location="json")
args = parser.parse_args()
if AccountService.is_account_in_freeze(args["email"]):
raise AccountInFreezeError()
if not AccountService.check_email_unique(args["email"]):
raise EmailAlreadyInUseError()
return {"result": "success"}
# Register API resources
api.add_resource(AccountInitApi, "/account/init")
api.add_resource(AccountProfileApi, "/account/profile")
@ -385,5 +533,10 @@ api.add_resource(AccountDeleteUpdateFeedbackApi, "/account/delete/feedback")
api.add_resource(EducationVerifyApi, "/account/education/verify")
api.add_resource(EducationApi, "/account/education")
api.add_resource(EducationAutoCompleteApi, "/account/education/autocomplete")
# Change email
api.add_resource(ChangeEmailSendEmailApi, "/account/change-email")
api.add_resource(ChangeEmailCheckApi, "/account/change-email/validity")
api.add_resource(ChangeEmailResetApi, "/account/change-email/reset")
api.add_resource(CheckEmailUnique, "/account/change-email/check-email-unique")
# api.add_resource(AccountEmailApi, '/account/email')
# api.add_resource(AccountEmailVerifyApi, '/account/email-verify')
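The new change-email endpoints implement a two-phase token dance: the check step verifies the emailed code, revokes the first token, and issues a fresh one; the reset step consumes that second token, enforces email uniqueness (EmailAlreadyInUseError), and only then updates the account. A toy model of the token lifecycle (the real store behind AccountService is not shown here and is assumed to be external, e.g. Redis):

import secrets

tokens = {}  # stand-in for AccountService's token store

def issue(email, code):
    token = secrets.token_urlsafe(16)
    tokens[token] = {"email": email, "code": code}
    return token

def check(token, code):
    data = tokens.pop(token)           # verified -> revoke the first token
    if data["code"] != code:
        raise ValueError("bad code")   # the real flow also rate-limits failures
    return issue(data["email"], code)  # fresh token for the reset step

def reset(token):
    data = tokens.pop(token)           # the second token is single-use too
    return f"email changed for {data['email']}"

t1 = issue("old@example.com", "123456")
t2 = check(t1, "123456")
print(t1 in tokens, reset(t2))  # -> False email changed for old@example.com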

Some files were not shown because too many files have changed in this diff.